Merge pull request #1439 from hackmdio/release/2.0.0

Release 2.0.0
Commit 16b9409ef5, authored by Yukai Huang on 2020-03-03 18:43:42 +08:00 and committed by GitHub (GPG key ID: 4AEE18F83AFDEB23).
170 changed files with 19288 additions and 16034 deletions

.gitignore (1 change)

@@ -1,5 +1,4 @@
 node_modules
-package-lock.json
 composer.phar
 composer.lock
 .env.*.php


@@ -7,7 +7,7 @@ node_js:
   - "12"
 dist: xenial
-cache: yarn
+cache: npm
 matrix:
   fast_finish: true
@@ -19,8 +19,8 @@ matrix:
     - node_js: "12"
       script:
-        - yarn test:ci
-        - yarn build
+        - npm run test:ci
+        - npm run build
 jobs:
   include:

Procfile (new file)

@@ -0,0 +1 @@
web: ./bin/heroku_start.sh


@@ -44,7 +44,7 @@ HackMD team is committed to keep CodiMD open source. All contributions are welco
 You would find all documentation here: [CodiMD Documentation](https://hackmd.io/c/codimd-documentation)
 
 ### Deployment
-If you want to spin up an instance and start using immediately, see [Docker deployment](https://hackmd.io/c/codimd-documentation/%2Fs%2Fcodimd-documentation#Deployment).
+If you want to spin up an instance and start using immediately, see [Docker deployment](https://hackmd.io/c/codimd-documentation/%2Fs%2Fcodimd-docker-deployment).
 If you want to contribute to the project, start with [manual deployment](https://hackmd.io/c/codimd-documentation/%2Fs%2Fcodimd-manual-deployment).
 
 ### Configuration

app.js (33 changes)

@@ -24,6 +24,9 @@ var logger = require('./lib/logger')
 var response = require('./lib/response')
 var models = require('./lib/models')
 var csp = require('./lib/csp')
+const { Environment } = require('./lib/config/enum')
+
+const { versionCheckMiddleware, checkVersion } = require('./lib/web/middleware/checkVersion')
 
 function createHttpServer () {
   if (config.useSSL) {
@@ -66,7 +69,7 @@ io.engine.ws = new (require('ws').Server)({
 })
 
 // others
-var realtime = require('./lib/realtime.js')
+var realtime = require('./lib/realtime/realtime.js')
 // assign socket io to realtime
 realtime.io = io
@@ -153,7 +156,7 @@ server.on('resumeSession', function (id, cb) {
 })
 
 // middleware which blocks requests when we're too busy
-app.use(require('./lib/web/middleware/tooBusy'))
+app.use(require('./lib/middleware/tooBusy'))
 
 app.use(flash())
@@ -162,10 +165,15 @@ app.use(passport.initialize())
 app.use(passport.session())
 
 // check uri is valid before going further
-app.use(require('./lib/web/middleware/checkURIValid'))
+app.use(require('./lib/middleware/checkURIValid'))
 // redirect url without trailing slashes
-app.use(require('./lib/web/middleware/redirectWithoutTrailingSlashes'))
-app.use(require('./lib/web/middleware/codiMDVersion'))
+app.use(require('./lib/middleware/redirectWithoutTrailingSlashes'))
+app.use(require('./lib/middleware/codiMDVersion'))
+
+if (config.autoVersionCheck && process.env.NODE_ENV === Environment.production) {
+  checkVersion(app)
+  app.use(versionCheckMiddleware)
+}
 
 // routes need sessions
 // template files
@@ -186,6 +194,7 @@ app.locals.authProviders = {
   facebook: config.isFacebookEnable,
   twitter: config.isTwitterEnable,
   github: config.isGitHubEnable,
+  bitbucket: config.isBitbucketEnable,
   gitlab: config.isGitLabEnable,
   mattermost: config.isMattermostEnable,
   dropbox: config.isDropboxEnable,
@@ -199,23 +208,21 @@ app.locals.authProviders = {
   email: config.isEmailEnable,
   allowEmailRegister: config.allowEmailRegister
 }
+app.locals.versionInfo = {
+  latest: true,
+  versionItem: null
+}
 
 // Export/Import menu items
 app.locals.enableDropBoxSave = config.isDropboxEnable
 app.locals.enableGitHubGist = config.isGitHubEnable
 app.locals.enableGitlabSnippets = config.isGitlabSnippetsEnable
 
-app.use(require('./lib/web/baseRouter'))
-app.use(require('./lib/web/statusRouter'))
-app.use(require('./lib/web/auth'))
-app.use(require('./lib/web/historyRouter'))
-app.use(require('./lib/web/userRouter'))
-app.use(require('./lib/web/imageRouter'))
-app.use(require('./lib/web/noteRouter'))
+app.use(require('./lib/routes').router)
 
 // response not found if no any route matches
 app.get('*', function (req, res) {
-  response.errorNotFound(res)
+  response.errorNotFound(req, res)
 })
 
 // socket.io secure
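The new guard only enables the update check when `config.autoVersionCheck` is on and the app runs in production. For reference, a minimal sketch of what the `Environment` enum imported from `./lib/config/enum` presumably looks like (the exact values are an assumption; they are expected to mirror the usual `NODE_ENV` strings):

```javascript
// lib/config/enum.js (sketch, not the actual file from this commit)
// Assumed shape: plain string constants matching common NODE_ENV values.
exports.Environment = {
  development: 'development',
  production: 'production',
  test: 'test'
}
```

With that shape, `process.env.NODE_ENV === Environment.production` is a plain string comparison against `'production'`, so `checkVersion(app)` and `versionCheckMiddleware` are skipped in development and test runs.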


@@ -15,124 +15,132 @@
       "description": "Let npm also install development build tool",
       "value": "false"
     },
-    "HMD_SESSION_SECRET": {
+    "CMD_SESSION_SECRET": {
       "description": "Secret used to secure session cookies.",
       "required": false
     },
-    "HMD_HSTS_ENABLE": {
+    "CMD_HSTS_ENABLE": {
       "description": "whether to also use HSTS if HTTPS is enabled",
       "required": false
     },
-    "HMD_HSTS_MAX_AGE": {
+    "CMD_HSTS_MAX_AGE": {
       "description": "max duration, in seconds, to tell clients to keep HSTS status",
       "required": false
     },
-    "HMD_HSTS_INCLUDE_SUBDOMAINS": {
+    "CMD_HSTS_INCLUDE_SUBDOMAINS": {
       "description": "whether to tell clients to also regard subdomains as HSTS hosts",
       "required": false
     },
-    "HMD_HSTS_PRELOAD": {
+    "CMD_HSTS_PRELOAD": {
      "description": "whether to allow at all adding of the site to HSTS preloads (e.g. in browsers)",
       "required": false
     },
-    "HMD_DOMAIN": {
+    "CMD_DOMAIN": {
       "description": "domain name",
       "required": false
     },
-    "HMD_URL_PATH": {
+    "CMD_URL_PATH": {
       "description": "sub url path, like `www.example.com/<URL_PATH>`",
       "required": false
     },
-    "HMD_ALLOW_ORIGIN": {
+    "CMD_ALLOW_ORIGIN": {
       "description": "domain name whitelist (use comma to separate)",
       "required": false,
       "value": "localhost"
     },
-    "HMD_PROTOCOL_USESSL": {
+    "CMD_PROTOCOL_USESSL": {
       "description": "set to use ssl protocol for resources path (only applied when domain is set)",
       "required": false
     },
-    "HMD_URL_ADDPORT": {
+    "CMD_URL_ADDPORT": {
       "description": "set to add port on callback url (port 80 or 443 won't applied) (only applied when domain is set)",
       "required": false
     },
-    "HMD_FACEBOOK_CLIENTID": {
+    "CMD_FACEBOOK_CLIENTID": {
       "description": "Facebook API client id",
       "required": false
     },
-    "HMD_FACEBOOK_CLIENTSECRET": {
+    "CMD_FACEBOOK_CLIENTSECRET": {
       "description": "Facebook API client secret",
       "required": false
     },
-    "HMD_TWITTER_CONSUMERKEY": {
+    "CMD_TWITTER_CONSUMERKEY": {
       "description": "Twitter API consumer key",
       "required": false
     },
-    "HMD_TWITTER_CONSUMERSECRET": {
+    "CMD_TWITTER_CONSUMERSECRET": {
       "description": "Twitter API consumer secret",
       "required": false
     },
-    "HMD_GITHUB_CLIENTID": {
+    "CMD_GITHUB_CLIENTID": {
       "description": "GitHub API client id",
       "required": false
     },
-    "HMD_GITHUB_CLIENTSECRET": {
+    "CMD_GITHUB_CLIENTSECRET": {
       "description": "GitHub API client secret",
       "required": false
     },
-    "HMD_GITLAB_BASEURL": {
+    "CMD_BITBUCKET_CLIENTID": {
+      "description": "Bitbucket API client id",
+      "required": false
+    },
+    "CMD_BITBUCKET_CLIENTSECRET": {
+      "description": "Bitbucket API client secret",
+      "required": false
+    },
+    "CMD_GITLAB_BASEURL": {
       "description": "GitLab authentication endpoint, set to use other endpoint than GitLab.com (optional)",
       "required": false
     },
-    "HMD_GITLAB_CLIENTID": {
+    "CMD_GITLAB_CLIENTID": {
       "description": "GitLab API client id",
       "required": false
     },
-    "HMD_GITLAB_CLIENTSECRET": {
+    "CMD_GITLAB_CLIENTSECRET": {
       "description": "GitLab API client secret",
       "required": false
     },
-    "HMD_GITLAB_SCOPE": {
+    "CMD_GITLAB_SCOPE": {
       "description": "GitLab API client scope (optional)",
       "required": false
     },
-    "HMD_MATTERMOST_BASEURL": {
+    "CMD_MATTERMOST_BASEURL": {
       "description": "Mattermost authentication endpoint",
       "required": false
     },
-    "HMD_MATTERMOST_CLIENTID": {
+    "CMD_MATTERMOST_CLIENTID": {
       "description": "Mattermost API client id",
       "required": false
     },
-    "HMD_MATTERMOST_CLIENTSECRET": {
+    "CMD_MATTERMOST_CLIENTSECRET": {
       "description": "Mattermost API client secret",
       "required": false
     },
-    "HMD_DROPBOX_CLIENTID": {
+    "CMD_DROPBOX_CLIENTID": {
       "description": "Dropbox API client id",
       "required": false
     },
-    "HMD_DROPBOX_CLIENTSECRET": {
+    "CMD_DROPBOX_CLIENTSECRET": {
       "description": "Dropbox API client secret",
       "required": false
     },
-    "HMD_DROPBOX_APP_KEY": {
+    "CMD_DROPBOX_APP_KEY": {
       "description": "Dropbox app key (for import/export)",
       "required": false
     },
-    "HMD_GOOGLE_CLIENTID": {
+    "CMD_GOOGLE_CLIENTID": {
       "description": "Google API client id",
       "required": false
     },
-    "HMD_GOOGLE_CLIENTSECRET": {
+    "CMD_GOOGLE_CLIENTSECRET": {
       "description": "Google API client secret",
       "required": false
     },
-    "HMD_IMGUR_CLIENTID": {
+    "CMD_IMGUR_CLIENTID": {
       "description": "Imgur API client id",
       "required": false
     },
-    "HMD_ALLOW_PDF_EXPORT": {
+    "CMD_ALLOW_PDF_EXPORT": {
       "description": "Enable or disable PDF exports",
       "required": false
     }


@@ -4,17 +4,7 @@ set -e
 if [ ! -z "$DYNO" ]; then
   # setup config files
-  cat << EOF > .sequelizerc
-var path = require('path');
-
-module.exports = {
-  'config': path.resolve('config.json'),
-  'migrations-path': path.resolve('lib', 'migrations'),
-  'models-path': path.resolve('lib', 'models'),
-  'url': process.env.DATABASE_URL
-}
-EOF
+  cp .sequelizerc.example .sequelizerc
 
   cat << EOF > config.json

bin/heroku_start.sh (new executable file)

@@ -0,0 +1,5 @@
#!/bin/bash
set -euo pipefail
CMD_DB_URL="$DATABASE_URL" CMD_PORT="$PORT" npm run start


@@ -8,12 +8,11 @@ if [ -d .git ]; then
   cd "$(git rev-parse --show-toplevel)"
 fi
 
-if ! type yarn > /dev/null
+if ! type npm > /dev/null
 then
   cat << EOF
-yarn is not installed, please install Node.js, npm and yarn.
+npm is not installed, please install Node.js and npm.
 Read more on Node.js official website: https://nodejs.org
-And for yarn package manager at: https://yarnpkg.com/en/
 Setup will not be run
 EOF
   exit 0
@@ -29,14 +28,13 @@ if [ ! -f .sequelizerc ]; then
 fi
 
 echo "install packages"
-yarn install --pure-lockfile
-yarn install --production=false --pure-lockfile
+npm install
 
 cat << EOF
 Edit the following config file to setup CodiMD server and client.
-Read more info at https://github.com/hackmdio/codimd#configuration-files
+Read more info at https://hackmd.io/c/codimd-documentation/%2Fs%2Fcodimd-configuration
 
 * config.json -- CodiMD config
 * .sequelizerc -- db config


@@ -5,14 +5,13 @@ COPY --chown=hackmd:hackmd . .
 RUN set -xe && \
     git reset --hard && \
     git clean -fx && \
-    yarn install && \
-    yarn build && \
-    yarn install --production=true && \
+    npm install && \
+    npm run build && \
     cp ./deployments/docker-entrypoint.sh ./ && \
     cp .sequelizerc.example .sequelizerc && \
     rm -rf .git .gitignore .travis.yml .dockerignore .editorconfig .babelrc .mailmap .sequelizerc.example \
            test docs contribute \
-           yarn.lock webpack.prod.js webpack.htmlexport.js webpack.dev.js webpack.common.js \
+           package-lock.json webpack.prod.js webpack.htmlexport.js webpack.dev.js webpack.common.js \
            config.json.example README.md CONTRIBUTING.md AUTHORS
 
 FROM hackmdio/runtime:1.0.6


@@ -1,7 +1,7 @@
 version: "3"
 services:
   database:
-    image: postgres:11.5
+    image: postgres:11.6-alpine
     environment:
       - POSTGRES_USER=codimd
       - POSTGRES_PASSWORD=change_password
@@ -11,10 +11,10 @@ services:
     restart: always
   codimd:
     # you can use image or custom build below
-    # image: nabo.codimd.dev/hackmdio/hackmd:1.4.0
-    build:
-      context: ..
-      dockerfile: ./deployments/Dockerfile
+    image: nabo.codimd.dev/hackmdio/hackmd:2.0.0
+    # build:
+    #   context: ..
+    #   dockerfile: ./deployments/Dockerfile
     environment:
       - CMD_DB_URL=postgres://codimd:change_password@database/codimd
      - CMD_USECDN=false


@@ -1,26 +0,0 @@
# Webpack Docs
## `webpack.common.js`
This file contains all common definition for chunks and plugins, that are needed by the whole app.
**TODO:** Document which entry points are used for what.
## `webpack.htmlexport.js`
Separate config for the "save as html" feature.
Packs all CSS from `public/js/htmlExport.js` to `build/html.min.css`.
This file is then downloaded by client-side JS and used to create the HTML.
See `exportToHTML()` in `public/js/extra.js`.
## `webpack.dev.js`
The development config uses both common configs, enables development mode and enables "cheap" source maps (lines only).
If you need more detailed source maps while developing, you might want to use the `source-maps` option.
See https://webpack.js.org/configuration/devtool/ for details.
## `webpack.prod.js`
The production config uses both common configs and enables production mode.
This automatically enables various optimizations (e.g. UglifyJS). See https://webpack.js.org/concepts/mode/ for details.
For the global app config, the name of the emitted chunks is changed to include the content hash.
See https://webpack.js.org/guides/caching/ on why this is a good idea.
For the HTML export config, CSS minification is enabled.


@@ -1,38 +0,0 @@
Authentication guide - GitHub
===
***Note:** This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. Sign-in or sign-up for a GitHub account
2. Navigate to developer settings in your GitHub account [here](https://github.com/settings/developers) and select the "OAuth Apps" tab
3. Click on the **New OAuth App** button, to create a new OAuth App:
![create-oauth-app](../images/auth/create-oauth-app.png)
4. Fill out the new OAuth application registration form, and click **Register Application**
![register-oauth-application-form](../images/auth/register-oauth-application-form.png)
*Note: The callback URL is <your-hackmd-url>/auth/github/callback*
5. After successfully registering the application, you'll receive the Client ID and Client Secret for the application
![application-page](../images/auth/application-page.png)
6. Add the Client ID and Client Secret to your config.json file or pass them as environment variables
* config.json:
````javascript
{
"production": {
"github": {
"clientID": "3747d30eaccXXXXXXXXX",
"clientSecret": "2a8e682948eee0c580XXXXXXXXXXXXXXXXXXXXXX"
}
}
}
````
* environment variables:
````
HMD_GITHUB_CLIENTID=3747d30eaccXXXXXXXXX
HMD_GITHUB_CLIENTSECRET=2a8e682948eee0c580XXXXXXXXXXXXXXXXXXXXXX
````


@@ -1,32 +0,0 @@
# GitLab (self-hosted)
===
***Note:** This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. Sign in to your GitLab
2. Navigate to the application management page at `https://your.gitlab.domain/admin/applications` (admin permissions required)
3. Click **New application** to create a new application and fill out the registration form:
![New GitLab application](../images/auth/gitlab-new-application.png)
4. Click **Submit**
5. In the list of applications select **HackMD**. Leave that site open to copy the application ID and secret in the next step.
![Application: HackMD](../images/auth/gitlab-application-details.png)
6. In the `docker-compose.yml` add the following environment variables to `app:` `environment:`
```
- HMD_DOMAIN=your.hackmd.domain
- HMD_URL_ADDPORT=443
- HMD_PROTOCOL_USESSL=true
- HMD_GITLAB_BASEURL=https://your.gitlab.domain
- HMD_GITLAB_CLIENTID=23462a34example99XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
- HMD_GITLAB_CLIENTSECRET=5532e9dexamplXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
```
7. Run `docker-compose up -d` to apply your settings.
8. Sign in to your HackMD using your GitLab ID:
![Sign in via GitLab](../images/auth/gitlab-sign-in.png)


@@ -1,42 +0,0 @@
AD LDAP auth
===
To setup your CodiMD instance with Active Directory you need the following configs:
```
CMD_LDAP_URL=ldap://internal.example.com
CMD_LDAP_BINDDN=cn=binduser,cn=Users,dc=internal,dc=example,dc=com
CMD_LDAP_BINDCREDENTIALS=<super secret password>
CMD_LDAP_SEARCHBASE=dc=internal,dc=example,dc=com
CMD_LDAP_SEARCHFILTER=(&(objectcategory=person)(objectclass=user)(|(sAMAccountName={{username}})(mail={{username}})))
CMD_LDAP_USERIDFIELD=sAMAccountName
CMD_LDAP_PROVIDERNAME=Example Inc AD
```
`CMD_LDAP_BINDDN` is either the `distinguishedName` or the `userPrincipalName`. *This can cause "username/password is invalid" when either this value or the password from `CMD_LDAP_BINDCREDENTIALS` are incorrect.*
`CMD_LDAP_SEARCHFILTER` matches on all users and uses either the email address or the `sAMAccountName` (usually the login name you also use to login to Windows).
*Only using `sAMAccountName` looks like this:* `(&(objectcategory=person)(objectclass=user)(sAMAccountName={{username}}))`
`CMD_LDAP_USERIDFIELD` says we want to use `sAMAccountName` as the unique identifier for the account itself.
`CMD_LDAP_PROVIDERNAME` is just the name shown above the username and password fields on the login page.
Same in json:
```json
"ldap": {
"url": "ldap://internal.example.com",
"bindDn": "cn=binduser,cn=Users,dc=internal,dc=example,dc=com",
"bindCredentials": "<super secret password>",
"searchBase": "dc=internal,dc=example,dc=com",
"searchFilter": "(&(objectcategory=person)(objectclass=user)(|(sAMAccountName={{username}})(mail={{username}})))",
"useridField": "sAMAccountName",
},
```
More details and example: https://www.npmjs.com/package/passport-ldapauth


@@ -1,58 +0,0 @@
Authentication guide - Mattermost (self-hosted)
===
*Note: The Mattermost setup portion of this document is just a quick guide. See the [official documentation](https://docs.mattermost.com/developer/oauth-2-0-applications.html) for more details.*
This guide uses the generic OAuth2 module for compatibility with Mattermost version 5.0 and above.
1. Sign-in with an administrator account to your Mattermost instance
2. Make sure **OAuth 2.0 Service Provider** is enabled in the Main Menu (menu button next to your username in the top left corner) --> System Console --> Custom Integrations menu, which you can find at `https://your.mattermost.domain/admin_console/integrations/custom`
![mattermost-enable-oauth2](../images/auth/mattermost-enable-oauth2.png)
3. Navigate to the OAuth integration settings through Main Menu --> Integrations --> OAuth 2.0 Applications, at `https://your.mattermost.domain/yourteam/integrations/oauth2-apps`
4. Click on the **Add OAuth 2.0 Application** button to add a new OAuth application
![mattermost-oauth-app-add](../images/auth/mattermost-oauth-app-add.png)
5. Fill out the form and click **Save**
![mattermost-oauth-app-form](../images/auth/mattermost-oauth-app-form.png)
*Note: The callback URL is \<your-codimd-url\>/auth/oauth2/callback*
6. After saving the application, you'll receive the Client ID and Client Secret
![mattermost-oauth-app-done](../images/auth/mattermost-oauth-app-done.png)
7. Add the Client ID and Client Secret to your config.json file or pass them as environment variables
* config.json:
````javascript
{
"production": {
"oauth2": {
"baseURL": "https://your.mattermost.domain",
"userProfileURL": "https://your.mattermost.domain/api/v4/users/me",
"userProfileUsernameAttr": "id",
"userProfileDisplayNameAttr": "username",
"userProfileEmailAttr": "email",
"tokenURL": "https://your.mattermost.domain/oauth/access_token",
"authorizationURL": "https://your.mattermost.domain/oauth/authorize",
"clientID": "ii4p1u3jz7dXXXXXXXXXXXXXXX",
"clientSecret": "mqzzx6fydbXXXXXXXXXXXXXXXX"
}
}
}
````
* environment variables:
````
CMD_OAUTH2_BASEURL=https://your.mattermost.domain
CMD_OAUTH2_USER_PROFILE_URL=https://your.mattermost.domain/api/v4/users/me
CMD_OAUTH2_USER_PROFILE_USERNAME_ATTR=id
CMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR=username
CMD_OAUTH2_USER_PROFILE_EMAIL_ATTR=email
CMD_OAUTH2_TOKEN_URL=https://your.mattermost.domain/oauth/access_token
CMD_OAUTH2_AUTHORIZATION_URL=https://your.mattermost.domain/oauth/authorize
CMD_OAUTH2_CLIENT_ID=ii4p1u3jz7dXXXXXXXXXXXXXXX
CMD_OAUTH2_CLIENT_SECRET=mqzzx6fydbXXXXXXXXXXXXXXXX
````


@@ -1,52 +0,0 @@
Authentication guide - Nextcloud (self-hosted)
===
*This has been constructed using the [Nextcloud OAuth2 Documentation](https://docs.nextcloud.com/server/14/admin_manual/configuration_server/oauth2.html?highlight=oauth2) combined with [this issue comment on the nextcloud bugtracker](https://github.com/nextcloud/server/issues/5694#issuecomment-314761326).*
This guide uses the generic OAuth2 module for compatibility with Nextcloud 13 and above (this guide has been tested successfully with Nextcloud 14).
1. Sign-in with an administrator account to your Nextcloud server
2. Navigate to the OAuth integration settings: Profile Icon (top right) --> Settings
Then choose Security Settings from the *Administration* part of the list - Don't confuse this with Personal Security Settings, where you would change your personal password!
At the top there's OAuth 2.0-Clients.
![Where to find OAuth2 in Nextcloud](../images/auth/nextcloud-oauth2-1-settings.png)
3. Add your CodiMD instance by giving it a *name* (perhaps CodiMD, but could be anything) and a *Redirection-URI*. The Redirection-URI will be `\<your-codimd-url\>/auth/oauth2/callback`. Click <kbd>Add</kbd>.
![Adding a client to Nextcloud](../images/auth/nextcloud-oauth2-2-client-add.png)
4. You'll now see a line containing a *client identifier* and a *Secret*.
![Successfully added OAuth2-client](../images/auth/nextcloud-oauth2-3-clientid-secret.png)
5. That's it for Nextcloud, the rest is configured in your CodiMD `config.json` or via the `CMD_` environment variables!
6. Add the Client ID and Client Secret to your `config.json` file or pass them as environment variables. Make sure you also replace `<your-nextcloud-domain>` with the right domain name.
* `config.json`:
```javascript
{
"production": {
"oauth2": {
"clientID": "ii4p1u3jz7dXXXXXXXXXXXXXXX",
"clientSecret": "mqzzx6fydbXXXXXXXXXXXXXXXX",
"authorizationURL": "https://<your-nextcloud-domain>/apps/oauth2/authorize",
"tokenURL": "https://<your-nextcloud-domain>/apps/oauth2/api/v1/token",
"userProfileURL": "https://<your-nextcloud-domain>/ocs/v2.php/cloud/user?format=json",
"userProfileUsernameAttr": "ocs.data.id",
"userProfileDisplayNameAttr": "ocs.data.display-name",
"userProfileEmailAttr": "ocs.data.email"
}
}
}
```
* environment variables:
```sh
CMD_OAUTH2_CLIENT_ID=ii4p1u3jz7dXXXXXXXXXXXXXXX
CMD_OAUTH2_CLIENT_SECRET=mqzzx6fydbXXXXXXXXXXXXXXXX
CMD_OAUTH2_AUTHORIZATION_URL=https://<your-nextcloud-domain>/apps/oauth2/authorize
CMD_OAUTH2_TOKEN_URL=https://<your-nextcloud-domain>/apps/oauth2/api/v1/token
CMD_OAUTH2_USER_PROFILE_URL=https://<your-nextcloud-domain>/ocs/v2.php/cloud/user?format=json
CMD_OAUTH2_USER_PROFILE_USERNAME_ATTR=ocs.data.id
CMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR=ocs.data.display-name
CMD_OAUTH2_USER_PROFILE_EMAIL_ATTR=ocs.data.email
```


@@ -1,54 +0,0 @@
Authentication guide - SAML (OneLogin)
===
***Note:** This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. Sign-in or sign-up for an OneLogin account. (available free trial for 2 weeks)
2. Go to the administration page.
3. Select the **APPS** menu and click on the **Add Apps**.
![onelogin-add-app](../images/auth/onelogin-add-app.png)
4. Find "SAML Test Connector (SP)" for template of settings and select it.
![onelogin-select-template](../images/auth/onelogin-select-template.png)
5. Edit display name and icons for OneLogin dashboard as you want, and click **SAVE**.
![onelogin-edit-app-name](../images/auth/onelogin-edit-app-name.png)
6. After that, other tabs will appear. Click **Configuration**, fill out the items below, and click **SAVE**.
* RelayState: The base URL of your hackmd, which is the issuer. (the trailing slash is not needed)
* ACS (Consumer) URL Validator: The callback URL of your hackmd. (serverurl + /auth/saml/callback)
* ACS (Consumer) URL: same as above.
* Login URL: the login URL (SAML requester) of your hackmd. (serverurl + /auth/saml)
![onelogin-edit-sp-metadata](../images/auth/onelogin-edit-sp-metadata.png)
7. The registration is completed. Next, click **SSO** and copy or download the items below.
* X.509 Certificate: Click **View Details** and **DOWNLOAD** or copy the content of certificate ....(A)
* SAML 2.0 Endpoint (HTTP): Copy the URL ....(B)
![onelogin-copy-idp-metadata](../images/auth/onelogin-copy-idp-metadata.png)
8. In your hackmd server, create IdP certificate file from (A)
9. Add the IdP URL (B) and the Idp certificate file path to your config.json file or pass them as environment variables.
* config.json:
````javascript
{
"production": {
"saml": {
"idpSsoUrl": "https://*******.onelogin.com/trust/saml2/http-post/sso/******",
"idpCert": "/path/to/idp_cert.pem"
}
}
}
````
* environment variables
````
HMD_SAML_IDPSSOURL=https://*******.onelogin.com/trust/saml2/http-post/sso/******
HMD_SAML_IDPCERT=/path/to/idp_cert.pem
````
10. Try sign-in with SAML from your hackmd sign-in button or OneLogin dashboard (like the screenshot below).
![onelogin-use-dashboard](../images/auth/onelogin-use-dashboard.png)


@@ -1,85 +0,0 @@
Authentication guide - SAML
===
***Note:** This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
The basic procedure is the same as the case of OneLogin which is mentioned in [OneLogin-Guide](./saml-onelogin.md). If you want to match your IdP, you can use more configurations as below.
* If your IdP accepts metadata XML of the service provider to ease configuration, use this url to download metadata XML.
* {{your-serverurl}}/auth/saml/metadata
* _Note: If not accessible from IdP, download to local once and upload to IdP._
* Change the value of `issuer`, `identifierFormat` to match your IdP.
* `issuer`: A unique id to identify the application to the IdP, which is the base URL of your HackMD as default
* `identifierFormat`: The format of the unique id used to identify the user on the IdP side; the default is the email-address-based format. It is recommended that you use one of the formats below.
* urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress (default)
* urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified
* config.json:
````javascript
{
"production": {
"saml": {
/* omitted */
"issuer": "myhackmd"
"identifierFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified"
}
}
}
````
* environment variables
````
HMD_SAML_ISSUER=myhackmd
HMD_SAML_IDENTIFIERFORMAT=urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified
````
* Change mapping of attribute names to customize the displaying user name and email address to match your IdP.
* `attribute`: A dictionary to map attribute names
* `attribute.id`: A primary key of user table for your HackMD
* `attribute.username`: Attribute name of displaying user name on HackMD
* `attribute.email`: Attribute name of email address, which will be also used for Gravatar
* _Note: Default value of all attributes is NameID of SAML response, which is email address if `identifierFormat` is default._
* config.json:
````javascript
{
"production": {
"saml": {
/* omitted */
"attribute": {
"id": "sAMAccountName",
"username": "displayName",
"email": "mail"
}
}
}
}
````
* environment variables
````
HMD_SAML_ATTRIBUTE_ID=sAMAccountName
HMD_SAML_ATTRIBUTE_USERNAME=nickName
HMD_SAML_ATTRIBUTE_EMAIL=mail
````
* If you want to control permission by group membership, add group attribute name and required group (allowed) or external group (not allowed).
* `groupAttribute`: An attribute name of group membership
* `requiredGroups`: Group names array for allowed access to HackMD. Use vertical bar to separate for environment variables.
* `externalGroups`: Group names array for not allowed access to HackMD. Use vertical bar to separate for environment variables.
* _Note: Evaluates `externalGroups` first_
* config.json:
````javascript
{
"production": {
"saml": {
/* omitted */
"groupAttribute": "memberOf",
"requiredGroups": [ "hackmd-users", "board-members" ],
"externalGroups": [ "temporary-staff" ]
}
}
}
````
* environment variables
````
HMD_SAML_GROUPATTRIBUTE=memberOf
HMD_SAML_REQUIREDGROUPS=hackmd-users|board-members
HMD_SAML_EXTERNALGROUPS=temporary-staff
````


@@ -1,44 +0,0 @@
Authentication guide - Twitter
===
***Note:** This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. Sign-in or sign-up for a Twitter account
2. Go to the Twitter Application management page [here](https://apps.twitter.com/)
3. Click on the **Create New App** button to create a new Twitter app:
![create-twitter-app](../images/auth/create-twitter-app.png)
4. Fill out the create application form, check the developer agreement box, and click **Create Your Twitter Application**
![register-twitter-application](../images/auth/register-twitter-application.png)
*Note: you may have to register your phone number with Twitter to create a Twitter application.*
To do this, click your profile icon --> Settings and privacy --> Mobile --> Select Country/region --> Enter phone number --> Click Continue
5. After you receive confirmation that the Twitter application was created, click **Keys and Access Tokens**
![twitter-app-confirmation](../images/auth/twitter-app-confirmation.png)
6. Obtain your Twitter Consumer Key and Consumer Secret
![twitter-app-keys](../images/auth/twitter-app-keys.png)
7. Add your Consumer Key and Consumer Secret to your config.json file or pass them as environment variables:
* config.json:
````javascript
{
"production": {
"twitter": {
"consumerKey": "esTCJFXXXXXXXXXXXXXXXXXXX",
"consumerSecret": "zpCs4tU86pRVXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
}
}
}
````
* environment variables:
````
HMD_TWITTER_CONSUMERKEY=esTCJFXXXXXXXXXXXXXXXXXXX
HMD_TWITTER_CONSUMERSECRET=zpCs4tU86pRVXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
````

[34 binary image files (documentation screenshots, 5.5 KiB to 234 KiB) deleted; binary content not shown.]


@@ -1,131 +0,0 @@
Pad migration guide from etherpad-lite
===
The goal of this migration is to do a "dumb" import from all the pads in Etherpad, to notes in
CodiMD. In particular, the url locations of the pads in Etherpad will be lost. Furthermore, any
metadata in Etherpad, such as revisions, author data and also formatted text will not be migrated
to CodiMD (only the plain text contents).
Note that this guide is not really meant as a support guide. I migrated my own Etherpad to CodiMD,
and it turned out to be quite easy in my opinion. In this guide I share my experience. Stuff may
require some creativity to work properly in your case. When I wrote this guide, I was using
[Etherpad 1.7.0] and [CodiMD 1.2.1]. Good luck!
[Etherpad 1.7.0]: https://github.com/ether/etherpad-lite/tree/1.7.0
[CodiMD 1.2.1]: https://github.com/hackmdio/codimd/tree/1.2.1
## 0. Requirements
- `curl`
- running Etherpad server
- running CodiMD server
- [codimd-cli]
[codimd-cli]: https://github.com/hackmdio/codimd-cli/blob/master/bin/codimd
## 1. Retrieve the list of pads
First, compose a list of all the pads that you want to have migrated from your Etherpad. Other than
the admin interface, Etherpad does not have a dedicated function to dump a list of all the pads.
However, the Etherpad wiki explains how to list all the pads by [talking directly to the
database][howtolistallpads].
You will end up with a file containing a pad name on each line:
```
date-ideas
groceries
london
weddingchecklist
(...)
```
[howtolistallpads]: https://github.com/ether/etherpad-lite/wiki/How-to-list-all-pads/49701ecdcbe07aea7ad27ffa23aed0d99c2e17db
## 2. Run the migration
Download [codimd-cli] and put the script in the same directory as the file containing the pad names.
Add to this directory the file listed below, I called it `migrate-etherpad.sh`. Modify at least the
configuration settings `ETHERPAD_SERVER` and `CODIMD_SERVER`.
```shell
#!/bin/sh
# migrate-etherpad.sh
#
# Description: Migrate pads from etherpad to codimd
# Author: Daan Sprenkels <hello@dsprenkels.com>
# This script uses the codimd command line script[1] to import a list of pads from
# [1]: https://github.com/hackmdio/codimd-cli/blob/master/bin/codimd
# The base url to where etherpad is hosted
ETHERPAD_SERVER="https://etherpad.example.com"
# The base url where codimd is hosted
CODIMD_SERVER="https://codimd.example.com"
# Write a list of pads and the urls which they were migrated to
REDIRECTS_FILE="redirects.txt"
# Fail if not called correctly
if (( $# != 1 )); then
echo "Usage: $0 PAD_NAMES_FILE"
exit 2
fi
# Do the migration
for PAD_NAME in $1; do
# Download the pad
PAD_FILE="$(mktemp)"
curl "$ETHERPAD_SERVER/p/$PAD_NAME/export/txt" >"$PAD_FILE"
# Import the pad into codimd
OUTPUT="$(./codimd import "$PAD_FILE")"
echo "$PAD_NAME -> $OUTPUT" >>"$REDIRECTS_FILE"
done
```
Call this file like this:
```shell
./migrate-etherpad.sh pad_names.txt
```
This will download all the pads in `pad_names.txt` and put them on CodiMD. They will get assigned
random ids, so you won't be able to find them. The script will save the mappings to a file though
(in my case `redirects.txt`). You can use this file to redirect your users when they visit your
etherpad using a `301 Permanent Redirect` status code (see the next section).
## 3. Setup redirects (optional)
I got a `redirects.txt` file that looked a bit like this:
```
date-ideas -> Found. Redirecting to https://codimd.example.com/mPt0KfiKSBOTQ3mNcdfn
groceries -> Found. Redirecting to https://codimd.example.com/UukqgwLfhYyUUtARlcJ2_y
london -> Found. Redirecting to https://codimd.example.com/_d3wa-BE8t4Swv5w7O2_9R
weddingchecklist -> Found. Redirecting to https://codimd.example.com/XcQGqlBjl0u40wfT0N8TzQ
(...)
```
Using some `sed` magic, I changed it to an nginx config snippet:
```
location = /p/date-ideas {
return 301 https://codimd.example.com/mPt0M1KfiKSBOTQ3mNcdfn;
}
location = /p/groceries {
return 301 https://codimd.example.com/UukqgwLfhYyUUtARlcJ2_y;
}
location = /p/london {
return 301 https://codimd.example.com/_d3wa-BE8t4Swv5w7O2_9R;
}
location = /p/weddingchecklist {
return 301 https://codimd.example.com/XcQGqlBjl0u40wfT0N8TzQ;
}
```
I put this file into my `etherpad.example.com` nginx config, such that all the users would be
redirected accordingly.
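The "sed magic" itself is not shown in the guide. As a rough equivalent, a small Node script along these lines could turn the `redirects.txt` format shown earlier into that nginx snippet (an illustrative sketch, not part of the original guide, assuming every line has the form `name -> Found. Redirecting to <url>`):
```javascript
// redirects-to-nginx.js (illustrative sketch)
const fs = require('fs')

const lines = fs.readFileSync('redirects.txt', 'utf8').split('\n').filter(Boolean)
for (const line of lines) {
  // e.g. "groceries -> Found. Redirecting to https://codimd.example.com/UukqgwLfhYyUUtARlcJ2_y"
  const match = line.match(/^(\S+) -> .*Redirecting to (\S+)$/)
  if (!match) continue
  const [, pad, url] = match
  console.log(`location = /p/${pad} {\n    return 301 ${url};\n}`)
}
```
Running `node redirects-to-nginx.js > redirects.conf` produces a snippet equivalent to the one above, ready to be included in the Etherpad server block.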


@@ -1,85 +0,0 @@
Minio Guide for CodiMD
===
***Note:** This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. First of all you need to set up Minio itself.
Please refer to the [official Minio docs](https://docs.minio.io/) for a
production setup.
For checking it out and development purposes a non-persistent setup is enough:
```console
docker run --name test-minio --rm -d -p 9000:9000 minio/minio server /data
```
*Please note this is not for production use, as all your data is lost
when you stop this container*
2. The next step is to get the credentials from the container:
```
docker logs test-minio
```
![docker logs](images/minio-image-upload/docker-logs.png)
3. Open http://localhost:9000 and login with the shown credentials.
![minio default view](images/minio-image-upload/default-view.png)
4. Create a bucket for HackMD
![minio create bucket](images/minio-image-upload/create-bucket.png)
5. Add a policy for the prefix `uploads` and make it read-only.
![minio edit policy](images/minio-image-upload/open-edit-policy.png)
*Open policy editor*
![minio policy adding](images/minio-image-upload/create-policy.png)
*Add policy for uploads*
6. Set credentials and configs for Minio in HackMD's `config.json`
```JSON
"minio": {
"accessKey": "888MXJ7EP4XXXXXXXXX",
"secretKey": "yQS2EbM1Y6IJrp/1BUKWq2/XXXXXXXXXXXXXXX",
"endPoint": "localhost",
"port": 9000,
"secure": false
}
```
*You have to use different values for `endpoint` and `port` for a production
setup. Keep in mind the `endpoint` address has to be publicly accessible from
your browser.*
7. Set bucket name
```JSON
"s3bucket": "hackmd"
```
8. Set upload type.
```JSON
"imageuploadtype": "minio"
```
9. Review your config.
```json
{
// all your other config…
"minio": {
"accessKey": "888MXJ7EP4XXXXXXXXX",
"secretKey": "yQS2EbM1Y6IJrp/1BUKWq2/XXXXXXXXXXXXXXX",
"endPoint": "localhost",
"port": 9000,
"secure": false
},
"s3bucket": "hackmd",
"imageuploadtype": "minio"
}
```


@@ -1,17 +0,0 @@
Setup your terms of use
===
To set up your terms of use, you need to provide a document called `terms-of-use.md` which contains them, written in Markdown of course.
It has to be placed under `./public/docs/` and will be automatically turned into a CodiMD document. It will also be automatically updated as soon as you change the document on disk.
As soon as the file exists a link will show up in the bottom part along with the release notes and link to them.
Setup your privacy policy
===
To add a privacy policy you can use the same technique as for the terms of use. The main difference is that the document is called `privacy.md`.
See our example file `./public/docs/privacy.md.example`, which contains some useful hints for writing your own privacy policy.
As with the terms of use, a link to the privacy notices will show up in the area where the release notes are provided on the index page.


@@ -1,83 +0,0 @@
# Guide - Setup CodiMD S3 image upload
***Note:** This guide was written before the renaming. Just replace `HackMD` with `CodiMD` in your mind :smile: thanks!*
1. Go to [AWS S3 console](https://console.aws.amazon.com/s3/home) and create a new bucket.
![create-bucket](images/s3-image-upload/create-bucket.png)
2. Click on bucket, select **Properties** on the side panel, and find **Permission** section. Click **Edit bucket policy**.
![bucket-property](images/s3-image-upload/bucket-property.png)
3. Enter the following policy, replace `bucket_name` with your bucket name:
![bucket-policy-editor](images/s3-image-upload/bucket-policy-editor.png)
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": "*",
"Action": "s3:GetObject",
"Resource": "arn:aws:s3:::bucket_name/uploads/*"
}
]
}
```
4. Go to IAM console and create a new IAM user. Remember your user credentials(`key`/`access token`)
5. Enter user page, select **Permission** tab, look at **Inline Policies** section, and click **Create User Policy**
![iam-user](images/s3-image-upload/iam-user.png)
6. Select **Custom Policy**
![custom-policy](images/s3-image-upload/custom-policy.png)
7. Enter the following policy, replace `bucket_name` with your bucket name:
![review-policy](images/s3-image-upload/review-policy.png)
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"s3:*"
],
"Resource": [
"arn:aws:s3:::bucket_name/uploads/*"
]
}
]
}
```
8. Edit `config.json` and set following keys:
```javascript
{
"production": {
...
"imageuploadtype": "s3",
"s3": {
"accessKeyId": "YOUR_S3_ACCESS_KEY_ID",
"secretAccessKey": "YOUR_S3_ACCESS_KEY",
"region": "YOUR_S3_REGION" // example: ap-northeast-1
},
"s3bucket": "YOUR_S3_BUCKET_NAME"
}
}
```
9. In addition to editing `config.json` directly, you can also use [environment variables](https://github.com/hackmdio/hackmd#environment-variables-will-overwrite-other-server-configs).
## Related Tools
* [AWS Policy Generator](http://awspolicygen.s3.amazonaws.com/policygen.html)


@@ -0,0 +1,28 @@
'use strict'
const Router = require('express').Router
const passport = require('passport')
const BitbucketStrategy = require('passport-bitbucket-oauth2').Strategy
const config = require('../../config')
const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
const bitbucketAuth = module.exports = Router()
passport.use(new BitbucketStrategy({
clientID: config.bitbucket.clientID,
clientSecret: config.bitbucket.clientSecret,
callbackURL: config.serverURL + '/auth/bitbucket/callback'
}, passportGeneralCallback))
bitbucketAuth.get('/auth/bitbucket', function (req, res, next) {
setReturnToFromReferer(req)
passport.authenticate('bitbucket')(req, res, next)
})
// bitbucket auth callback
bitbucketAuth.get('/auth/bitbucket/callback',
passport.authenticate('bitbucket', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
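Like the other providers, the new Bitbucket strategy reads its credentials from `config.bitbucket` and registers its callback at `<serverURL>/auth/bitbucket/callback`. A hedged example of the corresponding `config.json` entry, assuming the keys follow the same pattern as the other OAuth providers (the environment-variable equivalents `CMD_BITBUCKET_CLIENTID` / `CMD_BITBUCKET_CLIENTSECRET` are the ones added to `app.json` above):
```javascript
{
  "production": {
    "bitbucket": {
      "clientID": "your_bitbucket_oauth_key",
      "clientSecret": "your_bitbucket_oauth_secret"
    }
  }
}
```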


@@ -3,7 +3,7 @@
 const Router = require('express').Router
 const passport = require('passport')
 const DropboxStrategy = require('passport-dropbox-oauth2').Strategy
-const config = require('../../../config')
+const config = require('../../config')
 const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
 
 const dropboxAuth = module.exports = Router()


@@ -4,12 +4,12 @@ const Router = require('express').Router
 const passport = require('passport')
 const validator = require('validator')
 const LocalStrategy = require('passport-local').Strategy
-const config = require('../../../config')
-const models = require('../../../models')
-const logger = require('../../../logger')
+const config = require('../../config')
+const models = require('../../models')
+const logger = require('../../logger')
 const { setReturnToFromReferer } = require('../utils')
 const { urlencodedParser } = require('../../utils')
-const response = require('../../../response')
+const response = require('../../response')
 
 const emailAuth = module.exports = Router()
@@ -33,8 +33,8 @@ passport.use(new LocalStrategy({
 if (config.allowEmailRegister) {
   emailAuth.post('/register', urlencodedParser, function (req, res, next) {
-    if (!req.body.email || !req.body.password) return response.errorBadRequest(res)
-    if (!validator.isEmail(req.body.email)) return response.errorBadRequest(res)
+    if (!req.body.email || !req.body.password) return response.errorBadRequest(req, res)
+    if (!validator.isEmail(req.body.email)) return response.errorBadRequest(req, res)
     models.User.findOrCreate({
       where: {
         email: req.body.email
@@ -57,14 +57,14 @@ if (config.allowEmailRegister) {
       return res.redirect(config.serverURL + '/')
     }).catch(function (err) {
       logger.error('auth callback failed: ' + err)
-      return response.errorInternalError(res)
+      return response.errorInternalError(req, res)
     })
   })
 }
 
 emailAuth.post('/login', urlencodedParser, function (req, res, next) {
-  if (!req.body.email || !req.body.password) return response.errorBadRequest(res)
-  if (!validator.isEmail(req.body.email)) return response.errorBadRequest(res)
+  if (!req.body.email || !req.body.password) return response.errorBadRequest(req, res)
+  if (!validator.isEmail(req.body.email)) return response.errorBadRequest(req, res)
   setReturnToFromReferer(req)
   passport.authenticate('local', {
     successReturnToOrRedirect: config.serverURL + '/',


@@ -4,7 +4,7 @@ const Router = require('express').Router
 const passport = require('passport')
 const FacebookStrategy = require('passport-facebook').Strategy
-const config = require('../../../config')
+const config = require('../../config')
 const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
 
 const facebookAuth = module.exports = Router()


@@ -3,8 +3,8 @@
 const Router = require('express').Router
 const passport = require('passport')
 const GithubStrategy = require('passport-github').Strategy
-const config = require('../../../config')
-const response = require('../../../response')
+const config = require('../../config')
+const response = require('../../response')
 const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
 const { URL } = require('url')


@@ -3,8 +3,8 @@
 const Router = require('express').Router
 const passport = require('passport')
 const GitlabStrategy = require('passport-gitlab2').Strategy
-const config = require('../../../config')
-const response = require('../../../response')
+const config = require('../../config')
+const response = require('../../response')
 const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
 const HttpsProxyAgent = require('https-proxy-agent')


@@ -3,7 +3,7 @@
 const Router = require('express').Router
 const passport = require('passport')
 var GoogleStrategy = require('passport-google-oauth20').Strategy
-const config = require('../../../config')
+const config = require('../../config')
 const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
 
 const googleAuth = module.exports = Router()
@@ -17,7 +17,10 @@ passport.use(new GoogleStrategy({
 googleAuth.get('/auth/google', function (req, res, next) {
   setReturnToFromReferer(req)
-  passport.authenticate('google', { scope: ['profile'] })(req, res, next)
+  passport.authenticate('google', {
+    scope: ['profile'],
+    hostedDomain: config.google.hostedDomain
+  })(req, res, next)
 })
 // google auth callback
 googleAuth.get('/auth/google/callback',
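The `hostedDomain` value passed to `passport.authenticate` above is read from `config.google.hostedDomain` and is the usual way to limit Google sign-in to accounts from one domain. A hedged sketch of how that might look in `config.json`, assuming the key sits next to the existing Google credentials (key names inferred from the code above, not confirmed elsewhere in this diff):
```javascript
{
  "production": {
    "google": {
      "clientID": "your_google_client_id",
      "clientSecret": "your_google_client_secret",
      "hostedDomain": "example.com"
    }
  }
}
```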


@@ -3,9 +3,9 @@
 const Router = require('express').Router
 const passport = require('passport')
 
-const config = require('../../config')
-const logger = require('../../logger')
-const models = require('../../models')
+const config = require('../config')
+const logger = require('../logger')
+const models = require('../models')
 
 const authRouter = module.exports = Router()
@@ -37,6 +37,7 @@ passport.deserializeUser(function (id, done) {
 if (config.isFacebookEnable) authRouter.use(require('./facebook'))
 if (config.isTwitterEnable) authRouter.use(require('./twitter'))
 if (config.isGitHubEnable) authRouter.use(require('./github'))
+if (config.isBitbucketEnable) authRouter.use(require('./bitbucket'))
 if (config.isGitLabEnable) authRouter.use(require('./gitlab'))
 if (config.isMattermostEnable) authRouter.use(require('./mattermost'))
 if (config.isDropboxEnable) authRouter.use(require('./dropbox'))


@@ -3,12 +3,12 @@
 const Router = require('express').Router
 const passport = require('passport')
 const LDAPStrategy = require('passport-ldapauth')
-const config = require('../../../config')
-const models = require('../../../models')
-const logger = require('../../../logger')
+const config = require('../../config')
+const models = require('../../models')
+const logger = require('../../logger')
 const { setReturnToFromReferer } = require('../utils')
 const { urlencodedParser } = require('../../utils')
-const response = require('../../../response')
+const response = require('../../response')
 
 const ldapAuth = module.exports = Router()
@@ -81,7 +81,7 @@ passport.use(new LDAPStrategy({
 }))
 
 ldapAuth.post('/auth/ldap', urlencodedParser, function (req, res, next) {
-  if (!req.body.username || !req.body.password) return response.errorBadRequest(res)
+  if (!req.body.username || !req.body.password) return response.errorBadRequest(req, res)
   setReturnToFromReferer(req)
   passport.authenticate('ldapauth', {
     successReturnToOrRedirect: config.serverURL + '/',


@@ -5,7 +5,7 @@ const Router = require('express').Router
 const passport = require('passport')
 const MattermostClient = require('mattermost-redux/client/client4').default
 const OAuthStrategy = require('passport-oauth2').Strategy
-const config = require('../../../config')
+const config = require('../../config')
 const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
 
 const mattermostAuth = module.exports = Router()

lib/auth/oauth2/index.js (new file)

@@ -0,0 +1,33 @@
'use strict'
const Router = require('express').Router
const passport = require('passport')
const config = require('../../config')
const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
const { OAuth2CustomStrategy } = require('./strategy')
const oauth2Auth = module.exports = Router()
passport.use(new OAuth2CustomStrategy({
authorizationURL: config.oauth2.authorizationURL,
tokenURL: config.oauth2.tokenURL,
clientID: config.oauth2.clientID,
clientSecret: config.oauth2.clientSecret,
callbackURL: config.serverURL + '/auth/oauth2/callback',
userProfileURL: config.oauth2.userProfileURL,
scope: config.oauth2.scope
}, passportGeneralCallback))
oauth2Auth.get('/auth/oauth2', function (req, res, next) {
setReturnToFromReferer(req)
passport.authenticate('oauth2')(req, res, next)
})
// oauth2 auth callback
oauth2Auth.get('/auth/oauth2/callback',
passport.authenticate('oauth2', {
successReturnToOrRedirect: config.serverURL + '/',
failureRedirect: config.serverURL + '/'
})
)
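
For context, the router above is driven entirely by the config.oauth2 block; a minimal sketch of the values it expects, with placeholder URLs and credentials (these correspond to the CMD_OAUTH2_* variables added later in this diff):

  // Hypothetical values for illustration only.
  module.exports = {
    oauth2: {
      providerName: 'Example SSO',
      authorizationURL: 'https://sso.example.com/oauth/authorize',
      tokenURL: 'https://sso.example.com/oauth/token',
      clientID: 'codimd',
      clientSecret: 'change-me',
      userProfileURL: 'https://sso.example.com/api/me',
      scope: 'email'
    }
  }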

View File

@ -0,0 +1,74 @@
'use strict'
const { Strategy, InternalOAuthError } = require('passport-oauth2')
const config = require('../../config')
function parseProfile (data) {
const username = extractProfileAttribute(data, config.oauth2.userProfileUsernameAttr)
const displayName = extractProfileAttribute(data, config.oauth2.userProfileDisplayNameAttr)
const email = extractProfileAttribute(data, config.oauth2.userProfileEmailAttr)
if (!username) {
throw new Error('cannot fetch username: please set correct CMD_OAUTH2_USER_PROFILE_USERNAME_ATTR')
}
return {
id: username,
username: username,
displayName: displayName,
email: email
}
}
function extractProfileAttribute (data, path) {
if (!data) return undefined
if (typeof path !== 'string') return undefined
// can handle stuff like `attrs[0].name`
path = path.split('.')
for (const segment of path) {
const m = segment.match(/([\d\w]+)\[(.*)\]/)
if (!m) {
data = data[segment]
} else {
if (m.length < 3) return undefined
if (!data[m[1]]) return undefined
data = data[m[1]][m[2]]
}
if (!data) return undefined
}
return data
}
class OAuth2CustomStrategy extends Strategy {
constructor (options, verify) {
options.customHeaders = options.customHeaders || {}
super(options, verify)
this.name = 'oauth2'
this._userProfileURL = options.userProfileURL
this._oauth2.useAuthorizationHeaderforGET(true)
}
userProfile (accessToken, done) {
this._oauth2.get(this._userProfileURL, accessToken, function (err, body, res) {
if (err) {
return done(new InternalOAuthError('Failed to fetch user profile', err))
}
let profile, json
try {
json = JSON.parse(body)
profile = parseProfile(json)
} catch (ex) {
return done(new InternalOAuthError('Failed to parse user profile: ' + ex.toString()))
}
profile.provider = 'oauth2'
done(null, profile)
})
}
}
exports.OAuth2CustomStrategy = OAuth2CustomStrategy
exports.parseProfile = parseProfile
exports.extractProfileAttribute = extractProfileAttribute
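
A quick usage sketch of the attribute-path handling above; the payload and attribute paths are invented for illustration, but the bracket syntax matches the regex in extractProfileAttribute:

  // Hypothetical payload; only the path syntax comes from the code above.
  const { extractProfileAttribute } = require('./strategy')

  const data = {
    user: { login: 'jdoe', name: 'Jane Doe' },
    emails: [{ value: 'jdoe@example.com' }]
  }

  extractProfileAttribute(data, 'user.login')      // => 'jdoe'
  extractProfileAttribute(data, 'emails[0].value') // => 'jdoe@example.com'
  extractProfileAttribute(data, 'missing.attr')    // => undefined

  // With CMD_OAUTH2_USER_PROFILE_*_ATTR set to the paths above, parseProfile(data)
  // would yield { id: 'jdoe', username: 'jdoe', displayName: 'Jane Doe', email: 'jdoe@example.com' }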

View File

@ -3,9 +3,9 @@
const Router = require('express').Router const Router = require('express').Router
const passport = require('passport') const passport = require('passport')
const OpenIDStrategy = require('@passport-next/passport-openid').Strategy const OpenIDStrategy = require('@passport-next/passport-openid').Strategy
const config = require('../../../config') const config = require('../../config')
const models = require('../../../models') const models = require('../../models')
const logger = require('../../../logger') const logger = require('../../logger')
const { urlencodedParser } = require('../../utils') const { urlencodedParser } = require('../../utils')
const { setReturnToFromReferer } = require('../utils') const { setReturnToFromReferer } = require('../utils')

View File

@ -3,9 +3,9 @@
const Router = require('express').Router const Router = require('express').Router
const passport = require('passport') const passport = require('passport')
const SamlStrategy = require('passport-saml').Strategy const SamlStrategy = require('passport-saml').Strategy
const config = require('../../../config') const config = require('../../config')
const models = require('../../../models') const models = require('../../models')
const logger = require('../../../logger') const logger = require('../../logger')
const { urlencodedParser } = require('../../utils') const { urlencodedParser } = require('../../utils')
const fs = require('fs') const fs = require('fs')
const intersection = function (array1, array2) { return array1.filter((n) => array2.includes(n)) } const intersection = function (array1, array2) { return array1.filter((n) => array2.includes(n)) }

View File

@ -4,7 +4,7 @@ const Router = require('express').Router
const passport = require('passport') const passport = require('passport')
const TwitterStrategy = require('passport-twitter').Strategy const TwitterStrategy = require('passport-twitter').Strategy
const config = require('../../../config') const config = require('../../config')
const { setReturnToFromReferer, passportGeneralCallback } = require('../utils') const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
const twitterAuth = module.exports = Router() const twitterAuth = module.exports = Router()

View File

@ -1,8 +1,8 @@
'use strict' 'use strict'
const models = require('../../models') const models = require('../models')
const config = require('../../config') const config = require('../config')
const logger = require('../../logger') const logger = require('../logger')
exports.setReturnToFromReferer = function setReturnToFromReferer (req) { exports.setReturnToFromReferer = function setReturnToFromReferer (req) {
var referer = req.get('referer') var referer = req.get('referer')

View File

@ -14,7 +14,7 @@ module.exports = {
hsts: { hsts: {
enable: true, enable: true,
maxAgeSeconds: 60 * 60 * 24 * 365, maxAgeSeconds: 60 * 60 * 24 * 365,
includeSubdomains: true, includeSubdomains: false,
preload: true preload: true
}, },
csp: { csp: {
@ -29,8 +29,8 @@ module.exports = {
}, },
protocolUseSSL: false, protocolUseSSL: false,
useCDN: true, useCDN: true,
allowAnonymous: true, allowAnonymous: false,
allowAnonymousEdits: false, allowAnonymousEdits: true,
allowAnonymousViews: true, allowAnonymousViews: true,
allowFreeURL: false, allowFreeURL: false,
forbiddenNoteIDs: ['robots.txt', 'favicon.ico', 'api'], forbiddenNoteIDs: ['robots.txt', 'favicon.ico', 'api'],
@ -93,7 +93,13 @@ module.exports = {
authorizationURL: undefined, authorizationURL: undefined,
tokenURL: undefined, tokenURL: undefined,
clientID: undefined, clientID: undefined,
clientSecret: undefined clientSecret: undefined,
baseURL: undefined,
userProfileURL: undefined,
userProfileUsernameAttr: 'username',
userProfileDisplayNameAttr: 'displayName',
userProfileEmailAttr: 'email',
scope: 'email'
}, },
facebook: { facebook: {
clientID: undefined, clientID: undefined,
@ -127,7 +133,8 @@ module.exports = {
}, },
google: { google: {
clientID: undefined, clientID: undefined,
clientSecret: undefined clientSecret: undefined,
hostedDomain: undefined
}, },
ldap: { ldap: {
providerName: undefined, providerName: undefined,
@ -178,5 +185,6 @@ module.exports = {
// Generated id: "31-good-morning-my-friend---do-you-have-5" // Generated id: "31-good-morning-my-friend---do-you-have-5"
// 2nd appearance: "31-good-morning-my-friend---do-you-have-5-1" // 2nd appearance: "31-good-morning-my-friend---do-you-have-5-1"
// 3rd appearance: "31-good-morning-my-friend---do-you-have-5-2" // 3rd appearance: "31-good-morning-my-friend---do-you-have-5-2"
linkifyHeaderStyle: 'keep-case' linkifyHeaderStyle: 'keep-case',
autoVersionCheck: true
} }
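
The new defaults above (OAuth2 profile attributes, google.hostedDomain, autoVersionCheck) can be overridden per instance; a hypothetical config.json fragment, keyed by environment as the config loader expects, with placeholder values:

  {
    "production": {
      "autoVersionCheck": false,
      "google": {
        "clientID": "placeholder-client-id",
        "clientSecret": "placeholder-client-secret",
        "hostedDomain": "example.com"
      },
      "oauth2": {
        "userProfileUsernameAttr": "user.login",
        "userProfileDisplayNameAttr": "user.name",
        "userProfileEmailAttr": "emails[0].value"
      }
    }
  }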

View File

@ -70,6 +70,10 @@ module.exports = {
clientID: process.env.CMD_GITHUB_CLIENTID, clientID: process.env.CMD_GITHUB_CLIENTID,
clientSecret: process.env.CMD_GITHUB_CLIENTSECRET clientSecret: process.env.CMD_GITHUB_CLIENTSECRET
}, },
bitbucket: {
clientID: process.env.CMD_BITBUCKET_CLIENTID,
clientSecret: process.env.CMD_BITBUCKET_CLIENTSECRET
},
gitlab: { gitlab: {
baseURL: process.env.CMD_GITLAB_BASEURL, baseURL: process.env.CMD_GITLAB_BASEURL,
clientID: process.env.CMD_GITLAB_CLIENTID, clientID: process.env.CMD_GITLAB_CLIENTID,
@ -84,14 +88,15 @@ module.exports = {
oauth2: { oauth2: {
providerName: process.env.CMD_OAUTH2_PROVIDERNAME, providerName: process.env.CMD_OAUTH2_PROVIDERNAME,
baseURL: process.env.CMD_OAUTH2_BASEURL, baseURL: process.env.CMD_OAUTH2_BASEURL,
clientID: process.env.CMD_OAUTH2_CLIENT_ID,
clientSecret: process.env.CMD_OAUTH2_CLIENT_SECRET,
authorizationURL: process.env.CMD_OAUTH2_AUTHORIZATION_URL,
tokenURL: process.env.CMD_OAUTH2_TOKEN_URL,
userProfileURL: process.env.CMD_OAUTH2_USER_PROFILE_URL, userProfileURL: process.env.CMD_OAUTH2_USER_PROFILE_URL,
scope: process.env.CMD_OAUTH2_SCOPE,
userProfileUsernameAttr: process.env.CMD_OAUTH2_USER_PROFILE_USERNAME_ATTR, userProfileUsernameAttr: process.env.CMD_OAUTH2_USER_PROFILE_USERNAME_ATTR,
userProfileDisplayNameAttr: process.env.CMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR, userProfileDisplayNameAttr: process.env.CMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR,
userProfileEmailAttr: process.env.CMD_OAUTH2_USER_PROFILE_EMAIL_ATTR, userProfileEmailAttr: process.env.CMD_OAUTH2_USER_PROFILE_EMAIL_ATTR
tokenURL: process.env.CMD_OAUTH2_TOKEN_URL,
authorizationURL: process.env.CMD_OAUTH2_AUTHORIZATION_URL,
clientID: process.env.CMD_OAUTH2_CLIENT_ID,
clientSecret: process.env.CMD_OAUTH2_CLIENT_SECRET
}, },
dropbox: { dropbox: {
clientID: process.env.CMD_DROPBOX_CLIENTID, clientID: process.env.CMD_DROPBOX_CLIENTID,
@ -100,7 +105,8 @@ module.exports = {
}, },
google: { google: {
clientID: process.env.CMD_GOOGLE_CLIENTID, clientID: process.env.CMD_GOOGLE_CLIENTID,
clientSecret: process.env.CMD_GOOGLE_CLIENTSECRET clientSecret: process.env.CMD_GOOGLE_CLIENTSECRET,
hostedDomain: process.env.CMD_GOOGLE_HOSTEDDOMAIN
}, },
ldap: { ldap: {
providerName: process.env.CMD_LDAP_PROVIDERNAME, providerName: process.env.CMD_LDAP_PROVIDERNAME,
@ -138,5 +144,6 @@ module.exports = {
allowPDFExport: toBooleanConfig(process.env.CMD_ALLOW_PDF_EXPORT), allowPDFExport: toBooleanConfig(process.env.CMD_ALLOW_PDF_EXPORT),
openID: toBooleanConfig(process.env.CMD_OPENID), openID: toBooleanConfig(process.env.CMD_OPENID),
defaultUseHardbreak: toBooleanConfig(process.env.CMD_DEFAULT_USE_HARD_BREAK), defaultUseHardbreak: toBooleanConfig(process.env.CMD_DEFAULT_USE_HARD_BREAK),
linkifyHeaderStyle: process.env.CMD_LINKIFY_HEADER_STYLE linkifyHeaderStyle: process.env.CMD_LINKIFY_HEADER_STYLE,
autoVersionCheck: toBooleanConfig(process.env.CMD_AUTO_VERSION_CHECK)
} }

View File

@ -1,126 +0,0 @@
'use strict'
const { toBooleanConfig, toArrayConfig, toIntegerConfig } = require('./utils')
module.exports = {
domain: process.env.HMD_DOMAIN,
urlPath: process.env.HMD_URL_PATH,
port: toIntegerConfig(process.env.HMD_PORT),
urlAddPort: toBooleanConfig(process.env.HMD_URL_ADDPORT),
useSSL: toBooleanConfig(process.env.HMD_USESSL),
hsts: {
enable: toBooleanConfig(process.env.HMD_HSTS_ENABLE),
maxAgeSeconds: toIntegerConfig(process.env.HMD_HSTS_MAX_AGE),
includeSubdomains: toBooleanConfig(process.env.HMD_HSTS_INCLUDE_SUBDOMAINS),
preload: toBooleanConfig(process.env.HMD_HSTS_PRELOAD)
},
csp: {
enable: toBooleanConfig(process.env.HMD_CSP_ENABLE),
reportURI: process.env.HMD_CSP_REPORTURI
},
protocolUseSSL: toBooleanConfig(process.env.HMD_PROTOCOL_USESSL),
allowOrigin: toArrayConfig(process.env.HMD_ALLOW_ORIGIN),
useCDN: toBooleanConfig(process.env.HMD_USECDN),
allowAnonymous: toBooleanConfig(process.env.HMD_ALLOW_ANONYMOUS),
allowAnonymousEdits: toBooleanConfig(process.env.HMD_ALLOW_ANONYMOUS_EDITS),
allowFreeURL: toBooleanConfig(process.env.HMD_ALLOW_FREEURL),
defaultPermission: process.env.HMD_DEFAULT_PERMISSION,
dbURL: process.env.HMD_DB_URL,
sessionSecret: process.env.HMD_SESSION_SECRET,
sessionLife: toIntegerConfig(process.env.HMD_SESSION_LIFE),
responseMaxLag: toIntegerConfig(process.env.HMD_RESPONSE_MAX_LAG),
imageUploadType: process.env.HMD_IMAGE_UPLOAD_TYPE,
imgur: {
clientID: process.env.HMD_IMGUR_CLIENTID
},
s3: {
accessKeyId: process.env.HMD_S3_ACCESS_KEY_ID,
secretAccessKey: process.env.HMD_S3_SECRET_ACCESS_KEY,
region: process.env.HMD_S3_REGION
},
minio: {
accessKey: process.env.HMD_MINIO_ACCESS_KEY,
secretKey: process.env.HMD_MINIO_SECRET_KEY,
endPoint: process.env.HMD_MINIO_ENDPOINT,
secure: toBooleanConfig(process.env.HMD_MINIO_SECURE),
port: toIntegerConfig(process.env.HMD_MINIO_PORT)
},
s3bucket: process.env.HMD_S3_BUCKET,
azure: {
connectionString: process.env.HMD_AZURE_CONNECTION_STRING,
container: process.env.HMD_AZURE_CONTAINER
},
facebook: {
clientID: process.env.HMD_FACEBOOK_CLIENTID,
clientSecret: process.env.HMD_FACEBOOK_CLIENTSECRET
},
twitter: {
consumerKey: process.env.HMD_TWITTER_CONSUMERKEY,
consumerSecret: process.env.HMD_TWITTER_CONSUMERSECRET
},
github: {
clientID: process.env.HMD_GITHUB_CLIENTID,
clientSecret: process.env.HMD_GITHUB_CLIENTSECRET
},
gitlab: {
baseURL: process.env.HMD_GITLAB_BASEURL,
clientID: process.env.HMD_GITLAB_CLIENTID,
clientSecret: process.env.HMD_GITLAB_CLIENTSECRET,
scope: process.env.HMD_GITLAB_SCOPE
},
mattermost: {
baseURL: process.env.HMD_MATTERMOST_BASEURL,
clientID: process.env.HMD_MATTERMOST_CLIENTID,
clientSecret: process.env.HMD_MATTERMOST_CLIENTSECRET
},
oauth2: {
baseURL: process.env.HMD_OAUTH2_BASEURL,
userProfileURL: process.env.HMD_OAUTH2_USER_PROFILE_URL,
userProfileUsernameAttr: process.env.HMD_OAUTH2_USER_PROFILE_USERNAME_ATTR,
userProfileDisplayNameAttr: process.env.HMD_OAUTH2_USER_PROFILE_DISPLAY_NAME_ATTR,
userProfileEmailAttr: process.env.HMD_OAUTH2_USER_PROFILE_EMAIL_ATTR,
tokenURL: process.env.HMD_OAUTH2_TOKEN_URL,
authorizationURL: process.env.HMD_OAUTH2_AUTHORIZATION_URL,
clientID: process.env.HMD_OAUTH2_CLIENT_ID,
clientSecret: process.env.HMD_OAUTH2_CLIENT_SECRET
},
dropbox: {
clientID: process.env.HMD_DROPBOX_CLIENTID,
clientSecret: process.env.HMD_DROPBOX_CLIENTSECRET,
appKey: process.env.HMD_DROPBOX_APPKEY
},
google: {
clientID: process.env.HMD_GOOGLE_CLIENTID,
clientSecret: process.env.HMD_GOOGLE_CLIENTSECRET
},
ldap: {
providerName: process.env.HMD_LDAP_PROVIDERNAME,
url: process.env.HMD_LDAP_URL,
bindDn: process.env.HMD_LDAP_BINDDN,
bindCredentials: process.env.HMD_LDAP_BINDCREDENTIALS,
searchBase: process.env.HMD_LDAP_SEARCHBASE,
searchFilter: process.env.HMD_LDAP_SEARCHFILTER,
searchAttributes: toArrayConfig(process.env.HMD_LDAP_SEARCHATTRIBUTES),
usernameField: process.env.HMD_LDAP_USERNAMEFIELD,
useridField: process.env.HMD_LDAP_USERIDFIELD,
tlsca: process.env.HMD_LDAP_TLS_CA
},
saml: {
idpSsoUrl: process.env.HMD_SAML_IDPSSOURL,
idpCert: process.env.HMD_SAML_IDPCERT,
issuer: process.env.HMD_SAML_ISSUER,
identifierFormat: process.env.HMD_SAML_IDENTIFIERFORMAT,
disableRequestedAuthnContext: toBooleanConfig(process.env.HMD_SAML_DISABLEREQUESTEDAUTHNCONTEXT),
groupAttribute: process.env.HMD_SAML_GROUPATTRIBUTE,
externalGroups: toArrayConfig(process.env.HMD_SAML_EXTERNALGROUPS, '|', []),
requiredGroups: toArrayConfig(process.env.HMD_SAML_REQUIREDGROUPS, '|', []),
attribute: {
id: process.env.HMD_SAML_ATTRIBUTE_ID,
username: process.env.HMD_SAML_ATTRIBUTE_USERNAME,
email: process.env.HMD_SAML_ATTRIBUTE_EMAIL
}
},
email: toBooleanConfig(process.env.HMD_EMAIL),
allowEmailRegister: toBooleanConfig(process.env.HMD_ALLOW_EMAIL_REGISTER),
allowPDFExport: toBooleanConfig(process.env.HMD_ALLOW_PDF_EXPORT)
}

View File

@ -36,12 +36,9 @@ const fileConfig = fs.existsSync(configFilePath) ? require(configFilePath)[env]
let config = require('./default') let config = require('./default')
merge(config, require('./defaultSSL')) merge(config, require('./defaultSSL'))
merge(config, require('./oldDefault'))
merge(config, debugConfig) merge(config, debugConfig)
merge(config, packageConfig) merge(config, packageConfig)
merge(config, fileConfig) merge(config, fileConfig)
merge(config, require('./oldEnvironment'))
merge(config, require('./hackmdEnvironment'))
merge(config, require('./environment')) merge(config, require('./environment'))
merge(config, require('./dockerSecret')) merge(config, require('./dockerSecret'))
@ -121,6 +118,7 @@ config.isTwitterEnable = config.twitter.consumerKey && config.twitter.consumerSe
config.isEmailEnable = config.email config.isEmailEnable = config.email
config.isOpenIDEnable = config.openID config.isOpenIDEnable = config.openID
config.isGitHubEnable = config.github.clientID && config.github.clientSecret config.isGitHubEnable = config.github.clientID && config.github.clientSecret
config.isBitbucketEnable = config.bitbucket.clientID && config.bitbucket.clientSecret
config.isGitLabEnable = config.gitlab.clientID && config.gitlab.clientSecret config.isGitLabEnable = config.gitlab.clientID && config.gitlab.clientSecret
config.isMattermostEnable = config.mattermost.clientID && config.mattermost.clientSecret config.isMattermostEnable = config.mattermost.clientID && config.mattermost.clientSecret
config.isLDAPEnable = config.ldap.url config.isLDAPEnable = config.ldap.url
@ -158,7 +156,7 @@ for (let i = keys.length; i--;) {
// Notify users about the prefix change and inform them they use legacy prefix for environment variables // Notify users about the prefix change and inform them they use legacy prefix for environment variables
if (Object.keys(process.env).toString().indexOf('HMD_') !== -1) { if (Object.keys(process.env).toString().indexOf('HMD_') !== -1) {
logger.warn('Using legacy HMD prefix for environment variables. Please change your variables in future. For details see: https://github.com/hackmdio/codimd#environment-variables-will-overwrite-other-server-configs') logger.warn('Using legacy HMD prefix for environment variables. Please change your variables in future. For details see: https://hackmd.io/c/codimd-documentation/%2F%40codimd%2Fmigrate-2-0#1-Drop-old-environment-variables-support')
} }
// Generate session secret if it stays on default values // Generate session secret if it stays on default values
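
With the legacy sources dropped, later merge calls still override earlier ones; a minimal sketch of that behaviour, assuming the merge used here is a lodash-style deep merge (which the repeated merge(config, ...) calls suggest) and using invented sample values:

  // Illustration only: an environment value wins over the same key from config.json.
  const merge = require('lodash/merge')
  const fileConfig = { autoVersionCheck: true, google: { hostedDomain: 'example.com' } }
  const envConfig = { autoVersionCheck: false } // e.g. CMD_AUTO_VERSION_CHECK=false
  const config = {}
  merge(config, fileConfig)
  merge(config, envConfig)
  // config => { autoVersionCheck: false, google: { hostedDomain: 'example.com' } }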

View File

@ -1,42 +0,0 @@
'use strict'
module.exports = {
urlpath: undefined,
urladdport: undefined,
alloworigin: undefined,
usessl: undefined,
protocolusessl: undefined,
usecdn: undefined,
allowanonymous: undefined,
allowanonymousedits: undefined,
allowfreeurl: undefined,
defaultpermission: undefined,
dburl: undefined,
// ssl path
sslkeypath: undefined,
sslcertpath: undefined,
sslcapath: undefined,
dhparampath: undefined,
// other path
tmppath: undefined,
defaultnotepath: undefined,
docspath: undefined,
indexpath: undefined,
hackmdpath: undefined,
errorpath: undefined,
prettypath: undefined,
slidepath: undefined,
// session
sessionname: undefined,
sessionsecret: undefined,
sessionlife: undefined,
staticcachetime: undefined,
// socket.io
heartbeatinterval: undefined,
heartbeattimeout: undefined,
// document
documentmaxlength: undefined,
imageuploadtype: undefined,
allowemailregister: undefined,
allowpdfexport: undefined
}

View File

@ -1,10 +0,0 @@
'use strict'
const { toBooleanConfig } = require('./utils')
module.exports = {
debug: toBooleanConfig(process.env.DEBUG),
dburl: process.env.DATABASE_URL,
urlpath: process.env.URL_PATH,
port: process.env.PORT
}

View File

@ -7,7 +7,7 @@ var defaultDirectives = {
defaultSrc: ['\'self\''], defaultSrc: ['\'self\''],
scriptSrc: ['\'self\'', 'vimeo.com', 'https://gist.github.com', 'www.slideshare.net', 'https://query.yahooapis.com', '\'unsafe-eval\''], scriptSrc: ['\'self\'', 'vimeo.com', 'https://gist.github.com', 'www.slideshare.net', 'https://query.yahooapis.com', '\'unsafe-eval\''],
// ^ TODO: Remove unsafe-eval - webpack script-loader issues https://github.com/hackmdio/codimd/issues/594 // ^ TODO: Remove unsafe-eval - webpack script-loader issues https://github.com/hackmdio/codimd/issues/594
imgSrc: ['*'], imgSrc: ['*', 'data:'],
styleSrc: ['\'self\'', '\'unsafe-inline\'', 'https://github.githubassets.com'], // unsafe-inline is required for some libs, plus used in views styleSrc: ['\'self\'', '\'unsafe-inline\'', 'https://github.githubassets.com'], // unsafe-inline is required for some libs, plus used in views
fontSrc: ['\'self\'', 'data:', 'https://public.slidesharecdn.com'], fontSrc: ['\'self\'', 'data:', 'https://public.slidesharecdn.com'],
objectSrc: ['*'], // Chrome PDF viewer treats PDFs as objects :/ objectSrc: ['*'], // Chrome PDF viewer treats PDFs as objects :/
@ -16,9 +16,13 @@ var defaultDirectives = {
connectSrc: ['*'] connectSrc: ['*']
} }
var dropboxDirectives = {
scriptSrc: ['https://www.dropbox.com']
}
var cdnDirectives = { var cdnDirectives = {
scriptSrc: ['https://cdnjs.cloudflare.com', 'https://cdn.mathjax.org'], scriptSrc: ['https://cdnjs.cloudflare.com', 'https://cdn.jsdelivr.net', 'https://cdn.mathjax.org'],
styleSrc: ['https://cdnjs.cloudflare.com', 'https://fonts.googleapis.com'], styleSrc: ['https://cdnjs.cloudflare.com', 'https://cdn.jsdelivr.net', 'https://fonts.googleapis.com'],
fontSrc: ['https://cdnjs.cloudflare.com', 'https://fonts.gstatic.com'] fontSrc: ['https://cdnjs.cloudflare.com', 'https://fonts.gstatic.com']
} }
@ -37,6 +41,7 @@ CspStrategy.computeDirectives = function () {
mergeDirectives(directives, config.csp.directives) mergeDirectives(directives, config.csp.directives)
mergeDirectivesIf(config.csp.addDefaults, directives, defaultDirectives) mergeDirectivesIf(config.csp.addDefaults, directives, defaultDirectives)
mergeDirectivesIf(config.useCDN, directives, cdnDirectives) mergeDirectivesIf(config.useCDN, directives, cdnDirectives)
mergeDirectivesIf(config.dropbox && config.dropbox.appKey, directives, dropboxDirectives)
mergeDirectivesIf(config.csp.addDisqus, directives, disqusDirectives) mergeDirectivesIf(config.csp.addDisqus, directives, disqusDirectives)
mergeDirectivesIf(config.csp.addGoogleAnalytics, directives, googleAnalyticsDirectives) mergeDirectivesIf(config.csp.addGoogleAnalytics, directives, googleAnalyticsDirectives)
if (!areAllInlineScriptsAllowed(directives)) { if (!areAllInlineScriptsAllowed(directives)) {
@ -71,7 +76,7 @@ function addInlineScriptExceptions (directives) {
directives.scriptSrc.push(getCspNonce) directives.scriptSrc.push(getCspNonce)
// TODO: This is the SHA-256 hash of the inline script in build/reveal.js/plugins/notes/notes.html // TODO: This is the SHA-256 hash of the inline script in build/reveal.js/plugins/notes/notes.html
// Any more clean solution appreciated. // Any more clean solution appreciated.
directives.scriptSrc.push('\'sha256-Lc+VnBdinzYTTAkFrIoUqdoA9EQFeS1AF9ybmF+LLfM=\'') directives.scriptSrc.push('\'sha256-81acLZNZISnyGYZrSuoYhpzwDTTxi7vC1YM4uNxqWaM=\'')
} }
function getCspNonce (req, res) { function getCspNonce (req, res) {
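
With useCDN enabled and a Dropbox app key configured, the new directives above are merged additively into the defaults; a rough sketch of the resulting scriptSrc, assuming mergeDirectivesIf simply appends the listed sources (that helper is not shown in this hunk):

  // Assumed result for illustration; exact ordering depends on mergeDirectivesIf.
  const expectedScriptSrc = [
    "'self'", 'vimeo.com', 'https://gist.github.com', 'www.slideshare.net',
    'https://query.yahooapis.com', "'unsafe-eval'",                                       // defaults above
    'https://cdnjs.cloudflare.com', 'https://cdn.jsdelivr.net', 'https://cdn.mathjax.org', // CDN sources
    'https://www.dropbox.com'                                                              // new dropboxDirectives entry
  ]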

21
lib/errorPage/index.js Normal file
View File

@ -0,0 +1,21 @@
'use strict'
const config = require('../config')
const { responseError } = require('../response')
exports.errorForbidden = (req, res) => {
if (req.user) {
return responseError(res, '403', 'Forbidden', 'oh no.')
}
req.flash('error', 'You are not allowed to access this page. Maybe try logging in?')
res.redirect(config.serverURL + '/')
}
exports.errorNotFound = (req, res) => {
responseError(res, '404', 'Not Found', 'oops.')
}
exports.errorInternalError = (req, res) => {
responseError(res, '500', 'Internal Error', 'wtf.')
}

View File

@ -4,10 +4,10 @@
var LZString = require('@hackmd/lz-string') var LZString = require('@hackmd/lz-string')
// core // core
var config = require('./config') var config = require('../config')
var logger = require('./logger') var logger = require('../logger')
var response = require('./response') var response = require('../response')
var models = require('./models') var models = require('../models')
function getHistory (userid, callback) { function getHistory (userid, callback) {
models.User.findOne({ models.User.findOne({
@ -116,14 +116,14 @@ function parseHistoryToObject (history) {
function historyGet (req, res) { function historyGet (req, res) {
if (req.isAuthenticated()) { if (req.isAuthenticated()) {
getHistory(req.user.id, function (err, history) { getHistory(req.user.id, function (err, history) {
if (err) return response.errorInternalError(res) if (err) return response.errorInternalError(req, res)
if (!history) return response.errorNotFound(res) if (!history) return response.errorNotFound(req, res)
res.send({ res.send({
history: parseHistoryToArray(history) history: parseHistoryToArray(history)
}) })
}) })
} else { } else {
return response.errorForbidden(res) return response.errorForbidden(req, res)
} }
} }
@ -131,40 +131,40 @@ function historyPost (req, res) {
if (req.isAuthenticated()) { if (req.isAuthenticated()) {
var noteId = req.params.noteId var noteId = req.params.noteId
if (!noteId) { if (!noteId) {
if (typeof req.body['history'] === 'undefined') return response.errorBadRequest(res) if (typeof req.body['history'] === 'undefined') return response.errorBadRequest(req, res)
if (config.debug) { logger.info('SERVER received history from [' + req.user.id + ']: ' + req.body.history) } if (config.debug) { logger.info('SERVER received history from [' + req.user.id + ']: ' + req.body.history) }
try { try {
var history = JSON.parse(req.body.history) var history = JSON.parse(req.body.history)
} catch (err) { } catch (err) {
return response.errorBadRequest(res) return response.errorBadRequest(req, res)
} }
if (Array.isArray(history)) { if (Array.isArray(history)) {
setHistory(req.user.id, history, function (err, count) { setHistory(req.user.id, history, function (err, count) {
if (err) return response.errorInternalError(res) if (err) return response.errorInternalError(req, res)
res.end() res.end()
}) })
} else { } else {
return response.errorBadRequest(res) return response.errorBadRequest(req, res)
} }
} else { } else {
if (typeof req.body['pinned'] === 'undefined') return response.errorBadRequest(res) if (typeof req.body['pinned'] === 'undefined') return response.errorBadRequest(req, res)
getHistory(req.user.id, function (err, history) { getHistory(req.user.id, function (err, history) {
if (err) return response.errorInternalError(res) if (err) return response.errorInternalError(req, res)
if (!history) return response.errorNotFound(res) if (!history) return response.errorNotFound(req, res)
if (!history[noteId]) return response.errorNotFound(res) if (!history[noteId]) return response.errorNotFound(req, res)
if (req.body.pinned === 'true' || req.body.pinned === 'false') { if (req.body.pinned === 'true' || req.body.pinned === 'false') {
history[noteId].pinned = (req.body.pinned === 'true') history[noteId].pinned = (req.body.pinned === 'true')
setHistory(req.user.id, history, function (err, count) { setHistory(req.user.id, history, function (err, count) {
if (err) return response.errorInternalError(res) if (err) return response.errorInternalError(req, res)
res.end() res.end()
}) })
} else { } else {
return response.errorBadRequest(res) return response.errorBadRequest(req, res)
} }
}) })
} }
} else { } else {
return response.errorForbidden(res) return response.errorForbidden(req, res)
} }
} }
@ -173,22 +173,22 @@ function historyDelete (req, res) {
var noteId = req.params.noteId var noteId = req.params.noteId
if (!noteId) { if (!noteId) {
setHistory(req.user.id, [], function (err, count) { setHistory(req.user.id, [], function (err, count) {
if (err) return response.errorInternalError(res) if (err) return response.errorInternalError(req, res)
res.end() res.end()
}) })
} else { } else {
getHistory(req.user.id, function (err, history) { getHistory(req.user.id, function (err, history) {
if (err) return response.errorInternalError(res) if (err) return response.errorInternalError(req, res)
if (!history) return response.errorNotFound(res) if (!history) return response.errorNotFound(req, res)
delete history[noteId] delete history[noteId]
setHistory(req.user.id, history, function (err, count) { setHistory(req.user.id, history, function (err, count) {
if (err) return response.errorInternalError(res) if (err) return response.errorInternalError(req, res)
res.end() res.end()
}) })
}) })
} }
} else { } else {
return response.errorForbidden(res) return response.errorForbidden(req, res)
} }
} }

38
lib/homepage/index.js Normal file
View File

@ -0,0 +1,38 @@
'use strict'
const fs = require('fs')
const path = require('path')
const config = require('../config')
const { User } = require('../models')
const logger = require('../logger')
exports.showIndex = async (req, res) => {
const isLogin = req.isAuthenticated()
const deleteToken = ''
const data = {
signin: isLogin,
infoMessage: req.flash('info'),
errorMessage: req.flash('error'),
privacyStatement: fs.existsSync(path.join(config.docsPath, 'privacy.md')),
termsOfUse: fs.existsSync(path.join(config.docsPath, 'terms-of-use.md')),
deleteToken: deleteToken
}
if (!isLogin) {
return res.render('index.ejs', data)
}
const user = await User.findOne({
where: {
id: req.user.id
}
})
if (user) {
data.deleteToken = user.deleteToken
return res.render('index.ejs', data)
}
logger.error(`error: user not found with id ${req.user.id}`)
return res.render('index.ejs', data)
}

View File

@ -1,8 +1,8 @@
'use strict' 'use strict'
const path = require('path') const path = require('path')
const config = require('../../config') const config = require('../config')
const logger = require('../../logger') const logger = require('../logger')
const azure = require('azure-storage') const azure = require('azure-storage')

View File

@ -2,8 +2,8 @@
const URL = require('url').URL const URL = require('url').URL
const path = require('path') const path = require('path')
const config = require('../../config') const config = require('../config')
const logger = require('../../logger') const logger = require('../logger')
exports.uploadImage = function (imagePath, callback) { exports.uploadImage = function (imagePath, callback) {
if (!imagePath || typeof imagePath !== 'string') { if (!imagePath || typeof imagePath !== 'string') {

View File

@ -1,6 +1,6 @@
'use strict' 'use strict'
const config = require('../../config') const config = require('../config')
const logger = require('../../logger') const logger = require('../logger')
const imgur = require('@hackmd/imgur') const imgur = require('@hackmd/imgur')

View File

@ -3,9 +3,9 @@
const Router = require('express').Router const Router = require('express').Router
const formidable = require('formidable') const formidable = require('formidable')
const config = require('../../config') const config = require('../config')
const logger = require('../../logger') const logger = require('../logger')
const response = require('../../response') const response = require('../response')
const imageRouter = module.exports = Router() const imageRouter = module.exports = Router()
@ -21,7 +21,7 @@ imageRouter.post('/uploadimage', function (req, res) {
form.parse(req, function (err, fields, files) { form.parse(req, function (err, fields, files) {
if (err || !files.image || !files.image.path) { if (err || !files.image || !files.image.path) {
response.errorForbidden(res) response.errorForbidden(req, res)
} else { } else {
if (config.debug) { if (config.debug) {
logger.info('SERVER received uploadimage: ' + JSON.stringify(files.image)) logger.info('SERVER received uploadimage: ' + JSON.stringify(files.image))

View File

@ -1,8 +1,8 @@
'use strict' 'use strict'
const config = require('../../config') const config = require('../config')
const logger = require('../../logger') const logger = require('../logger')
const lutim = require('lutim') const lutim = require('lutim')
exports.uploadImage = function (imagePath, callback) { exports.uploadImage = function (imagePath, callback) {
if (!imagePath || typeof imagePath !== 'string') { if (!imagePath || typeof imagePath !== 'string') {

View File

@ -2,11 +2,11 @@
const fs = require('fs') const fs = require('fs')
const path = require('path') const path = require('path')
const config = require('../../config') const config = require('../config')
const { getImageMimeType } = require('../../utils') const { getImageMimeType } = require('../utils')
const logger = require('../../logger') const logger = require('../logger')
const Minio = require('minio') const Minio = require('minio')
const minioClient = new Minio.Client({ const minioClient = new Minio.Client({
endPoint: config.minio.endPoint, endPoint: config.minio.endPoint,
port: config.minio.port, port: config.minio.port,

View File

@ -2,9 +2,9 @@
const fs = require('fs') const fs = require('fs')
const path = require('path') const path = require('path')
const config = require('../../config') const config = require('../config')
const { getImageMimeType } = require('../../utils') const { getImageMimeType } = require('../utils')
const logger = require('../../logger') const logger = require('../logger')
const AWS = require('aws-sdk') const AWS = require('aws-sdk')
const awsConfig = new AWS.Config(config.s3) const awsConfig = new AWS.Config(config.s3)

View File

@ -1,14 +1,14 @@
'use strict' 'use strict'
const logger = require('../../logger') const logger = require('../logger')
const response = require('../../response') const response = require('../response')
module.exports = function (req, res, next) { module.exports = function (req, res, next) {
try { try {
decodeURIComponent(req.path) decodeURIComponent(req.path)
} catch (err) { } catch (err) {
logger.error(err) logger.error(err)
return response.errorBadRequest(res) return response.errorBadRequest(req, res)
} }
next() next()
} }

View File

@ -1,6 +1,6 @@
'use strict' 'use strict'
const config = require('../../config') const config = require('../config')
module.exports = function (req, res, next) { module.exports = function (req, res, next) {
res.set({ res.set({

View File

@ -1,6 +1,6 @@
'use strict' 'use strict'
const config = require('../../config') const config = require('../config')
module.exports = function (req, res, next) { module.exports = function (req, res, next) {
if (req.method === 'GET' && req.path.substr(-1) === '/' && req.path.length > 1) { if (req.method === 'GET' && req.path.substr(-1) === '/' && req.path.length > 1) {

View File

@ -2,14 +2,14 @@
const toobusy = require('toobusy-js') const toobusy = require('toobusy-js')
const config = require('../../config') const config = require('../config')
const response = require('../../response') const response = require('../response')
toobusy.maxLag(config.responseMaxLag) toobusy.maxLag(config.responseMaxLag)
module.exports = function (req, res, next) { module.exports = function (req, res, next) {
if (toobusy()) { if (toobusy()) {
response.errorServiceUnavailable(res) response.errorServiceUnavailable(req, res)
} else { } else {
next() next()
} }

View File

@ -0,0 +1,26 @@
'use strict'
module.exports = {
up: (queryInterface, Sequelize) => {
return queryInterface.dropTable('Temp')
/*
Add altering commands here.
Return a promise to correctly handle asynchronicity.
Example:
return queryInterface.createTable('users', { id: Sequelize.INTEGER });
*/
},
down: (queryInterface, Sequelize) => {
return queryInterface.createTable('Temp', {
id: {
type: Sequelize.STRING,
primaryKey: true
},
date: Sequelize.TEXT,
createdAt: Sequelize.DATE,
updatedAt: Sequelize.DATE
})
}
}

View File

@ -186,6 +186,16 @@ module.exports = function (sequelize, DataTypes) {
var result = id.match(uuidRegex) var result = id.match(uuidRegex)
if (result && result.length === 1) { return true } else { return false } if (result && result.length === 1) { return true } else { return false }
} }
Note.parseNoteIdAsync = function (noteId) {
return new Promise((resolve, reject) => {
Note.parseNoteId(noteId, (err, id) => {
if (err) {
return reject(err)
}
resolve(id)
})
})
}
Note.parseNoteId = function (noteId, callback) { Note.parseNoteId = function (noteId, callback) {
async.series({ async.series({
parseNoteIdByAlias: function (_callback) { parseNoteIdByAlias: function (_callback) {
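
Note.parseNoteIdAsync above is just a promise wrapper around the callback-style parseNoteId, so the new async controllers can await it; a minimal usage sketch (the caller and its require path are hypothetical):

  // Hypothetical caller; mirrors how lib/note/index.js uses it later in this diff.
  const { Note } = require('./models') // path depends on the caller's location

  async function resolveNoteId (noteIdOrAlias) {
    try {
      // resolves an alias or short id to the internal note id
      return await Note.parseNoteIdAsync(noteIdOrAlias)
    } catch (err) {
      return null
    }
  }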

View File

@ -1,18 +0,0 @@
'use strict'
// external modules
var shortId = require('shortid')
module.exports = function (sequelize, DataTypes) {
var Temp = sequelize.define('Temp', {
id: {
type: DataTypes.STRING,
primaryKey: true,
defaultValue: shortId.generate
},
data: {
type: DataTypes.TEXT
}
})
return Temp
}

193
lib/note/index.js Normal file
View File

@ -0,0 +1,193 @@
'use strict'
const config = require('../config')
const logger = require('../logger')
const { Note, User } = require('../models')
const { newCheckViewPermission, errorForbidden, responseCodiMD, errorNotFound } = require('../response')
const { updateHistory } = require('../history')
const { actionPublish, actionSlide, actionInfo, actionDownload, actionPDF, actionGist, actionRevision, actionPandoc } = require('./noteActions')
async function getNoteById (noteId, { includeUser } = { includeUser: false }) {
const id = await Note.parseNoteIdAsync(noteId)
const includes = []
if (includeUser) {
includes.push({
model: User,
as: 'owner'
}, {
model: User,
as: 'lastchangeuser'
})
}
const note = await Note.findOne({
where: {
id: id
},
include: includes
})
return note
}
async function createNote (userId, noteAlias) {
if (!config.allowAnonymous && !userId) {
throw new Error('can not create note')
}
const note = await Note.create({
ownerId: userId,
alias: noteAlias
})
if (userId) {
updateHistory(userId, note)
}
return note
}
// controller
async function showNote (req, res) {
const noteId = req.params.noteId
const userId = req.user ? req.user.id : null
let note = await getNoteById(noteId)
if (!note) {
// if allowFreeURL is enabled, auto-create the note
if (!config.allowFreeURL || config.forbiddenNoteIDs.includes(noteId)) {
return errorNotFound(req, res)
}
note = await createNote(userId, noteId)
}
if (!newCheckViewPermission(note, req.isAuthenticated(), userId)) {
return errorForbidden(req, res)
}
// force to use note id
const id = Note.encodeNoteId(note.id)
if ((note.alias && noteId !== note.alias) || (!note.alias && noteId !== id)) {
return res.redirect(config.serverURL + '/' + (note.alias || id))
}
return responseCodiMD(res, note)
}
function canViewNote (note, isLogin, userId) {
if (note.permission === 'private') {
return note.ownerId === userId
}
if (note.permission === 'limited' || note.permission === 'protected') {
return isLogin
}
return true
}
async function showPublishNote (req, res) {
const shortid = req.params.shortid
const note = await getNoteById(shortid, {
includeUser: true
})
if (!note) {
return errorNotFound(req, res)
}
if (!canViewNote(note, req.isAuthenticated(), req.user ? req.user.id : null)) {
return errorForbidden(req, res)
}
if ((note.alias && shortid !== note.alias) || (!note.alias && shortid !== note.shortid)) {
return res.redirect(config.serverURL + '/s/' + (note.alias || note.shortid))
}
await note.increment('viewcount')
const body = note.content
const extracted = Note.extractMeta(body)
const markdown = extracted.markdown
const meta = Note.parseMeta(extracted.meta)
const createTime = note.createdAt
const updateTime = note.lastchangeAt
const title = Note.generateWebTitle(meta.title || Note.decodeTitle(note.title))
const data = {
title: title,
description: meta.description || (markdown ? Note.generateDescription(markdown) : null),
viewcount: note.viewcount,
createtime: createTime,
updatetime: updateTime,
body: body,
owner: note.owner ? note.owner.id : null,
ownerprofile: note.owner ? User.getProfile(note.owner) : null,
lastchangeuser: note.lastchangeuser ? note.lastchangeuser.id : null,
lastchangeuserprofile: note.lastchangeuser ? User.getProfile(note.lastchangeuser) : null,
robots: meta.robots || false, // default allow robots
GA: meta.GA,
disqus: meta.disqus,
cspNonce: res.locals.nonce
}
res.set({
'Cache-Control': 'private' // only cache by client
})
res.render('pretty.ejs', data)
}
async function noteActions (req, res) {
const noteId = req.params.noteId
const note = await getNoteById(noteId)
if (!note) {
return errorNotFound(req, res)
}
if (!canViewNote(note, req.isAuthenticated(), req.user ? req.user.id : null)) {
return errorForbidden(req, res)
}
const action = req.params.action
switch (action) {
case 'publish':
case 'pretty': // pretty deprecated
return actionPublish(req, res, note)
case 'slide':
return actionSlide(req, res, note)
case 'download':
actionDownload(req, res, note)
break
case 'info':
actionInfo(req, res, note)
break
case 'pdf':
if (config.allowPDFExport) {
actionPDF(req, res, note)
} else {
logger.error('PDF export failed: Disabled by config. Set "allowPDFExport: true" to enable. Check the documentation for details')
errorForbidden(req, res)
}
break
case 'gist':
actionGist(req, res, note)
break
case 'revision':
actionRevision(req, res, note)
break
case 'pandoc':
actionPandoc(req, res, note)
break
default:
return res.redirect(config.serverURL + '/' + noteId)
}
}
exports.showNote = showNote
exports.showPublishNote = showPublishNote
exports.noteActions = noteActions
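
The three exported handlers above are plain Express controllers; a hedged sketch of how a router elsewhere in lib/ might mount them (the route patterns follow the req.params names used above, but this wiring is not part of the hunk):

  // Hypothetical wiring for illustration only.
  const { Router } = require('express')
  const { showNote, showPublishNote, noteActions } = require('./note')

  const noteRouter = Router()
  noteRouter.get('/s/:shortid', showPublishNote)             // published view
  noteRouter.get('/:noteId', showNote)                       // editor view
  noteRouter.get('/:noteId/:action/:actionId?', noteActions) // publish, slide, download, pdf, ...

  module.exports = noteRouter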

226
lib/note/noteActions.js Normal file
View File

@ -0,0 +1,226 @@
'use strict'
const fs = require('fs')
const path = require('path')
const markdownpdf = require('markdown-pdf')
const shortId = require('shortid')
const querystring = require('querystring')
const moment = require('moment')
const { Pandoc } = require('@hackmd/pandoc.js')
const config = require('../config')
const logger = require('../logger')
const { Note, Revision } = require('../models')
const { errorInternalError, errorNotFound } = require('../response')
function actionPublish (req, res, note) {
res.redirect(config.serverURL + '/s/' + (note.alias || note.shortid))
}
function actionSlide (req, res, note) {
res.redirect(config.serverURL + '/p/' + (note.alias || note.shortid))
}
function actionDownload (req, res, note) {
const body = note.content
const title = Note.decodeTitle(note.title)
const filename = encodeURIComponent(title)
res.set({
'Access-Control-Allow-Origin': '*', // allow CORS as API
'Access-Control-Allow-Headers': 'Range',
'Access-Control-Expose-Headers': 'Cache-Control, Content-Encoding, Content-Range',
'Content-Type': 'text/markdown; charset=UTF-8',
'Cache-Control': 'private',
'Content-disposition': 'attachment; filename=' + filename + '.md',
'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
})
res.send(body)
}
function actionInfo (req, res, note) {
const body = note.content
const extracted = Note.extractMeta(body)
const markdown = extracted.markdown
const meta = Note.parseMeta(extracted.meta)
const createtime = note.createdAt
const updatetime = note.lastchangeAt
const title = Note.decodeTitle(note.title)
const data = {
title: meta.title || title,
description: meta.description || (markdown ? Note.generateDescription(markdown) : null),
viewcount: note.viewcount,
createtime: createtime,
updatetime: updatetime
}
res.set({
'Access-Control-Allow-Origin': '*', // allow CORS as API
'Access-Control-Allow-Headers': 'Range',
'Access-Control-Expose-Headers': 'Cache-Control, Content-Encoding, Content-Range',
'Cache-Control': 'private', // only cache by client
'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
})
res.send(data)
}
function actionPDF (req, res, note) {
const url = config.serverURL || 'http://' + req.get('host')
const body = note.content
const extracted = Note.extractMeta(body)
let content = extracted.markdown
const title = Note.decodeTitle(note.title)
const highlightCssPath = path.join(config.appRootPath, '/node_modules/highlight.js/styles/github-gist.css')
if (!fs.existsSync(config.tmpPath)) {
fs.mkdirSync(config.tmpPath)
}
const pdfPath = config.tmpPath + '/' + Date.now() + '.pdf'
content = content.replace(/\]\(\//g, '](' + url + '/')
const markdownpdfOptions = {
highlightCssPath: highlightCssPath
}
markdownpdf(markdownpdfOptions).from.string(content).to(pdfPath, function () {
if (!fs.existsSync(pdfPath)) {
logger.error('PDF seems to not be generated as expected. File doesn\'t exist: ' + pdfPath)
return errorInternalError(req, res)
}
const stream = fs.createReadStream(pdfPath)
let filename = title
// Be careful of special characters
filename = encodeURIComponent(filename)
// Ideally this should strip them
res.setHeader('Content-disposition', 'attachment; filename="' + filename + '.pdf"')
res.setHeader('Cache-Control', 'private')
res.setHeader('Content-Type', 'application/pdf; charset=UTF-8')
res.setHeader('X-Robots-Tag', 'noindex, nofollow') // prevent crawling
stream.on('end', () => {
stream.close()
fs.unlinkSync(pdfPath)
})
stream.pipe(res)
})
}
const outputFormats = {
asciidoc: 'text/plain',
context: 'application/x-latex',
epub: 'application/epub+zip',
epub3: 'application/epub+zip',
latex: 'application/x-latex',
odt: 'application/vnd.oasis.opendocument.text',
pdf: 'application/pdf',
rst: 'text/plain',
rtf: 'application/rtf',
textile: 'text/plain',
docx: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
}
async function actionPandoc (req, res, note) {
var url = config.serverURL || 'http://' + req.get('host')
var body = note.content
var extracted = Note.extractMeta(body)
var content = extracted.markdown
var title = Note.decodeTitle(note.title)
if (!fs.existsSync(config.tmpPath)) {
fs.mkdirSync(config.tmpPath)
}
const pandoc = new Pandoc()
var path = config.tmpPath + '/' + Date.now()
content = content.replace(/\]\(\//g, '](' + url + '/')
// TODO: check export type
const { exportType } = req.query
try {
// TODO: timeout rejection
await pandoc.convertToFile(content, 'markdown', exportType, path, [
'--metadata', `title=${title}`
])
var stream = fs.createReadStream(path)
var filename = title
// Be careful of special characters
filename = encodeURIComponent(filename)
// Ideally this should strip them
res.setHeader('Content-disposition', `attachment; filename="${filename}.${exportType}"`)
res.setHeader('Cache-Control', 'private')
res.setHeader('Content-Type', `${outputFormats[exportType]}; charset=UTF-8`)
res.setHeader('X-Robots-Tag', 'noindex, nofollow') // prevent crawling
stream.pipe(res)
} catch (err) {
// TODO: handle error
res.json({
message: err.message
})
}
}
function actionGist (req, res, note) {
const data = {
client_id: config.github.clientID,
redirect_uri: config.serverURL + '/auth/github/callback/' + Note.encodeNoteId(note.id) + '/gist',
scope: 'gist',
state: shortId.generate()
}
const query = querystring.stringify(data)
res.redirect('https://github.com/login/oauth/authorize?' + query)
}
function actionRevision (req, res, note) {
const actionId = req.params.actionId
if (actionId) {
const time = moment(parseInt(actionId))
if (!time.isValid()) {
return errorNotFound(req, res)
}
Revision.getPatchedNoteRevisionByTime(note, time, function (err, content) {
if (err) {
logger.error(err)
return errorInternalError(req, res)
}
if (!content) {
return errorNotFound(req, res)
}
res.set({
'Access-Control-Allow-Origin': '*', // allow CORS as API
'Access-Control-Allow-Headers': 'Range',
'Access-Control-Expose-Headers': 'Cache-Control, Content-Encoding, Content-Range',
'Cache-Control': 'private', // only cache by client
'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
})
res.send(content)
})
} else {
Revision.getNoteRevisions(note, function (err, data) {
if (err) {
logger.error(err)
return errorInternalError(req, res)
}
const result = {
revision: data
}
res.set({
'Access-Control-Allow-Origin': '*', // allow CORS as API
'Access-Control-Allow-Headers': 'Range',
'Access-Control-Expose-Headers': 'Cache-Control, Content-Encoding, Content-Range',
'Cache-Control': 'private', // only cache by client
'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
})
res.send(result)
})
}
}
exports.actionPublish = actionPublish
exports.actionSlide = actionSlide
exports.actionDownload = actionDownload
exports.actionInfo = actionInfo
exports.actionPDF = actionPDF
exports.actionGist = actionGist
exports.actionPandoc = actionPandoc
exports.actionRevision = actionRevision
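
actionPandoc above picks the response MIME type from the exportType query parameter via the outputFormats map; a hypothetical client-side sketch of downloading one export (server URL and note id are placeholders):

  // Illustration only; the URL pieces are invented.
  const https = require('https')
  const fs = require('fs')

  https.get('https://md.example.com/features/pandoc?exportType=odt', (res) => {
    // Expected headers, per actionPandoc:
    //   Content-Type: application/vnd.oasis.opendocument.text; charset=UTF-8
    //   Content-disposition: attachment; filename="<title>.odt"
    res.pipe(fs.createWriteStream('note.odt'))
  })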

Some files were not shown because too many files have changed in this diff.