From cd3f88bad066101e1e95aee84fe322b684482096 Mon Sep 17 00:00:00 2001 From: Nick Date: Wed, 13 Feb 2019 17:20:46 -0500 Subject: [PATCH] refactor: removed redis + new scheduler engine --- Makefile | 5 +- .../graph/admin/system/system-query-info.gql | 4 - config.sample.yml | 19 ++-- dev/build/config.yml | 1 + dev/docker-mariadb/config.yml | 4 - dev/docker-mariadb/docker-compose.yml | 11 --- dev/docker-mariadb/init.sh | 3 +- dev/docker-mssql/config.yml | 4 - dev/docker-mssql/docker-compose.yml | 11 --- dev/docker-mssql/init.sh | 3 +- dev/docker-mysql/config.yml | 4 - dev/docker-mysql/docker-compose.yml | 11 --- dev/docker-mysql/init.sh | 3 +- dev/docker-postgres/config.yml | 4 - dev/docker-postgres/docker-compose.yml | 11 --- dev/docker-postgres/init.sh | 3 +- dev/docker-sqlite/config.yml | 4 - dev/docker-sqlite/docker-compose.yml | 13 --- dev/docker-sqlite/init.sh | 6 -- dev/examples/docker-compose.yml | 8 -- dev/scripts/docker-clean-db.js | 35 -------- package.json | 10 --- server/app/data.yml | 26 +----- server/core/cache.js | 7 ++ server/core/job.js | 84 ++++++++++++++++++ server/core/kernel.js | 17 ++-- server/core/localization.js | 9 -- server/core/queue.js | 63 ------------- server/core/redis.js | 36 -------- server/core/scheduler.js | 34 +++++++ server/core/system.js | 9 -- server/core/worker.js | 11 +-- server/db/migrator-source.js | 2 +- server/graph/resolvers/localization.js | 15 ++-- server/graph/resolvers/system.js | 12 --- server/graph/schemas/system.graphql | 4 - server/jobs/fetch-graph-locale.js | 25 +++--- server/jobs/purge-uploads.js | 12 ++- server/jobs/render-page.js | 34 ++++--- server/jobs/sync-graph-locales.js | 11 +-- server/jobs/sync-graph-updates.js | 14 ++- server/master.js | 4 +- server/models/navigation.js | 6 +- server/models/pages.js | 22 ++--- server/models/renderers.js | 24 ++--- server/models/storage.js | 5 ++ server/models/users.js | 2 +- server/modules/storage/disk/definition.yml | 1 + server/modules/storage/git/definition.yml | 1 + wiki.js | 9 +- yarn.lock | Bin 579552 -> 567990 bytes 51 files changed, 253 insertions(+), 423 deletions(-) delete mode 100644 dev/docker-sqlite/init.sh delete mode 100644 dev/scripts/docker-clean-db.js create mode 100644 server/core/cache.js create mode 100644 server/core/job.js delete mode 100644 server/core/queue.js delete mode 100644 server/core/redis.js create mode 100644 server/core/scheduler.js diff --git a/Makefile b/Makefile index 48112812..eacb617f 100644 --- a/Makefile +++ b/Makefile @@ -28,19 +28,18 @@ docker-dev-up: ## Run dockerized dev environment docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec wiki yarn dev docker-dev-down: ## Shutdown dockerized dev environment - docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . down + docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . down --remove-orphans docker-dev-rebuild: ## Rebuild dockerized dev image rm -rf ./node_modules docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . build --no-cache --force-rm -docker-dev-clean: ## Clean DB, redis and data folders +docker-dev-clean: ## Clean DB and data folders rm -rf ./data [[ "${DEVDB}" == "postgres" ]] && docker-compose -f ./dev/docker-postgres/docker-compose.yml -p wiki --project-directory . 
exec db psql --dbname=wiki --username=wikijs --command='DROP SCHEMA IF EXISTS public CASCADE; CREATE SCHEMA public' || true [[ "${DEVDB}" == "mysql" || "${DEVDB}" == "mariadb" ]] && docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec db mysql -uroot -p'wikijsrocks' -e 'DROP SCHEMA IF EXISTS wiki; CREATE SCHEMA wiki;' || true [[ "${DEVDB}" == "mssql" ]] && docker-compose -f ./dev/docker-mssql/docker-compose.yml -p wiki --project-directory . exec db /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P 'W1kiJSR0cks!' -Q 'DROP DATABASE IF EXISTS wiki; CREATE DATABASE wiki;' || true [[ "${DEVDB}" == "sqlite" ]] && docker-compose -f ./dev/docker-sqlite/docker-compose.yml -p wiki --project-directory . exec wiki rm -rf /wiki/db.sqlite || true - docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec redis redis-cli flushall docker-dev-bash: ## Rebuild dockerized dev image docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec wiki bash diff --git a/client/graph/admin/system/system-query-info.gql b/client/graph/admin/system/system-query-info.gql index d38aace0..400a7405 100644 --- a/client/graph/admin/system/system-query-info.gql +++ b/client/graph/admin/system/system-query-info.gql @@ -15,10 +15,6 @@ query { ramTotal workingDirectory nodeVersion - redisVersion - redisUsedRAM - redisTotalRAM - redisHost } } } diff --git a/config.sample.yml b/config.sample.yml index 347149c8..fc802660 100644 --- a/config.sample.yml +++ b/config.sample.yml @@ -31,22 +31,19 @@ db: # SQLite only: storage: path/to/database.sqlite -# --------------------------------------------------------------------- -# Redis -# --------------------------------------------------------------------- -# Redis 3.2 or later required - -redis: - host: localhost - port: 6379 - db: 0 - password: null - ####################################################################### # ADVANCED OPTIONS # ####################################################################### # Do not change unless you know what you are doing! +# --------------------------------------------------------------------- +# Use X-Forwarded-For header +# --------------------------------------------------------------------- +# Enable only if Wiki.js is behind a reverse-proxy (nginx, apache, etc) +# or a cloud proxying services like Cloudflare. 
+ +trustProxy: false + # --------------------------------------------------------------------- # SSL/TLS Settings # --------------------------------------------------------------------- diff --git a/dev/build/config.yml b/dev/build/config.yml index b5c030df..527145d0 100644 --- a/dev/build/config.yml +++ b/dev/build/config.yml @@ -13,4 +13,5 @@ redis: port: $(REDIS_PORT) db: $(REDIS_DB) password: $(REDIS_PASS) +trustProxy: $(TRUST_PROXY) logLevel: info diff --git a/dev/docker-mariadb/config.yml b/dev/docker-mariadb/config.yml index 0dcd4528..78f30aca 100644 --- a/dev/docker-mariadb/config.yml +++ b/dev/docker-mariadb/config.yml @@ -7,8 +7,4 @@ db: user: wikijs pass: wikijsrocks db: wiki -redis: - host: redis - port: 6379 - db: 0 logLevel: info diff --git a/dev/docker-mariadb/docker-compose.yml b/dev/docker-mariadb/docker-compose.yml index 9ca820b7..92edb67c 100644 --- a/dev/docker-mariadb/docker-compose.yml +++ b/dev/docker-mariadb/docker-compose.yml @@ -3,16 +3,6 @@ version: "3" services: - - redis: - image: redis:4-alpine - ports: - - "16379:6379" - logging: - driver: "none" - networks: - - wikinet - db: image: mariadb:10.3 environment: @@ -44,7 +34,6 @@ services: dockerfile: dev/docker-mariadb/Dockerfile depends_on: - db - - redis networks: - wikinet ports: diff --git a/dev/docker-mariadb/init.sh b/dev/docker-mariadb/init.sh index 69d78eed..1b0dc81d 100644 --- a/dev/docker-mariadb/init.sh +++ b/dev/docker-mariadb/init.sh @@ -1,7 +1,6 @@ #!/bin/sh -echo "Waiting for redis and mariadb to start up..." -bash ./dev/docker-common/wait.sh redis:6379 +echo "Waiting for mariadb to start up..." bash ./dev/docker-common/wait.sh db:3306 echo "=== READY ===" tail -f /dev/null diff --git a/dev/docker-mssql/config.yml b/dev/docker-mssql/config.yml index 5ebedf09..3993b442 100644 --- a/dev/docker-mssql/config.yml +++ b/dev/docker-mssql/config.yml @@ -7,8 +7,4 @@ db: user: SA pass: W1kiJSR0cks! db: wiki -redis: - host: redis - port: 6379 - db: 0 logLevel: info diff --git a/dev/docker-mssql/docker-compose.yml b/dev/docker-mssql/docker-compose.yml index a36ca385..0f66e095 100644 --- a/dev/docker-mssql/docker-compose.yml +++ b/dev/docker-mssql/docker-compose.yml @@ -3,16 +3,6 @@ version: "3" services: - - redis: - image: redis:4-alpine - ports: - - "16379:6379" - logging: - driver: "none" - networks: - - wikinet - db: image: mcr.microsoft.com/mssql/server:2017-latest environment: @@ -34,7 +24,6 @@ services: dockerfile: dev/docker-mssql/Dockerfile depends_on: - db - - redis networks: - wikinet ports: diff --git a/dev/docker-mssql/init.sh b/dev/docker-mssql/init.sh index 5dba84af..23f4bf15 100644 --- a/dev/docker-mssql/init.sh +++ b/dev/docker-mssql/init.sh @@ -1,7 +1,6 @@ #!/bin/sh -echo "Waiting for redis and mssql to start up..." -bash ./dev/docker-common/wait.sh redis:6379 +echo "Waiting for mssql to start up..." 
bash ./dev/docker-common/wait.sh db:1433 echo "=== READY ===" tail -f /dev/null diff --git a/dev/docker-mysql/config.yml b/dev/docker-mysql/config.yml index d83189ab..e99a9f92 100644 --- a/dev/docker-mysql/config.yml +++ b/dev/docker-mysql/config.yml @@ -7,8 +7,4 @@ db: user: wikijs pass: wikijsrocks db: wiki -redis: - host: redis - port: 6379 - db: 0 logLevel: info diff --git a/dev/docker-mysql/docker-compose.yml b/dev/docker-mysql/docker-compose.yml index 82a3eee0..e32782e1 100644 --- a/dev/docker-mysql/docker-compose.yml +++ b/dev/docker-mysql/docker-compose.yml @@ -3,16 +3,6 @@ version: "3" services: - - redis: - image: redis:4-alpine - ports: - - "16379:6379" - logging: - driver: "none" - networks: - - wikinet - db: image: mysql:5.7 environment: @@ -44,7 +34,6 @@ services: dockerfile: dev/docker-mysql/Dockerfile depends_on: - db - - redis networks: - wikinet ports: diff --git a/dev/docker-mysql/init.sh b/dev/docker-mysql/init.sh index 07f4ae3e..d70876cc 100644 --- a/dev/docker-mysql/init.sh +++ b/dev/docker-mysql/init.sh @@ -1,7 +1,6 @@ #!/bin/sh -echo "Waiting for redis and mysql to start up..." -bash ./dev/docker-common/wait.sh redis:6379 +echo "Waiting for mysql to start up..." bash ./dev/docker-common/wait.sh db:3306 echo "=== READY ===" tail -f /dev/null diff --git a/dev/docker-postgres/config.yml b/dev/docker-postgres/config.yml index 7003e329..ce42e0af 100644 --- a/dev/docker-postgres/config.yml +++ b/dev/docker-postgres/config.yml @@ -7,8 +7,4 @@ db: user: wikijs pass: wikijsrocks db: wiki -redis: - host: redis - port: 6379 - db: 0 logLevel: info diff --git a/dev/docker-postgres/docker-compose.yml b/dev/docker-postgres/docker-compose.yml index aa5600a4..06383e06 100644 --- a/dev/docker-postgres/docker-compose.yml +++ b/dev/docker-postgres/docker-compose.yml @@ -3,16 +3,6 @@ version: "3" services: - - redis: - image: redis:4-alpine - ports: - - "16379:6379" - logging: - driver: "none" - networks: - - wikinet - db: image: postgres:9-alpine environment: @@ -43,7 +33,6 @@ services: dockerfile: dev/docker-postgres/Dockerfile depends_on: - db - - redis networks: - wikinet ports: diff --git a/dev/docker-postgres/init.sh b/dev/docker-postgres/init.sh index 96e6182c..daf1b184 100644 --- a/dev/docker-postgres/init.sh +++ b/dev/docker-postgres/init.sh @@ -1,7 +1,6 @@ #!/bin/sh -echo "Waiting for redis and postgres to start up..." -bash ./dev/docker-common/wait.sh redis:6379 +echo "Waiting for postgres to start up..." bash ./dev/docker-common/wait.sh db:5432 echo "=== READY ===" tail -f /dev/null diff --git a/dev/docker-sqlite/config.yml b/dev/docker-sqlite/config.yml index 5c04c370..edb96072 100644 --- a/dev/docker-sqlite/config.yml +++ b/dev/docker-sqlite/config.yml @@ -3,8 +3,4 @@ bindIP: 0.0.0.0 db: type: sqlite storage: /wiki/db.sqlite -redis: - host: redis - port: 6379 - db: 0 logLevel: info diff --git a/dev/docker-sqlite/docker-compose.yml b/dev/docker-sqlite/docker-compose.yml index 87c3de5e..4b084778 100644 --- a/dev/docker-sqlite/docker-compose.yml +++ b/dev/docker-sqlite/docker-compose.yml @@ -3,22 +3,10 @@ version: "3" services: - - redis: - image: redis:4-alpine - ports: - - "16379:6379" - logging: - driver: "none" - networks: - - wikinet - wiki: build: context: . 
dockerfile: dev/docker-sqlite/Dockerfile - depends_on: - - redis networks: - wikinet ports: @@ -26,7 +14,6 @@ services: volumes: - .:/wiki - /wiki/node_modules - command: ["sh", "./dev/docker-sqlite/init.sh"] networks: wikinet: diff --git a/dev/docker-sqlite/init.sh b/dev/docker-sqlite/init.sh deleted file mode 100644 index 8cd23b81..00000000 --- a/dev/docker-sqlite/init.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh - -echo "Waiting for redis to start up..." -bash ./dev/docker-common/wait.sh redis:6379 -echo "=== READY ===" -tail -f /dev/null diff --git a/dev/examples/docker-compose.yml b/dev/examples/docker-compose.yml index fa202c97..b87ce023 100644 --- a/dev/examples/docker-compose.yml +++ b/dev/examples/docker-compose.yml @@ -1,13 +1,5 @@ version: "3" services: - - redis: - image: redis:4-alpine - logging: - driver: "none" - networks: - - wikinet - db: image: postgres:9-alpine environment: diff --git a/dev/scripts/docker-clean-db.js b/dev/scripts/docker-clean-db.js deleted file mode 100644 index d2b3c8ad..00000000 --- a/dev/scripts/docker-clean-db.js +++ /dev/null @@ -1,35 +0,0 @@ -const { Client } = require('pg') -const fs = require('fs') -const path = require('path') -const yaml = require('js-yaml') - -let config = {} - -try { - conf = yaml.safeLoad( - cfgHelper.parseConfigValue( - fs.readFileSync(path.join(process.cwd(), 'dev/docker/config.yml'), 'utf8') - ) - ) -} catch (err) { - console.error(err.message) - process.exit(1) -} - -const client = new Client({ - user: config.db.username, - host: config.db.host, - database: config.db.database, - password: config.db.password, - port: config.db.port, -}) - -async function main () { - await client.connect() - await client.query('DROP SCHEMA public CASCADE;') - await client.query('CREATE SCHEMA public;') - await client.end() - console.info('Success.') -} - -main() diff --git a/package.json b/package.json index a9b4cc11..c13f6a72 100644 --- a/package.json +++ b/package.json @@ -48,14 +48,11 @@ "bcryptjs-then": "1.0.1", "bluebird": "3.5.3", "body-parser": "1.18.3", - "bull": "3.6.0", "chalk": "2.4.2", "cheerio": "1.0.0-rc.2", - "child-process-promise": "2.2.1", "chokidar": "2.0.4", "clean-css": "4.2.1", "compression": "1.7.3", - "connect-redis": "3.4.0", "cookie-parser": "1.4.3", "cors": "2.8.5", "custom-error-instance": "2.1.1", @@ -63,14 +60,10 @@ "diff": "4.0.1", "diff2html": "2.7.0", "dotize": "^0.3.0", - "execa": "1.0.0", "express": "4.16.4", "express-brute": "1.0.1", - "express-brute-redis": "0.0.1", - "express-session": "1.15.6", "file-type": "10.7.1", "filesize": "4.0.0", - "follow-redirects": "1.6.1", "fs-extra": "7.0.1", "getos": "3.1.1", "graphql": "14.1.1", @@ -83,7 +76,6 @@ "i18next-localstorage-cache": "1.1.1", "i18next-node-fs-backend": "2.1.1", "image-size": "0.7.1", - "ioredis": "4.6.2", "js-base64": "2.5.1", "js-binary": "1.2.0", "js-yaml": "3.12.1", @@ -117,7 +109,6 @@ "node-2fa": "1.1.2", "node-cache": "4.2.0", "nodemailer": "5.1.1", - "oauth2orize": "1.11.0", "objection": "1.5.3", "ora": "3.0.0", "passport": "0.4.0", @@ -146,7 +137,6 @@ "pug": "2.0.3", "qr-image": "3.2.0", "raven": "2.6.4", - "read-chunk": "3.0.0", "remove-markdown": "0.3.0", "request": "2.88.0", "request-promise": "4.2.2", diff --git a/server/app/data.yml b/server/app/data.yml index 351866d1..cce70b99 100644 --- a/server/app/data.yml +++ b/server/app/data.yml @@ -15,11 +15,6 @@ defaults: pass: wikijsrocks db: wiki storage: ./db.sqlite - redis: - host: localhost - port: 6379 - db: 0 - password: null ssl: enabled: false bindIP: 0.0.0.0 @@ -53,30 +48,15 @@ 
localeNamespaces: - auth - common jobs: - fetchGraphLocale: - onInit: false - cron: false - concurrency: 0 purgeUploads: onInit: true - cron: '*/15 * * * *' - concurrency: 0 - renderPage: - onInit: false - cron: false - concurrency: 1 + schedule: PT15M syncGraphLocales: onInit: true - cron: '0 0 * * *' - concurrency: 0 + schedule: P1D syncGraphUpdates: onInit: true - cron: '0 0 * * *' - concurrency: 0 - syncStorage: - onInit: false - cron: false - concurrency: 1 + schedule: P1D groups: defaultPermissions: - 'manage:pages' diff --git a/server/core/cache.js b/server/core/cache.js new file mode 100644 index 00000000..ea01b27e --- /dev/null +++ b/server/core/cache.js @@ -0,0 +1,7 @@ +const NodeCache = require('node-cache') + +module.exports = { + init() { + return new NodeCache() + } +} diff --git a/server/core/job.js b/server/core/job.js new file mode 100644 index 00000000..0d2fb350 --- /dev/null +++ b/server/core/job.js @@ -0,0 +1,84 @@ +const moment = require('moment') +const childProcess = require('child_process') + +module.exports = class Job { + constructor({ + name, + immediate = false, + schedule = 'P1D', + repeat = false, + worker = false + }) { + this.finished = Promise.resolve() + this.name = name + this.immediate = immediate + this.schedule = moment.duration(schedule) + this.repeat = repeat + this.worker = worker + } + + /** + * Start Job + * + * @param {Object} data Job Data + */ + start(data) { + if (this.immediate) { + this.invoke(data) + } else { + this.queue(data) + } + } + + /** + * Queue the next job run according to the wait duration + * + * @param {Object} data Job Data + */ + queue(data) { + this.timeout = setTimeout(this.invoke.bind(this), this.schedule.asMilliseconds(), data) + } + + /** + * Run the actual job + * + * @param {Object} data Job Data + */ + async invoke(data) { + try { + if (this.worker) { + const proc = childProcess.fork(`server/core/worker.js`, [ + `--job=${this.name}`, + `--data=${data}` + ], { + cwd: WIKI.ROOTPATH + }) + this.finished = new Promise((resolve, reject) => { + proc.on('exit', (code, signal) => { + if (code === 0) { + resolve() + } else { + reject(signal) + } + proc.kill() + }) + }) + } else { + this.finished = require(`../jobs/${this.name}`)(data) + } + await this.finished + } catch (err) { + WIKI.logger.warn(err) + } + if (this.repeat) { + this.queue(data) + } + } + + /** + * Stop any future job invocation from occuring + */ + stop() { + clearTimeout(this.timeout) + } +} diff --git a/server/core/kernel.js b/server/core/kernel.js index 38f0806e..994a2133 100644 --- a/server/core/kernel.js +++ b/server/core/kernel.js @@ -10,10 +10,10 @@ module.exports = { WIKI.logger.info('=======================================') WIKI.models = require('./db').init() - WIKI.redis = require('./redis').init() - WIKI.queue = require('./queue').init() - await this.preBootMaster() + await WIKI.models.onReady + await WIKI.configSvc.loadFromDb() + this.bootMaster() }, /** @@ -21,11 +21,10 @@ module.exports = { */ async preBootMaster() { try { - await WIKI.models.onReady - await WIKI.configSvc.loadFromDb() - await WIKI.queue.clean() + await this.initTelemetry() + WIKI.cache = require('./cache').init() + WIKI.scheduler = require('./scheduler').init() WIKI.events = new EventEmitter() - WIKI.redisSub = require('./redis').subscribe() } catch (err) { WIKI.logger.error(err) process.exit(1) @@ -40,7 +39,7 @@ module.exports = { WIKI.logger.info('Starting setup wizard...') require('../setup')() } else { - await this.initTelemetry() + await this.preBootMaster() await 
require('../master')() this.postBootMaster() } @@ -62,7 +61,7 @@ module.exports = { await WIKI.auth.activateStrategies() await WIKI.models.storage.initTargets() - await WIKI.queue.start() + WIKI.scheduler.start() }, /** * Init Telemetry diff --git a/server/core/localization.js b/server/core/localization.js index 54466e23..08be1b10 100644 --- a/server/core/localization.js +++ b/server/core/localization.js @@ -27,15 +27,6 @@ module.exports = { // Load current language + namespaces this.refreshNamespaces(true) - // Listen for localization events - WIKI.events.on('localization', (action) => { - switch (action) { - case 'reload': - this.refreshNamespaces() - break - } - }) - return this }, /** diff --git a/server/core/queue.js b/server/core/queue.js deleted file mode 100644 index 6b76ef55..00000000 --- a/server/core/queue.js +++ /dev/null @@ -1,63 +0,0 @@ -const path = require('path') -const Bull = require('bull') -const Promise = require('bluebird') -const _ = require('lodash') - -/* global WIKI */ - -module.exports = { - job: {}, - init() { - _.forOwn(WIKI.data.jobs, (queueParams, queueName) => { - this.job[queueName] = new Bull(queueName, { - prefix: `queue`, - redis: WIKI.config.redis - }) - if (queueParams.concurrency > 0) { - this.job[queueName].process(queueParams.concurrency, path.join(WIKI.SERVERPATH, `jobs/${_.kebabCase(queueName)}.js`)) - } else { - this.job[queueName].process(path.join(WIKI.SERVERPATH, `jobs/${_.kebabCase(queueName)}.js`)) - } - }) - return this - }, - start() { - _.forOwn(WIKI.data.jobs, (queueParams, queueName) => { - if (queueParams.onInit) { - this.job[queueName].add({}, { - removeOnComplete: true - }) - } - if (queueParams.cron) { - this.job[queueName].add({}, { - repeat: { cron: queueParams.cron }, - removeOnComplete: true - }) - } - }) - }, - async quit() { - for (const queueName in this.job) { - await this.job[queueName].close() - } - }, - async clean() { - return Promise.each(_.keys(WIKI.data.jobs), queueName => { - return new Promise((resolve, reject) => { - let keyStream = WIKI.redis.scanStream({ - match: `queue:${queueName}:*` - }) - keyStream.on('data', resultKeys => { - if (resultKeys.length > 0) { - WIKI.redis.del(resultKeys) - } - }) - keyStream.on('end', resolve) - }) - }).then(() => { - WIKI.logger.info('Purging old queue jobs: [ OK ]') - }).return(true).catch(err => { - WIKI.logger.error(err) - }) - } -} diff --git a/server/core/redis.js b/server/core/redis.js deleted file mode 100644 index 4a254d2a..00000000 --- a/server/core/redis.js +++ /dev/null @@ -1,36 +0,0 @@ -const Redis = require('ioredis') -const { isPlainObject } = require('lodash') - -/* global WIKI */ - -module.exports = { - init() { - if (isPlainObject(WIKI.config.redis)) { - let red = new Redis(WIKI.config.redis) - red.on('ready', () => { - WIKI.logger.info('Redis connection: [ OK ]') - }) - red.on('error', () => { - WIKI.logger.error('Failed to connect to Redis instance!') - process.exit(1) - }) - return red - } else { - WIKI.logger.error('Invalid Redis configuration!') - process.exit(1) - } - }, - subscribe() { - let red = this.init() - red.on('message', (channel, msg) => { - WIKI.events.emit(channel, msg) - }) - red.subscribe('localization', 'updates', (err, count) => { - if (err) { - WIKI.logger.error(err) - process.exit(1) - } - }) - return red - } -} diff --git a/server/core/scheduler.js b/server/core/scheduler.js new file mode 100644 index 00000000..0a2d01cc --- /dev/null +++ b/server/core/scheduler.js @@ -0,0 +1,34 @@ +const Job = require('./job') +const _ = 
require('lodash') + +/* global WIKI */ + +module.exports = { + jobs: [], + init() { + return this + }, + start() { + _.forOwn(WIKI.data.jobs, (queueParams, queueName) => { + this.registerJob({ + name: _.kebabCase(queueName), + immediate: queueParams.onInit, + schedule: queueParams.schedule, + repeat: true + }) + }) + }, + registerJob(opts, data) { + const job = new Job(opts) + job.start(data) + if (job.repeat) { + this.jobs.push(job) + } + return job + }, + stop() { + this.jobs.forEach(job => { + job.stop() + }) + } +} diff --git a/server/core/system.js b/server/core/system.js index 3d86d8ee..dcbe35cd 100644 --- a/server/core/system.js +++ b/server/core/system.js @@ -15,15 +15,6 @@ module.exports = { minimumNodeRequired: '10.12.0' }, init() { - // Listen for updates events - WIKI.events.on('updates', (infoRaw) => { - try { - this.updates = JSON.parse(infoRaw) - } catch (err) { - WIKI.logger.warn('Failed to parse updates info.') - } - }) - // Clear content cache fs.emptyDir(path.join(WIKI.ROOTPATH, 'data/cache')) diff --git a/server/core/worker.js b/server/core/worker.js index ba147c44..bb983c07 100644 --- a/server/core/worker.js +++ b/server/core/worker.js @@ -10,9 +10,10 @@ let WIKI = { global.WIKI = WIKI WIKI.configSvc.init() - -// ---------------------------------------- -// Init Logger -// ---------------------------------------- - WIKI.logger = require('./logger').init('JOB') +const args = require('yargs').argv + +;(async () => { + await require(`../jobs/${args.job}`)(args.data) + process.exit(0) +})() diff --git a/server/db/migrator-source.js b/server/db/migrator-source.js index bb55e600..debed19d 100644 --- a/server/db/migrator-source.js +++ b/server/db/migrator-source.js @@ -11,7 +11,7 @@ module.exports = { */ async getMigrations() { const absoluteDir = path.join(WIKI.SERVERPATH, 'db/migrations') - const migrationFiles = await fs.readdirAsync(absoluteDir) + const migrationFiles = await fs.readdir(absoluteDir) return migrationFiles.sort(semver.compare).map(m => ({ file: m, directory: absoluteDir diff --git a/server/graph/resolvers/localization.js b/server/graph/resolvers/localization.js index 80d71a25..3fc7c19c 100644 --- a/server/graph/resolvers/localization.js +++ b/server/graph/resolvers/localization.js @@ -12,9 +12,9 @@ module.exports = { }, LocalizationQuery: { async locales(obj, args, context, info) { - let remoteLocales = await WIKI.redis.get('locales') + let remoteLocales = await WIKI.cache.get('locales') let localLocales = await WIKI.models.locales.query().select('code', 'isRTL', 'name', 'nativeName', 'createdAt', 'updatedAt') - remoteLocales = (remoteLocales) ? JSON.parse(remoteLocales) : localLocales + remoteLocales = (remoteLocales) ? 
remoteLocales : localLocales return _.map(remoteLocales, rl => { let isInstalled = _.some(localLocales, ['code', rl.code]) return { @@ -39,12 +39,11 @@ module.exports = { LocalizationMutation: { async downloadLocale(obj, args, context) { try { - const job = await WIKI.queue.job.fetchGraphLocale.add({ - locale: args.locale - }, { - timeout: 30000 - }) - await job.finished() + const job = await WIKI.scheduler.registerJob({ + name: 'fetch-graph-locale', + immediate: true + }, args.locale) + await job.finished return { responseResult: graphHelper.generateSuccess('Locale downloaded successfully') } diff --git a/server/graph/resolvers/system.js b/server/graph/resolvers/system.js index 525c970a..159c400b 100644 --- a/server/graph/resolvers/system.js +++ b/server/graph/resolvers/system.js @@ -98,18 +98,6 @@ module.exports = { nodeVersion() { return process.version.substr(1) }, - redisVersion() { - return WIKI.redis.serverInfo.redis_version - }, - redisUsedRAM() { - return WIKI.redis.serverInfo.used_memory_human - }, - redisTotalRAM() { - return _.get(WIKI.redis.serverInfo, 'total_system_memory_human', 'N/A') - }, - redisHost() { - return WIKI.redis.options.host - }, async groupsTotal() { const total = await WIKI.models.groups.query().count('* as total').first().pluck('total') return _.toSafeInteger(total) diff --git a/server/graph/schemas/system.graphql b/server/graph/schemas/system.graphql index fc065133..4a3b0a34 100644 --- a/server/graph/schemas/system.graphql +++ b/server/graph/schemas/system.graphql @@ -38,10 +38,6 @@ type SystemInfo { pagesTotal: Int @auth(requires: ["manage:system", "manage:navigation", "manage:pages", "delete:pages"]) platform: String @auth(requires: ["manage:system"]) ramTotal: String @auth(requires: ["manage:system"]) - redisHost: String @auth(requires: ["manage:system"]) - redisTotalRAM: String @auth(requires: ["manage:system"]) - redisUsedRAM: String @auth(requires: ["manage:system"]) - redisVersion: String @auth(requires: ["manage:system"]) usersTotal: Int @auth(requires: ["manage:system", "manage:navigation", "manage:groups", "write:groups", "manage:users", "write:users"]) workingDirectory: String @auth(requires: ["manage:system"]) } diff --git a/server/jobs/fetch-graph-locale.js b/server/jobs/fetch-graph-locale.js index d1665962..f4b7c230 100644 --- a/server/jobs/fetch-graph-locale.js +++ b/server/jobs/fetch-graph-locale.js @@ -1,17 +1,12 @@ -require('../core/worker') const _ = require('lodash') const { createApolloFetch } = require('apollo-fetch') /* global WIKI */ -WIKI.redis = require('../core/redis').init() -WIKI.models = require('../core/db').init() - -module.exports = async (job) => { - WIKI.logger.info(`Fetching locale ${job.data.locale} from Graph endpoint...`) +module.exports = async (localeCode) => { + WIKI.logger.info(`Fetching locale ${localeCode} from Graph endpoint...`) try { - await WIKI.configSvc.loadFromDb() const apollo = createApolloFetch({ uri: WIKI.config.graphEndpoint }) @@ -26,7 +21,7 @@ module.exports = async (job) => { } }`, variables: { - code: job.data.locale + code: localeCode } }) const strings = _.get(respStrings, 'data.localization.strings', []) @@ -36,12 +31,12 @@ module.exports = async (job) => { _.set(lcObj, row.key.replace(':', '.'), row.value) }) - const locales = await WIKI.redis.get('locales') + const locales = await WIKI.cache.get('locales') if (locales) { - const currentLocale = _.find(JSON.parse(locales), ['code', job.data.locale]) || {} - await WIKI.models.locales.query().delete().where('code', job.data.locale) + const 
currentLocale = _.find(locales, ['code', localeCode]) || {} + await WIKI.models.locales.query().delete().where('code', localeCode) await WIKI.models.locales.query().insert({ - code: job.data.locale, + code: localeCode, strings: lcObj, isRTL: currentLocale.isRTL, name: currentLocale.name, @@ -51,11 +46,11 @@ module.exports = async (job) => { throw new Error('Failed to fetch cached locales list! Restart server to resolve this issue.') } - await WIKI.redis.publish('localization', 'reload') + await WIKI.lang.refreshNamespaces() - WIKI.logger.info(`Fetching locale ${job.data.locale} from Graph endpoint: [ COMPLETED ]`) + WIKI.logger.info(`Fetching locale ${localeCode} from Graph endpoint: [ COMPLETED ]`) } catch (err) { - WIKI.logger.error(`Fetching locale ${job.data.locale} from Graph endpoint: [ FAILED ]`) + WIKI.logger.error(`Fetching locale ${localeCode} from Graph endpoint: [ FAILED ]`) WIKI.logger.error(err.message) } } diff --git a/server/jobs/purge-uploads.js b/server/jobs/purge-uploads.js index 61a8d450..90a297af 100644 --- a/server/jobs/purge-uploads.js +++ b/server/jobs/purge-uploads.js @@ -1,26 +1,24 @@ -require('../core/worker') - /* global WIKI */ const Promise = require('bluebird') -const fs = Promise.promisifyAll(require('fs-extra')) +const fs = require('fs-extra') const moment = require('moment') const path = require('path') -module.exports = async (job) => { +module.exports = async () => { WIKI.logger.info('Purging orphaned upload files...') try { const uplTempPath = path.resolve(process.cwd(), WIKI.config.paths.data, 'uploads') - const ls = await fs.readdirAsync(uplTempPath) + const ls = await fs.readdir(uplTempPath) const fifteenAgo = moment().subtract(15, 'minutes') await Promise.map(ls, (f) => { - return fs.statAsync(path.join(uplTempPath, f)).then((s) => { return { filename: f, stat: s } }) + return fs.stat(path.join(uplTempPath, f)).then((s) => { return { filename: f, stat: s } }) }).filter((s) => { return s.stat.isFile() }).then((arrFiles) => { return Promise.map(arrFiles, (f) => { if (moment(f.stat.ctime).isBefore(fifteenAgo, 'minute')) { - return fs.unlinkAsync(path.join(uplTempPath, f.filename)) + return fs.unlink(path.join(uplTempPath, f.filename)) } }) }) diff --git a/server/jobs/render-page.js b/server/jobs/render-page.js index b26f8a0e..83751c2a 100644 --- a/server/jobs/render-page.js +++ b/server/jobs/render-page.js @@ -1,23 +1,29 @@ -require('../core/worker') - const _ = require('lodash') const cheerio = require('cheerio') /* global WIKI */ -WIKI.models = require('../core/db').init() - -module.exports = async (job) => { - WIKI.logger.info(`Rendering page ${job.data.page.path}...`) +module.exports = async (pageId) => { + WIKI.logger.info(`Rendering page ID ${pageId}...`) try { - let output = job.data.page.content - for (let core of job.data.pipeline) { + WIKI.models = require('../core/db').init() + + const page = await WIKI.models.pages.getPageFromDb(pageId) + if (!page) { + throw new Error('Invalid Page Id') + } + + await WIKI.models.renderers.fetchDefinitions() + const pipeline = await WIKI.models.renderers.getRenderingPipeline(page.contentType) + + let output = page.content + for (let core of pipeline) { const renderer = require(`../modules/rendering/${_.kebabCase(core.key)}/renderer.js`) output = await renderer.render.call({ config: core.config, children: core.children, - page: job.data.page, + page: page, input: output }) } @@ -61,18 +67,20 @@ module.exports = async (job) => { render: output, toc: JSON.stringify(toc.root) }) - .where('id', job.data.page.id) 
+ .where('id', pageId) // Save to cache await WIKI.models.pages.savePageToCache({ - ...job.data.page, + ...page, render: output, toc: JSON.stringify(toc.root) }) - WIKI.logger.info(`Rendering page ${job.data.page.path}: [ COMPLETED ]`) + await WIKI.models.knex.destroy() + + WIKI.logger.info(`Rendering page ID ${pageId}: [ COMPLETED ]`) } catch (err) { - WIKI.logger.error(`Rendering page ${job.data.page.path}: [ FAILED ]`) + WIKI.logger.error(`Rendering page ID ${pageId}: [ FAILED ]`) WIKI.logger.error(err.message) } } diff --git a/server/jobs/sync-graph-locales.js b/server/jobs/sync-graph-locales.js index 422c207f..f5b0984f 100644 --- a/server/jobs/sync-graph-locales.js +++ b/server/jobs/sync-graph-locales.js @@ -1,17 +1,12 @@ -require('../core/worker') const _ = require('lodash') const { createApolloFetch } = require('apollo-fetch') /* global WIKI */ -WIKI.redis = require('../core/redis').init() -WIKI.models = require('../core/db').init() - -module.exports = async (job) => { +module.exports = async () => { WIKI.logger.info('Syncing locales with Graph endpoint...') try { - await WIKI.configSvc.loadFromDb() const apollo = createApolloFetch({ uri: WIKI.config.graphEndpoint }) @@ -33,7 +28,7 @@ module.exports = async (job) => { }` }) const locales = _.sortBy(_.get(respList, 'data.localization.locales', []), 'name').map(lc => ({...lc, isInstalled: (lc.code === 'en')})) - WIKI.redis.set('locales', JSON.stringify(locales)) + WIKI.cache.set('locales', locales) const currentLocale = _.find(locales, ['code', WIKI.config.lang.code]) // -> Download locale strings @@ -68,7 +63,7 @@ module.exports = async (job) => { }).where('code', WIKI.config.lang.code) } - await WIKI.redis.publish('localization', 'reload') + await WIKI.lang.refreshNamespaces() WIKI.logger.info('Syncing locales with Graph endpoint: [ COMPLETED ]') } catch (err) { diff --git a/server/jobs/sync-graph-updates.js b/server/jobs/sync-graph-updates.js index 6a8c9202..c4cd21b7 100644 --- a/server/jobs/sync-graph-updates.js +++ b/server/jobs/sync-graph-updates.js @@ -1,17 +1,12 @@ -require('../core/worker') const _ = require('lodash') const { createApolloFetch } = require('apollo-fetch') /* global WIKI */ -WIKI.redis = require('../core/redis').init() -WIKI.models = require('../core/db').init() - -module.exports = async (job) => { +module.exports = async () => { WIKI.logger.info(`Fetching latest updates from Graph endpoint...`) try { - await WIKI.configSvc.loadFromDb() const apollo = createApolloFetch({ uri: WIKI.config.graphEndpoint }) @@ -33,9 +28,10 @@ module.exports = async (job) => { version: WIKI.version } }) - const info = _.get(resp, 'data.releases.checkForUpdates', {}) - - await WIKI.redis.publish('updates', JSON.stringify(info)) + const info = _.get(resp, 'data.releases.checkForUpdates', false) + if (info) { + WIKI.system.updates = info + } WIKI.logger.info(`Fetching latest updates from Graph endpoint: [ COMPLETED ]`) } catch (err) { diff --git a/server/master.js b/server/master.js index 025a5805..b3440aba 100644 --- a/server/master.js +++ b/server/master.js @@ -46,7 +46,9 @@ module.exports = async () => { app.use(mw.security) app.use(cors(WIKI.config.cors)) app.options('*', cors(WIKI.config.cors)) - app.enable('trust proxy') + if (WIKI.config.trustProxy) { + app.enable('trust proxy') + } // ---------------------------------------- // Public Assets diff --git a/server/models/navigation.js b/server/models/navigation.js index 55ca4e17..5bcc8b9a 100644 --- a/server/models/navigation.js +++ b/server/models/navigation.js @@ -23,15 +23,15 
@@ module.exports = class Navigation extends Model { static async getTree({ cache = false } = {}) { if (cache) { - const navTreeCached = await WIKI.redis.get('nav:sidebar') + const navTreeCached = await WIKI.cache.get('nav:sidebar') if (navTreeCached) { - return JSON.parse(navTreeCached) + return navTreeCached } } const navTree = await WIKI.models.navigation.query().findOne('key', 'site') if (navTree) { if (cache) { - await WIKI.redis.set('nav:sidebar', JSON.stringify(navTree.config), 'EX', 300) + await WIKI.cache.set('nav:sidebar', navTree.config, 300) } return navTree.config } else { diff --git a/server/models/pages.js b/server/models/pages.js index 6809bf60..a819d2dc 100644 --- a/server/models/pages.js +++ b/server/models/pages.js @@ -217,15 +217,12 @@ module.exports = class Page extends Model { } static async renderPage(page) { - const pipeline = await WIKI.models.renderers.getRenderingPipeline(page.contentType) - const renderJob = await WIKI.queue.job.renderPage.add({ - page, - pipeline - }, { - removeOnComplete: true, - removeOnFail: true - }) - return renderJob.finished() + const renderJob = await WIKI.scheduler.registerJob({ + name: 'render-page', + immediate: true, + worker: true + }, page.id) + return renderJob.finished } static async getPage(opts) { @@ -240,6 +237,7 @@ module.exports = class Page extends Model { } static async getPageFromDb(opts) { + const queryModeID = _.isNumber(opts) return WIKI.models.pages.query() .column([ 'pages.*', @@ -252,11 +250,14 @@ module.exports = class Page extends Model { ]) .joinRelation('author') .joinRelation('creator') - .where({ + .where(queryModeID ? { + 'pages.id': opts + } : { 'pages.path': opts.path, 'pages.localeCode': opts.locale }) .andWhere(builder => { + if (queryModeID) return builder.where({ 'pages.isPublished': true }).orWhere({ @@ -265,6 +266,7 @@ module.exports = class Page extends Model { }) }) .andWhere(builder => { + if (queryModeID) return if (opts.isPrivate) { builder.where({ 'pages.isPrivate': true, 'pages.privateNS': opts.privateNS }) } else { diff --git a/server/models/renderers.js b/server/models/renderers.js index f88f609c..0bb33d13 100644 --- a/server/models/renderers.js +++ b/server/models/renderers.js @@ -35,22 +35,26 @@ module.exports = class Renderer extends Model { return WIKI.models.renderers.query() } + static async fetchDefinitions() { + const rendererDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/rendering')) + let diskRenderers = [] + for (let dir of rendererDirs) { + const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/rendering', dir, 'definition.yml'), 'utf8') + diskRenderers.push(yaml.safeLoad(def)) + } + WIKI.data.renderers = diskRenderers.map(renderer => ({ + ...renderer, + props: commonHelper.parseModuleProps(renderer.props) + })) + } + static async refreshRenderersFromDisk() { let trx try { const dbRenderers = await WIKI.models.renderers.query() // -> Fetch definitions from disk - const rendererDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/rendering')) - let diskRenderers = [] - for (let dir of rendererDirs) { - const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/rendering', dir, 'definition.yml'), 'utf8') - diskRenderers.push(yaml.safeLoad(def)) - } - WIKI.data.renderers = diskRenderers.map(renderer => ({ - ...renderer, - props: commonHelper.parseModuleProps(renderer.props) - })) + await WIKI.models.renderers.fetchDefinitions() // -> Insert new Renderers let newRenderers = [] diff --git a/server/models/storage.js b/server/models/storage.js index 
959c9df9..2113ce21 100644 --- a/server/models/storage.js +++ b/server/models/storage.js @@ -107,6 +107,11 @@ module.exports = class Storage extends Model { target.fn.config = target.config target.fn.mode = target.mode await target.fn.init() + // if (target.schedule) { + // WIKI.scheduler.registerJob({ + // name: + // }, target.fn.sync) + // } } } catch (err) { WIKI.logger.warn(err) diff --git a/server/models/users.js b/server/models/users.js index c1d9e7ef..23e63822 100644 --- a/server/models/users.js +++ b/server/models/users.js @@ -294,7 +294,7 @@ module.exports = class User extends Model { static async loginTFA(opts, context) { if (opts.securityCode.length === 6 && opts.loginToken.length === 64) { - let result = await WIKI.redis.get(`tfa:${opts.loginToken}`) + let result = null // await WIKI.redis.get(`tfa:${opts.loginToken}`) if (result) { let userId = _.toSafeInteger(result) if (userId && userId > 0) { diff --git a/server/modules/storage/disk/definition.yml b/server/modules/storage/disk/definition.yml index 199cd724..09466115 100644 --- a/server/modules/storage/disk/definition.yml +++ b/server/modules/storage/disk/definition.yml @@ -8,6 +8,7 @@ isAvailable: true supportedModes: - push defaultMode: push +schedule: false props: path: type: String diff --git a/server/modules/storage/git/definition.yml b/server/modules/storage/git/definition.yml index fcbd0858..3b5f938b 100644 --- a/server/modules/storage/git/definition.yml +++ b/server/modules/storage/git/definition.yml @@ -10,6 +10,7 @@ supportedModes: - push - pull defaultMode: sync +schedule: PT5M props: authType: type: String diff --git a/wiki.js b/wiki.js index b2cae576..dfd92677 100644 --- a/wiki.js +++ b/wiki.js @@ -117,13 +117,12 @@ const init = { } }, async reload() { + console.warn(chalk.yellow('--- Stopping scheduled jobs...')) + if (global.WIKI.scheduler) { + global.WIKI.scheduler.stop() + } console.warn(chalk.yellow('--- Closing DB connections...')) await global.WIKI.models.knex.destroy() - console.warn(chalk.yellow('--- Closing Redis connections...')) - await global.WIKI.redis.quit() - await global.WIKI.redisSub.quit() - console.warn(chalk.yellow('--- Closing Queue connections...')) - await global.WIKI.queue.quit() console.warn(chalk.yellow('--- Closing Server connections...')) global.WIKI.server.destroy(() => { console.warn(chalk.yellow('--- Purging node modules cache...')) diff --git a/yarn.lock b/yarn.lock index 3941aa337099d327312c7f2bdd9b8410fe84024f..2454c23af308b9ccc29ee9ec89b5306ea0586bc3 100644 GIT binary patch delta 352 zcmV~$ODMx}00;1Xum67(OEmJB86xy2HixM>ptKSPE^9TdRy$xG5hYQQh~?rZv!pZ! 
[yarn.lock binary delta data omitted; see diffstat: Bin 579552 -> 567990 bytes]
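
The patch above replaces the Bull/Redis job queue with a lightweight in-process scheduler (server/core/scheduler.js driving server/core/job.js) and swaps direct Redis reads and writes for an in-memory node-cache instance (server/core/cache.js), so cached values no longer need JSON.parse/JSON.stringify. Below is a minimal standalone sketch of that pattern, assuming moment and node-cache are installed; SimpleJob and the purge handler are simplified, hypothetical stand-ins for the real Job class and server/jobs/purge-uploads.js, not the actual modules.

// sketch.js - illustrative only; assumes `moment` and `node-cache` are available
const moment = require('moment')
const NodeCache = require('node-cache')

// In-memory cache replacing the former Redis get/set calls.
// node-cache stores values as-is, so no JSON round-tripping is required.
const cache = new NodeCache()
cache.set('nav:sidebar', { items: [] }, 300) // 300s TTL, as used for nav:sidebar in the patch
console.log(cache.get('nav:sidebar'))

// Simplified version of the scheduling loop introduced by server/core/job.js:
// an ISO-8601 duration (e.g. PT15M, P1D) is converted to milliseconds and the
// job re-arms itself with setTimeout after each run.
class SimpleJob {
  constructor ({ name, schedule = 'P1D', immediate = false, repeat = false, handler }) {
    this.name = name
    this.delayMs = moment.duration(schedule).asMilliseconds()
    this.immediate = immediate
    this.repeat = repeat
    this.handler = handler
  }
  start (data) {
    // onInit: true in server/app/data.yml maps to an immediate first run
    if (this.immediate) {
      this.invoke(data)
    } else {
      this.queue(data)
    }
  }
  queue (data) {
    this.timeout = setTimeout(() => this.invoke(data), this.delayMs)
  }
  async invoke (data) {
    try {
      await this.handler(data)
    } catch (err) {
      console.warn(`[${this.name}] failed:`, err.message)
    }
    if (this.repeat) {
      this.queue(data) // re-arm after each run, like Job.invoke() in the patch
    }
  }
  stop () {
    clearTimeout(this.timeout)
  }
}

// Hypothetical handler standing in for server/jobs/purge-uploads.js
const purgeJob = new SimpleJob({
  name: 'purge-uploads',
  schedule: 'PT15M', // ISO-8601 duration, as configured in server/app/data.yml
  immediate: true,
  repeat: true,
  handler: async () => console.log('purging orphaned upload files...')
})
purgeJob.start()
// purgeJob.stop() // WIKI.scheduler.stop() does this for every repeating job on reload

The trade-off, visible in Job.invoke(), is that jobs registered with worker: true (such as render-page) are forked as child processes via server/core/worker.js instead of being distributed through a shared Redis-backed queue.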