Compare commits
2 Commits: 7bd4b58a0d ... c8dc7a9507

| Author | SHA1 | Date |
| --- | --- | --- |
| tungd0 | c8dc7a9507 | 1 year ago |
| tungd0 | a2c21de092 | 1 year ago |
@@ -1,111 +1,112 @@ package.json
 {
   "name": "backend-image",
   "version": "1.0.0",
   "description": "Service backend image",
   "author": "Mạnh Tiến",
   "main": "/src/index.js",
   "private": true,
   "license": "MIT",
   "engines": {
     "node": ">=8",
     "yarn": "*"
   },
   "nyc": {
     "require": [
       "babel-register"
     ],
     "sourceMap": false,
     "instrument": false
   },
   "scripts": {
     "clean": "./node_modules/.bin/rimraf dist -p",
     "build": "yarn run clean && mkdir -p dist && ./node_modules/.bin/babel src -s -D -d dist",
     "start": "cross-env NODE_ENV=production pm2 start ./dist/index.js",
     "start-worker": "cross-env NODE_ENV=production pm2 start docker-process.yml --only worker",
     "start-event-dispatcher": "cross-env NODE_ENV=production pm2 start docker-process.yml --only event-dispatcher",
     "dev": "nodemon src/index.js --exec ./node_modules/.bin/babel-node",
     "dev-worker": "nodemon src/index-worker.js --exec ./node_modules/.bin/babel-node",
     "dev-event-dispatcher": "nodemon src/index-event-dispatcher.js --exec ./node_modules/.bin/babel-node",
     "lint": "eslint **/*.js --ignore-path .gitignore --ignore-pattern internals/scripts",
     "lint:fix": "yarn lint -- --fix",
     "lint:watch": "yarn lint -- --watch",
     "test": "cross-env NODE_ENV=test nyc --reporter=html --reporter=text mocha --timeout 20000 --recursive src/api/tests",
     "test:unit": "cross-env NODE_ENV=test mocha dist/api/tests/unit",
     "test:integration": "cross-env NODE_ENV=test mocha --timeout 20000 dist/api/tests/integration",
     "test:watch": "cross-env NODE_ENV=test mocha --watch dist/api/tests/unit",
     "coverage": "nyc report --reporter=text-lcov | coveralls",
     "validate": "yarn lint && yarn test",
     "postpublish": "git push --tags",
     "docker:start": "node ./dist/index.js --exec ./node_modules/.bin/babel-node",
     "docker:dev": "docker-compose -f docker-compose.yml -f docker-compose.dev.yml up",
     "docker:test": "docker-compose -f docker-compose.yml -f docker-compose.test.yml up --abort-on-container-exit"
   },
   "repository": {
     "type": "git",
     "url": "git@gitlab.com:csell-team/b2c/sv-backend-file.git"
   },
   "dependencies": {
+    "@shopify/cli": "^3.45.4",
     "amqplib": "^0.5.2",
     "archiver": "^5.3.1",
     "auth-adapter": "1.1.0",
     "axios": "^0.18.0",
     "bcryptjs": "^2.4.3",
     "bluebird": "^3.5.2",
     "body-parser": "^1.17.0",
     "bull": "^3.4.1",
     "busboy": "^1.6.0",
     "compression": "^1.6.2",
     "cors": "^2.8.3",
     "cross-env": "^5.0.1",
     "dotenv-safe": "^5.0.1",
     "exceljs": "^4.3.0",
     "express": "^4.15.2",
     "express-validation": "^1.0.2",
     "fs-extra": "^10.1.0",
     "helmet": "^3.5.0",
     "http-status": "^1.0.1",
     "i18n": "^0.8.3",
     "image-downloader": "^4.3.0",
     "ioredis": "^4.14.0",
     "joi": "^10.4.1",
     "jsonwebtoken": "^8.5.1",
     "jszip": "^3.10.1",
     "lodash": "^4.17.4",
     "method-override": "^2.3.8",
     "moment-timezone": "^0.5.13",
     "mongoose": "^5.7.0",
     "morgan": "^1.8.1",
     "multer": "^1.4.2",
     "nanoid": "^2.0.3",
     "pg": "^8.5.1",
     "pg-hstore": "^2.3.3",
     "pm2": "^2.4.6",
     "query-string": "^7.0.0",
     "rabbit-event-source": "1.0.0",
     "request": "^2.88.2",
     "sequelize": "^6.3.5",
     "sharp": "^0.30.6",
     "uuid": "^9.0.0",
     "xlsx": "^0.16.9"
   },
   "devDependencies": {
     "@types/archiver": "^5.3.2",
     "babel-cli": "^6.26.0",
     "babel-plugin-istanbul": "^4.1.6",
     "babel-preset-env": "^1.6.1",
     "chai": "^4.1.0",
     "chai-as-promised": "^7.1.1",
     "coveralls": "^3.0.0",
     "eslint": "^4.2.0",
     "eslint-config-airbnb-base": "^12.0.1",
     "eslint-plugin-import": "^2.2.0",
     "husky": "^0.14.3",
     "mocha": "^3.3.0",
     "nodemon": "^1.11.0",
     "nyc": "^11.0.3",
     "rimraf": "^2.6.2",
     "sinon": "^6.1.0",
     "sinon-chai": "^3.0.0",
     "supertest": "^3.0.0"
   }
 }
@@ -0,0 +1 @@
+{"status":"error","message":"bad request"}
@@ -0,0 +1,45 @@
import torch

torch.manual_seed(2023)


def activation_func(x):
    # TODO Implement one of the following activation functions: sigmoid, tanh, ReLU, leaky ReLU
    epsilon = 0.01  # Only use this variable if you choose leaky ReLU
    result = None
    return result

def softmax(x):
    # TODO Implement the softmax function here
    result = None
    return result


# Define the size of each layer in the network
num_input = 784      # Number of nodes in the input layer (28x28)
num_hidden_1 = 128   # Number of nodes in hidden layer 1
num_hidden_2 = 256   # Number of nodes in hidden layer 2
num_hidden_3 = 128   # Number of nodes in hidden layer 3
num_classes = 10     # Number of nodes in the output layer

# Random input
input_data = torch.randn((1, num_input))
# Weights for inputs to hidden layer 1
W1 = torch.randn(num_input, num_hidden_1)
# Weights for hidden layer 1 to hidden layer 2
W2 = torch.randn(num_hidden_1, num_hidden_2)
# Weights for hidden layer 2 to hidden layer 3
W3 = torch.randn(num_hidden_2, num_hidden_3)
# Weights for hidden layer 3 to output layer
W4 = torch.randn(num_hidden_3, num_classes)

# and bias terms for hidden and output layers
B1 = torch.randn((1, num_hidden_1))
B2 = torch.randn((1, num_hidden_2))
B3 = torch.randn((1, num_hidden_3))
B4 = torch.randn((1, num_classes))

# TODO Calculate the forward pass of the network here. The result should have the shape [1, 10]
# Don't forget to check that the sum of the result equals 1.0
result = None
print(result)
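
For reference, a minimal sketch of how the TODO sections above could be completed, assuming sigmoid is chosen as the activation function; `input_data`, the weights `W1`–`W4`, and the biases `B1`–`B4` are the tensors defined in the skeleton, so the snippet is meant to be appended to that file rather than run on its own.

```python
import torch

def activation_func(x):
    # Sigmoid activation: squashes every element into (0, 1)
    return torch.sigmoid(x)

def softmax(x):
    # Subtract the row max for numerical stability, then normalise so each row sums to 1.0
    exps = torch.exp(x - x.max(dim=1, keepdim=True).values)
    return exps / exps.sum(dim=1, keepdim=True)

# Forward pass through the three hidden layers, softmax on the output layer.
# Reuses input_data, W1..W4 and B1..B4 from the skeleton above.
h1 = activation_func(input_data @ W1 + B1)
h2 = activation_func(h1 @ W2 + B2)
h3 = activation_func(h2 @ W3 + B3)
result = softmax(h3 @ W4 + B4)   # shape [1, 10]
print(result.sum())              # should print 1.0 (up to floating-point error)
```

Swapping in tanh, ReLU, or leaky ReLU (using the provided `epsilon` as the negative slope) only changes `activation_func`; it is the softmax on the final layer that makes the ten outputs sum to 1.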
@@ -0,0 +1 @@
+{"status":"error","message":"bad request"}
@@ -1,100 +1,130 @@ image.controller.js
 /* eslint-disable camelcase */
-
+import { pick } from 'lodash';
 import path from 'path';
 import Busboy from 'busboy';
 import fs from 'fs-extra';
 import multer from 'multer';
+import messages from '../../../config/messages';
 import httpStatus from 'http-status';
 // import moment from 'moment-timezone';
 import { handler as ErrorHandel } from '../../middlewares/errors';
 import ApiException from '../../../common/utils/APIException';
 // import eventBus from '../../../common/services/event-bus';
+import File from "../../../common/models/file.model";
+import { handler as ErrorHandler } from '../../middlewares/error';
 // import Image from '../../../common/models/image.model';
 import {
   cdn as cdnConfig,
   storage as storageConfig
 } from '../../../config/vars';
 /** storage will create folder when new date */


 const filePath = `${storageConfig.uri}`;


 const replaceBaseUrl = (location) =>
   location.replace(storageConfig.uri, cdnConfig.uri);


 exports.uploadSingle = (req, res, next) => {
   try {
     if (!req.file) {
       throw new ApiException({
         status: httpStatus.BAD_REQUEST,
         message: 'Invalid file!'
       });
     }
     /** resize image uploaded */
     // eventBus.emit(Image.Events.IMAGE_CREATED, req.file);
+    // await File.
     return res.json({ url: replaceBaseUrl(req.file.path) });
   } catch (ex) {
     return ErrorHandel(ex, req, res, next);
   }
 };

 /**
  * Upload multiple
  *
  * @param {Formdata} file
  */
 exports.uploadMultiple = (req, res, next) => {
   try {
-    if (!req.files) {
-      throw new ApiException({
-        status: httpStatus.BAD_REQUEST,
-        message: 'Invalid file!'
-      });
-    }
+    // if (!req.files) {
+    //   throw new ApiException({
+    //     status: httpStatus.BAD_REQUEST,
+    //     message: 'Invalid file!'
+    //   });
+    // }
     const urls = [];
+    const user = req.locals.user;
+    let data = {};
+
+    // const file = req.files;
+
     for (let index = 0; index < req.files.length; index += 1) {
       urls.push(replaceBaseUrl(req.files[index].path));
+      data.url = replaceBaseUrl(req.files[index].path);
+      data.name = req.files[index].originalname;
+      data.created_by = pick(user, ['id', 'name']);
+      // File.create()
+
       /** resize image uploaded */
       // eventBus.emit(Image.Events.IMAGE_CREATED, req.files[index]);
+      File.create(data)
+        .then(result => {
+          res.json({
+            code: 0,
+            message: messages.UPLOAD_SUCCESS,
+          });
+        }).catch(err => {
+          ErrorHandler(err, req, res, next);
+        });
     }
-    return res.json({ urls: urls });
+    // console.log(urls);
+    // return res.json({ urls: urls, data: data });
   } catch (ex) {
+    console.log("error");
     return ErrorHandel(ex, req, res, next);
   }
 };

 exports.uploadFile = (req, res, next) => {
   try {
     console.log(req.query);
     let filename = null;
     const cfg = { highWaterMark: 1048576 * 2 }; // 20 mb
     cfg.headers = req.headers;
     req.busboy = Busboy(cfg);
     const pathName = `${filePath}/${req.query.path}`;
     multer({
       dest: `${filePath}`,
       limits: {
         fileSize: 1024 * 1024 * 2048 // 2048MB
       },
     });
     req.pipe(req.busboy); // Pipe it through busboy
     return req.busboy.on('file', (name, file, info) => {
       filename = info.filename;

       // Create a write stream for the new file
       const fstream = fs.createWriteStream(path.join(pathName, filename));
       // Pipe it through
       file.pipe(fstream);

       // On finish of the upload
       fstream.on('close', () => {
         console.log(`Upload of '${filename}' finished`);
         return res.json({ url: replaceBaseUrl(`${pathName}/${filename}`) });
       });
     });
   } catch (ex) {
     return ErrorHandel(ex, req, res, next);
   }
 };
@@ -1,249 +1,360 @@ path.controller.js
 /* eslint-disable import/no-extraneous-dependencies */
 // import httpStatus from 'http-status';
 import fs from 'fs';
 import archiver from 'archiver';
-import { exec } from 'child_process';
 import multer from 'multer';
 import path from 'path';
-import { cloneDeep } from 'lodash';
 import { handler as ErrorHandel } from '../../middlewares/errors';
-// import ApiException from '../../../common/utils/APIException';
+import File from "../../../common/models/file.model";
+import { handler as ErrorHandler } from '../../middlewares/error';
+import ApiException from '../../../common/utils/APIException';
 import {

   cdn as cdnConfig,
   storage as storageConfig
 } from '../../../config/vars';
 import uploadAdapter from '../../../common/services/adapters/upload-adapter';
+import { cloneDeep, forEach } from 'lodash';
+import APIError from '../../middlewares/ApiError';
+import { example } from 'joi';
+import { where } from 'sequelize';
+import messages from '../../../config/messages';

 function deleteFolderRecursive(folderPath) {
   if (fs.existsSync(folderPath)) {
     fs.readdirSync(folderPath).forEach((file) => {
       const curPath = path.join(folderPath, file);
       if (fs.lstatSync(curPath).isDirectory()) { // delete folder
         deleteFolderRecursive(curPath); // recursively call deleteFolderRecursive function
         fs.rmdirSync(curPath);
       } else { // delete file
         fs.unlinkSync(curPath);
       }
     });
   }
 }
 /**
  * get file and folder
  *
  * @param {Formdata} file
  */
+
+
-exports.get = (req, res, next) => {
+exports.get = async (req, res, next) => {
   try {
     const user = req.user;
-
-    let path = `${storageConfig.uri}/${user.id}`;
+    const user_infor = {
+      id: user.id,
+      name: user.name
+    };
+    console.log(user_infor);
+    let path = `${storageConfig.uri}/${user.id}`;
+    console.log(path);
     if (req.body.path) {
       path += req.body.path;
     }
-    // console.log(path);
     const listFile = [];
-    fs.readdir(path, (err, files) => {
-      if (files && files.length > 0) {
-        files.forEach((item) => {
-          listFile.push({
-            name: item,
-            path: `${cdnConfig.uri}/${user.id}${req.body.path}/${item}`,
-            isFolder: fs.lstatSync(`${storageConfig.uri}/${user.id}/${req.body.path}/${item}`).isDirectory()
-          });
-        });
-      }
-      return res.json({
-        code: 0,
-        data: listFile
-      });
-    });
+    // fs.readdir(path, (err, files) => {
+    //   if (files && files.length > 0) {
+    //     files.forEach((item) => {
+    //       listFile.push({
+    //         name: item,
+    //         path: `${cdnConfig.uri}/${user.id}/${req.body.path}/${item}`,
+    //         isFolder: fs.lstatSync(`${storageConfig.uri}/${user.id}/${req.body.path}/${item}`).isDirectory(),
+    //       });
+    //     });
+    //   }
+    //   return res.json({
+    //     code: 0,
+    //     data: listFile
+    //   });
+    // });
+
+    await File.findAll({
+      where: {
+        is_active: true,
+        created_by: user_infor
+      }
+    }).then(result => {
+      let path = [];
+      result.forEach(data => {
+        console.log(data.created_by.id);
+        // console.log(`${storageConfig.uri}/${data.created_by.id}/${req.body.path}/${data.name}`);
+        const file = {
+          name: data.name,
+          path: data.url,
+          isFolder: fs.lstatSync(`${storageConfig.uri}/${data.created_by.id}/${req.body.path}/${data.name}`).isDirectory(),
+          download_count: data.download_count
+        };
+        path.push(file);
+      });
+      return res.json({
+        code: 0,
+        data: path
+      });
+    }).catch(ex => {
+      ErrorHandler(ex, req, res, next);
+    });
+
     // test local
     // fs.readdir(path, (err, files) => {
     //   if (files && files.length > 0) {
     //     files.forEach((item) => {
     //       listFile.push({
     //         name: item,
+    //         path: `${cdnConfig.uri}/${user.id}${req.body.path}/${item}`,
+
     //         path: `${storageConfig.uri}/${user.id}${req.body.path}/${item}`,
     //         isFolder: fs.lstatSync(`${storageConfig.uri}/${user.id}/${req.body.path}/${item}`).isDirectory()
     //       });
     //     });
     //   }
     //   return res.json({
     //     code: 0,
     //     data: listFile
     //   });
     // });


     return null;
     /** resize image uploaded */
   } catch (ex) {
     return ErrorHandel(ex, req, res, next);
   }
 };

 /**
  * get file and folder
  *
  * @param {Formdata} file
  */
 exports.create = (req, res, next) => {
   const user = req.user;
   let dir = `${user.id}`;
   const name_folder = cloneDeep(req.body.name).trim();
   if (req.body.path) {
     dir += req.body.path;
   }
   if (req.body.name) {
     dir += `/${name_folder}`;
   }
   if (!fs.existsSync(dir)) {
     uploadAdapter.createFolder({ path: dir });
   }
   return res.json({ code: 0, message: 'success' });
 };

 /**
  * get file and folder
  *
  * @param {Formdata} file
  */
 exports.update = (req, res, next) => {
   try {
     const oldPath = req.body.oldPath.replace(cdnConfig.uri, storageConfig.uri);
     const newPath = req.body.newPath.replace(cdnConfig.uri, storageConfig.uri);

     fs.rename(oldPath, newPath, (err) => {
       if (err) {
         console.log(err);
         return res.status(400).json({ code: 400, message: 'lỗi' });
       }
       return res.json({ code: 0, message: 'success' });
     });
     return null;
   } catch (ex) {
     return ErrorHandel(ex, req, res, next);
   }
 };


 exports.delete = (req, res, next) => {
   try {
     const user = req.user;
     const dir = `${storageConfig.uri_backup}/${user.id}`;
     multer({ dest: `${dir}` });
     const path = req.body.path.replace(cdnConfig.uri, storageConfig.uri);
     const newpath = req.body.path.replace(cdnConfig.uri, storageConfig.uri_backup);
     // fs.rm(path, { recursive: true }, err => {
     //   if (err) {
     //     return res.status(400).json({ code: 400, message: 'lỗi', detail: err });
     //   }
     //   return res.json({ code: 0, message: 'success' });
     // });
     fs.rename(path, newpath, (err) => {
       if (err) throw err;
       return res.json({ code: 0, message: 'success' });
     });
     return null;
   } catch (ex) {
     return ErrorHandel(ex, req, res, next);
   }
 };
 exports.download = async (req, res, next) => {
   try {
+
     const user = req.user;
-    const namefile = `${user.name.replace(/\s/g, '')}-${Date.now()}.zip`;
+    const namefile = `${user.name}-${Date.now()}.zip`;
     const dir = `${storageConfig.uri}/download/${user.id}/${namefile}`;
     const folder = `${storageConfig.uri}/download/${user.id}`;
     multer({ dest: `${folder}` });
     await deleteFolderRecursive(folder);
-    let command = ` zip -r ${dir} `;
-
-    // const output = fs.createWriteStream(dir);
-    // const archive = archiver('zip', {
-    //   zlib: { level: 9 } // Sets the compression level.
-    // });
-
-
-    // archive.pipe(output);
+    const output = fs.createWriteStream(dir);
+    const archive = archiver('zip', {
+      zlib: { level: 9 } // Sets the compression level.
+    });
+
+    const user_infor = {
+      id: user.id,
+      name: user.name
+    };
+    archive.pipe(output);
     if (req.body.data) {
-      req.body.data.forEach((e) => {
-        const path1 = e.path.replace(cdnConfig.uri, storageConfig.uri).replace(/ /g, '\\ ');
-
-        command += `${path1} `;
-      });
-    }
-    console.log(command);
-
-
-    exec(command, (error, stdout, stderr) => {
-      if (error) {
-        console.error(`Command execution error: ${error.message}`);
-        return res.status(400).json({
-          code: 400,
-          message: 'error'
-        });
-      }
-      if (stderr) {
-        console.error(`Command stderr: ${stderr}`);
-        return res.status(400).json({
-          code: 400,
-          message: 'error'
-        });
-      }
-      console.log(`Command output: ${stdout}`);
-      return res.json({
-        code: 0,
-        data: {
-          name: namefile,
-          path: `${cdnConfig.uri}/download/${user.id}/${namefile}`,
-        }
-      });
-    });
+      req.body.data.forEach(async (e) => {
+        // const path = e.path.replace(cdnConfig.uri, storageConfig.uri).replace(/ /g, '\\ ');
+        // const path = e.path.replace(cdnConfig.uri, storageConfig.uri);
+        // console.log('path: ', path);
+        let downnload_count_list = new Map();
+
+        await File.findOne({
+          where: {
+            name: e.name,
+            url: e.path,
+            created_by: user_infor,
+            is_active: true
+          }
+        }).then(result => {
+          // add one to the download count for each selected file
+          downnload_count_list.set(result.id, result.download_count + 1);
+
+          const storage = result.url.replace(cdnConfig.uri, storageConfig.uri).replace(/ /g, '\\ ');
+          if (e.isFolder) {
+            archive.directory(storage, e.name);
+          } else {
+            archive.file(storage, { name: e.name });
+          }
+          archive.finalize();
+          next();
+        }).catch(ex => {
+          console.log(ex);
+          next();
+          // ErrorHandel(ex, req, res, next);
+        });
+
+        downnload_count_list.forEach(async (value, key) => {
+          const new_download_count_value = { download_count: value };
+
+          await File.update(
+            new_download_count_value, {
+              where: {
+                id: key,
+                is_active: true
+              }
+            }
+          ).then(result => {
+            console.log("download count add success");
+            next();
+          }).catch(ex => {
+            console.log("error2");
+            next();
+            // ErrorHandel(ex, req, res, next);
+          });
+        });
+      });
+    }
+
+    return res.json({
+      code: 0,
+      data: {
+        name: namefile,
+        path: `${cdnConfig.uri}/download/${user.id}/${namefile}`
+      }
+    });
   } catch (ex) {
     return ErrorHandel(ex, req, res, next);
   }
 };


 exports.forceDelete = (req, res, next) => {
   try {
     const path = req.body.path.replace(cdnConfig.uri, storageConfig.uri);
     // const newpath = req.body.path.replace(cdnConfig.uri, storageConfig.uri_backup);
     fs.rm(path, { recursive: true }, err => {
       if (err) {
         return res.status(400).json({ code: 400, message: 'lỗi', detail: err });
       }
       return res.json({ code: 0, message: 'success' });
     });

     return null;
   } catch (ex) {
     return ErrorHandel(ex, req, res, next);
   }
 };

 exports.deleteMultiple = (req, res, next) => {
   try {
-    const user = req.user;
-    const dir = `${storageConfig.uri_backup}/${user.id}/${Date.now()}`;
-    multer({ dest: `${dir}` });
+    // const dir = `${storageConfig.uri_backup}/${user.id}/${Date.now()}`;
+    // multer({ dest: `${dir}` });
     if (req.body.data) {
-      req.body.data.forEach((e) => {
-        const path = e.path.replace(cdnConfig.uri, storageConfig.uri);
-        let newpath = e.path.replace(cdnConfig.uri, storageConfig.uri_backup);
-        const split = newpath.split('/');
-        newpath = `${dir}/${split[split.length - 1]}`;
-        fs.rename(path, newpath, (err) => {
-          if (err) throw err;
-          return { code: 0, message: 'success' };
-        });
+      req.body.data.forEach(async (e) => {
+        // const path = e.path.replace(cdnConfig.uri, storageConfig.uri);
+        // let newpath = e.path.replace(cdnConfig.uri, storageConfig.uri_backup);
+        // const split = newpath.split('/');
+        // newpath = `${dir}/${split[split.length - 1]}`;
+        // fs.rename(path, newpath, (err) => {
+        //   if (err) throw err;
+        //   return { code: 0, message: 'success' };
+        // });
+        console.log(e.path);
+        const user = req.user;
+        const user_infor = {
+          id: user.id,
+          name: user.name
+        };
+        const data = { is_active: false };
+        await File.update(
+          data,
+          {
+            where: {
+              name: e.name,
+              created_by: user_infor,
+              url: e.path
+            }
+          }
+        ).then(result => {
+          console.log("success");
+        }).catch(ex => {
+          console.log(ex);
+        });
+
+        // console.log(e);
       });
     }
-    return res.json({ code: 0, message: 'success' });
+    return res.json({ code: 0, message: messages.REMOVE_SUCCESS });
   } catch (ex) {
     return ErrorHandel(ex, req, res, next);
   }
 };
@@ -1,189 +1,190 @@ user.controller.js
 import { hash } from 'bcryptjs';
 import { pick } from 'lodash';
 // import httpStatus from 'http-status';
 import messages from '../../../config/messages';
 // import { hash, compare } from 'bcryptjs';
 import { handler as ErrorHandler } from '../../middlewares/error';
 import User from '../../../common/models/user.model';
 import uploadAdapter from '../../../common/services/adapters/upload-adapter';
 /**
  * Create
  *
  * @public
  * @param {StorySchema} body
  * @returns {Promise<StorySchema>, APIException>}
  */
 exports.create = async (req, res, next) => {
   // transform data
   req.body.created_by = pick(req.user, ['id', 'name']);
   const params = req.body;
   params.type = User.Types.INDIVIDUAL;
   params.service = User.Services.INDIVIDUAL;
   // save data
+  console.log(req.body);
   await User.create(req.body)
     .then(data => {
       uploadAdapter.createDefaultFolder({ id: data.id });
       res.json({
         code: 0,
         message: messages.CREATE_SUCCESS,
         data: User.transform(data)
       });
     }).catch(ex => {
       ErrorHandler(ex, req, res, next);
     });
 };

 /**
  * List
  *
  * @public
  * @param {StorySchema} query
  * @returns {Promise<StorySchema[]>, APIException>}
  */
 exports.list = async (req, res, next) => {
   req.query.services = User.Services.USER;
   // console.log(req.query.services);
   // User.list( {
   //   service : "user",
   //   is_active: true
   // }
   // ).then(result => {
   //   res.json({
   //     code: 0,
   //     count: req.totalRecords,
   //     data: result.map(
   //       x => User.transform(x)
   //     )
   //   });
   // }).catch(ex => {
   //   ErrorHandler(ex, req, res, next);
   // });


   return User.findAll({
     where: {
       service: "user",
       is_active: true
     }
   }).then(result => {
     res.json({
       code: 0,
       count: req.totalRecords,
       data: result.map(
         x => User.transform(x)
       )
     });
   }).catch(ex => {
     ErrorHandler(ex, req, res, next);
   });
 };

 /**
  * Detail
  *
  * @public
  * @param {params} userId,
  * @returns {Promise<StorySchema>, APIException>}
  */
 exports.get = async (req, res, next) => res.json({ data: User.transform(req.locals.user) });

 /**
  * Update
  *
  * @public
  * @param {params} userId
  * @returns {Promise<any>, APIException>}
  */
 exports.update = async (req, res, next) => {
   const { user } = req.locals;
   const dataChanged = User.getChangedProperties(req.body);
   const new_properties = pick(req.body, dataChanged);
   // const updateUser = Object.assign(
   //   user,
   //   pick(req.body, dataChanged)
   // );

   // const currentUser = await User.get(user.id)
   // console.log(dataChanged);

   return User.update(
     new_properties,
     {
       where: {
         id: user.id
       }
     }
   ).then(() => {
     res.json({
       code: 0,
       message: messages.UPDATE_SUCCESS,
       // dataChanged: dataChanged
     });
   }).catch(ex => {
     ErrorHandler(ex, req, res, next);
   });
 };

 /**
  * delete
  *
  * @public
  * @param {params} userId
  * @returns {Promise<any>, APIException>}
  */
 exports.delete = async (req, res, next) => {
   const { user } = req.locals;
   // const new_user = Object.assign(
   //   user,
   //   isactive : false
   // )

   return User.update(
     { is_active: false },
     {
       where: {
         id: user.id
       }
     }
   ).then(() => {
     res.json({
       code: 0,
       message: messages.REMOVE_SUCCESS
     });
   }).catch(ex => {
     ErrorHandler(ex, req, res, next);
   });
 };

 exports.getStaffPermission = async (req, res, next) => {
   const { story } = req.locals;
   return res.json({
     code: 0,
     data: story
   });
 };
 exports.updatePassword = async (req, res, next) => {
   const { new_password } = req.body;
   const { user } = req.locals;
   // console.log(user);
   if (user) {
     const rounds = 10;
     const new_pass = await hash(new_password, rounds);
     return User.update(
       { password: new_pass },
       {
         where: {
           id: user.id
         }
       },
     ).then(async () => {
       res.json({
         code: 0,
         message: messages.UPDATE_SUCCESS
       });
     }).catch(ex => {
       ErrorHandler(ex, req, res, next);
     });
   };
+};
@@ -1,17 +1,23 @@ image.middleware.js

 import { handler as ErrorHandler } from './errors';
 import Image from '../../common/models/image.model';
 /**
  * Load image and append to req.
  * @public
  */
 exports.load = async (req, res, next) => {
   try {
     const image = await Image.getImageById(req.params.id);
     req.locals = req.locals ? req.locals : {};
     req.locals.image = image;
     return next();
   } catch (error) {
     return ErrorHandler(error, req, res);
   }
 };
+
+
+// check whether the user has uploaded any file
+exports.checkExist = async (req, res, next) => {
+
+}
@@ -1,38 +1,42 @@ image.route.js
 import express from 'express';
 // import validate from 'express-validation';
 import { authorize } from '../../middlewares/auth.middleware';
 import Permissions from '../../../common/utils/Permissions';
+import userMiddleware from '../../middlewares/user.middleware';

 import { uploader } from '../../../common/services/adapters/upload-adapter';
 import controller from '../../controllers/v1/image.controller';
 // import {
 //   uploadValidation
 // } from '../../validations/v1/image.validation';

 const router = express.Router();

 router
   .route('/upload-single')
   .post(
     // authorize([Permissions.IMAGE_UPLOAD]),
     // validate(uploadValidation),
     uploader.single('file'),
     controller.uploadSingle
   );

 router
-  .route('/upload-multiple')
+  .route('/upload-multiple/:id')
   .post(
     // authorize([Permissions.IMAGE_UPLOAD]),
     // validate(uploadValidation),
+    userMiddleware.load,
     uploader.array('file', 100),
     controller.uploadMultiple
   );
 router
   .route('/upload-file')
   .post(
     // authorize([Permissions.IMAGE_UPLOAD]),
     controller.uploadFile
   );

+
+
 export default router;
@@ -1,51 +1,54 @@ path.route.js
 import express from 'express';
 import { authorize } from '../../middlewares/auth.middleware';
 import controller from '../../controllers/v1/path.controller';
 import Permissions from '../../../common/utils/Permissions';

 const router = express.Router();

 router
   .route('/')
   .post(
     authorize([Permissions.USER]),
     controller.get
   );
 router
   .route('/create')
   .post(
     authorize([Permissions.USER]),
     controller.create
   );
 router
   .route('/update')
   .put(
     authorize([Permissions.USER]),
     controller.update
   );
 router
   .route('/delete')
   .patch(
     authorize([Permissions.USER]),
     controller.delete
   );

 router
   .route('/download')
   .put(
     authorize([Permissions.USER]),
     controller.download
   );
 router
   .route('/force-delete')
   .patch(
     authorize([Permissions.USER]),
     controller.forceDelete
   );
 router
   .route('/delete-multiple')
   .patch(
     authorize([Permissions.USER]),
     controller.deleteMultiple
   );
+
+
+
 export default router;
@ -1,408 +1,409 @@ |
||||
/* eslint-disable camelcase */ |
||||
import httpStatus from 'http-status'; |
||||
import { Model, DataTypes, Op } from 'sequelize'; |
||||
import { isEqual, isNil, isUndefined, omitBy, pick } from 'lodash'; |
||||
import moment from 'moment-timezone'; |
||||
|
||||
import { serviceName } from '../../config/vars'; |
||||
import postgres from '../../config/postgres'; |
||||
import APIError from '../utils/APIException'; |
||||
|
||||
/** |
||||
* Create connection |
||||
*/ |
||||
const { sequelize } = postgres; |
||||
class FileConfig extends Model { } |
||||
|
||||
const PUBLIC_FIELDS = [ |
||||
'name', |
||||
'type', |
||||
'group', |
||||
'config' |
||||
]; |
||||
|
||||
FileConfig.Groups = { |
||||
PRODUCT: 'product', |
||||
PRODUCT_OPTION: 'product-option', |
||||
PRODUCT_PRICE: 'product-price', |
||||
ORDER: 'order', |
||||
ORDER_NESTED: 'order-nested', |
||||
INVOICE: 'invoice', |
||||
INVOICE_NESTED: 'invoice-nested', |
||||
RETURN: 'return', |
||||
RETURN_NESTED: 'return-nested', |
||||
DELIVERY: 'delivery', |
||||
IMPORT: 'import', |
||||
IMPORT_NESTED: 'import-nested', |
||||
STOCK_TAKE: 'stock-take', |
||||
STOCK_TAKE_NESTED: 'stock-take-nested', |
||||
TRANSFER: 'transfer', |
||||
TRANSFER_NESTED: 'transfer-nested', |
||||
EXPORT: 'export', |
||||
EXPORT_NESTED: 'export-nested', |
||||
PAYMENT: 'payment', |
||||
CUSTOMER: 'customer', |
||||
SUPPLIER: 'supplier', |
||||
DELIVERY_PAYMENT: 'delivery-payment', |
||||
DELIVERY_PAYMENT_NESTED: 'delivery-payment-nested', |
||||
/** Sale Report */ |
||||
SALE_REPORT_TIME: 'sale-report-time', |
||||
SALE_REPORT_TIME_NESTED: 'sale-report-time-nested', |
||||
SALE_REPORT_TIME_INVOICE: 'sale-report-time-invoice', |
||||
SALE_REPORT_PROFIT: 'sale-report-profit', |
||||
SALE_REPORT_PROFIT_NESTED: 'sale-report-profit-nested', |
||||
SALE_REPORT_PROFIT_INVOICE: 'sale-report-profit-invoice', |
||||
SALE_REPORT_PROFIT_PRODUCT: 'sale-report-profit-product', |
||||
SALE_REPORT_DISCOUNT: 'sale-report-discount', |
||||
SALE_REPORT_DISCOUNT_NESTED: 'sale-report-discount-nested', |
||||
SALE_REPORT_DISCOUNT_INVOICE: 'sale-report-discount-invoice', |
||||
SALE_REPORT_RETURN: 'sale-report-return', |
||||
SALE_REPORT_RETURN_NESTED: 'sale-report-return-nested', |
||||
SALE_REPORT_RETURN_INVOICE: 'sale-report-return-invoice', |
||||
SALE_REPORT_STAFF: 'sale-report-staff', |
||||
SALE_REPORT_STAFF_NESTED: 'sale-report-staff-nested', |
||||
SALE_REPORT_STAFF_TIME: 'sale-report-staff-time', |
||||
SALE_REPORT_STAFF_TIME_NESTED: 'sale-report-staff-time-nested', |
||||
SALE_REPORT_STAFF_INVOICE: 'sale-report-staff-invoice', |
||||
SALE_REPORT_STORE: 'sale-report-store', |
||||
SALE_REPORT_STORE_NESTED: 'sale-report-store-nested', |
||||
SALE_REPORT_STORE_TIME: 'sale-report-store-time', |
||||
SALE_REPORT_STORE_TIME_NESTED: 'sale-report-store-time-nested', |
||||
SALE_REPORT_STORE_INVOICE: 'sale-report-store-invoice', |
||||
|
||||
/** product report */ |
||||
PRODUCT_REPORT_SALE: 'product-report-sale', |
||||
PRODUCT_REPORT_SALE_SPECIFIC: 'product-report-sale-specific', |
||||
PRODUCT_REPORT_SALE_GROUP_CATEGORIES: 'product-report-sale-group-categories', |
||||
PRODUCT_REPORT_SALE_DETAIL: 'product-report-sale-detail', |
||||
|
||||
PRODUCT_REPORT_PROFIT: 'product-report-profit', |
||||
PRODUCT_REPORT_PROFIT_GROUP_CATEGORIES: 'product-report-profit-group-categories', |
||||
|
||||
PRODUCT_REPORT_STOCK_VALUE: 'product-report-stock-value', |
||||
PRODUCT_REPORT_STOCK_VALUE_GROUP_CATEGORIES: 'product-report-stock-value-group-categories', |
||||
PRODUCT_REPORT_STOCK_VALUE_DETAIL: 'product-report-stock-value-detail', |
||||
PRODUCT_REPORT_STOCK_VALUE_STORE: 'product-report-stock-value-store', |
||||
PRODUCT_REPORT_STOCK_VALUE_GENERAL: 'product-report-stock-value-general', |
||||
|
||||
PRODUCT_REPORT_STOCK: 'product-report-stock', |
||||
PRODUCT_REPORT_STOCK_STORE: 'product-report-stock-store', |
||||
PRODUCT_REPORT_STOCK_ONE_STORE: 'product-report-stock-one-store', |
||||
PRODUCT_REPORT_STOCK_GROUP_CATEGORIES: 'product-report-stock-group-categories', |
||||
PRODUCT_REPORT_STOCK_MORE_STORE: 'product-report-stock-more-store', |
||||
|
||||
PRODUCT_REPORT_STOCK_DETAIL: 'product-report-stock-detail', |
||||
PRODUCT_REPORT_STOCK_DETAIL_STORE: 'product-report-stock-detail-store', |
||||
PRODUCT_REPORT_STOCK_DETAIL_DETAIL: 'product-report-stock-detail-detail', |
||||
PRODUCT_REPORT_STOCK_DETAIL_GROUP_CATEGORIES: 'product-report-stock-detail-group-categories', |
||||
PRODUCT_REPORT_STOCK_DETAIL_GENERAL: 'product-report-stock-detail-general', |
||||
|
||||
PRODUCT_REPORT_STAFF: 'product-report-staff', |
||||
PRODUCT_REPORT_STAFF_SPECIFIC: 'product-report-staff-specific', |
||||
PRODUCT_REPORT_STAFF_DETAIL: 'product-report-staff-detail', |
||||
PRODUCT_REPORT_STAFF_GROUP_CATEGORIES: 'product-report-staff-group-categories', |
||||
|
||||
PRODUCT_REPORT_EXPORT: 'product-report-export', |
||||
PRODUCT_REPORT_EXPORT_SPECIFIC: 'product-report-export-specific', |
||||
PRODUCT_REPORT_EXPORT_DETAIL: 'product-report-export-detail', |
||||
PRODUCT_REPORT_EXPORT_GROUP_CATEGORIES: 'product-report-export-group-categories', |
||||
|
||||
PRODUCT_REPORT_CUSTOMER: 'product-report-customer', |
||||
PRODUCT_REPORT_CUSTOMER_SPECIFIC: 'product-report-customer-specific', |
||||
PRODUCT_REPORT_CUSTOMER_DETAIL: 'product-report-customer-detail', |
||||
PRODUCT_REPORT_CUSTOMER_GROUP_CATEGORIES: 'product-report-customer-group-categories', |
||||
|
||||
PRODUCT_REPORT_SUPPLIER: 'product-report-supplier', |
||||
PRODUCT_REPORT_SUPPLIER_DETAIL: 'product-report-supplier-detail', |
||||
PRODUCT_REPORT_SUPPLIER_SPECIFIC: 'product-report-supplier-specific', |
||||
PRODUCT_REPORT_SUPPLIER_GROUP_CATEGORIES: 'product-report-supplier-group-categories', |
||||
|
||||
PRODUCT_REPORT_STOCK_SPECIFIC: 'product-report-stock-specific', |
||||
PRODUCT_REPORT_STOCK_DETAIL_SPECIFIC: 'product-report-stock-detail-specific', |
||||
PRODUCT_REPORT_CUSTOMER_INVOICE: 'product-report-customer-invoice', |
||||
PRODUCT_REPORT_STAFF_INVOICE: 'product-report-staff-invoice', |
||||
|
||||
/** start end of day report */ |
||||
END_OF_DAY_REPORT_SALE: 'end-of-day-report-sale', |
||||
END_OF_DAY_REPORT_SALE_DETAIL: 'end-of-day-report-sale-detail', |
||||
END_OF_DAY_REPORT_SALE_DETAIL_TIME: 'end-of-day-report-sale-detail-time', |
||||
END_OF_DAY_REPORT_PAYMENT: 'end-of-day-report-payment', |
||||
END_OF_DAY_REPORT_PRODUCT: 'end-of-day-report-product', |
||||
END_OF_DAY_REPORT_PRODUCT_DETAIL: 'end-of-day-report-product-detail', |
||||
}; |
||||
|
||||
FileConfig.Types = {
    IMPORT: 'import',
    EXPORT: 'export'
};

FileConfig.Configs = [
    'name', // file name
    'path', // file path
    'content' // file data
];
||||
|
||||
/** |
||||
* FileConfig Schema |
||||
* @public |
||||
*/ |
||||
FileConfig.init( |
||||
{ |
||||
id: { |
||||
type: DataTypes.INTEGER, |
||||
autoIncrement: true, |
||||
primaryKey: true |
||||
}, |
||||
name: { |
||||
type: DataTypes.STRING(255), |
||||
defaultValue: null |
||||
}, |
||||
type: { |
||||
type: DataTypes.STRING(50), |
||||
allowNull: false |
||||
}, |
||||
group: { |
||||
type: DataTypes.STRING(100), |
||||
defaultValue: null |
||||
}, |
||||
path: { |
||||
type: DataTypes.STRING(155), |
||||
defaultValue: null |
||||
}, |
||||
config: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null |
||||
}, |
||||
|
||||
// manager
|
||||
is_active: { |
||||
type: DataTypes.BOOLEAN, |
||||
defaultValue: true |
||||
}, |
||||
created_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
updated_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
created_by: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null // id | name
|
||||
} |
||||
}, |
||||
{ |
||||
timestamps: false, |
||||
schema: serviceName, |
||||
sequelize: sequelize, |
||||
modelName: 'file_config', |
||||
tableName: 'tbl_file_configs' |
||||
} |
||||
); |
||||
|
||||
/**
 * Register event emitter
 */
||||
FileConfig.Events = { |
||||
FILE_CONFIG_CREATED: `${serviceName}.file-config.created`, |
||||
FILE_CONFIG_UPDATED: `${serviceName}.file-config.updated`, |
||||
FILE_CONFIG_DELETED: `${serviceName}.file-config.deleted`, |
||||
}; |
||||
FileConfig.EVENT_SOURCE = `${serviceName}.file-config`; |
||||
|
||||
/** |
||||
* Add your |
||||
* - pre-save hooks |
||||
* - validations |
||||
* - virtuals |
||||
*/ |
||||
FileConfig.addHook('afterCreate', () => { }); |
||||
|
||||
FileConfig.addHook('afterUpdate', () => { }); |
||||
|
||||
FileConfig.addHook('afterDestroy', () => { }); |
||||
|
||||
/** |
||||
* Load query |
||||
* @param {*} params |
||||
*/ |
||||
function filterConditions(params) { |
||||
const options = omitBy(params, isNil); |
||||
options.is_active = true; |
||||
|
||||
// TODO: load condition
|
||||
if (options.name) { |
||||
options.name = { |
||||
[Op.iLike]: `%${options.name}%` |
||||
}; |
||||
} |
||||
|
||||
return options; |
||||
} |
||||
|
||||
/** |
||||
* Load sort query |
||||
* @param {*} sort_by |
||||
* @param {*} order_by |
||||
*/ |
||||
function sortConditions({ sort_by, order_by }) { |
||||
let sort = null; |
||||
switch (sort_by) { |
||||
case 'created_at': |
||||
sort = ['created_at', order_by]; |
||||
break; |
||||
case 'updated_at': |
||||
sort = ['updated_at', order_by]; |
||||
break; |
||||
default: sort = ['created_at', 'DESC']; |
||||
break; |
||||
} |
||||
return sort; |
||||
} |
||||
|
||||
/** |
||||
* Transform postgres model to expose object |
||||
*/ |
||||
FileConfig.transform = (params) => { |
||||
const transformed = {}; |
||||
const fields = [ |
||||
'id', |
||||
'name', |
||||
'type', |
||||
'group', |
||||
'config', |
||||
'created_by' |
||||
]; |
||||
fields.forEach((field) => { |
||||
transformed[field] = params[field]; |
||||
}); |
||||
|
||||
// pipe date
|
||||
const dateFields = [ |
||||
'created_at', |
||||
'updated_at' |
||||
]; |
||||
dateFields.forEach((field) => { |
||||
if (params[field]) { |
||||
transformed[field] = moment(params[field]).unix(); |
||||
} else { |
||||
transformed[field] = null; |
||||
} |
||||
}); |
||||
|
||||
return transformed; |
||||
}; |
||||
|
||||
/** |
||||
* Get all changed properties |
||||
*/ |
||||
FileConfig.getChangedProperties = ({ newModel, oldModel }) => { |
||||
const changedProperties = []; |
||||
const allChangableProperties = [ |
||||
'name', |
||||
'type', |
||||
'group', |
||||
'config', |
||||
'status', |
||||
'status_name' |
||||
]; |
||||
if (!oldModel) { |
||||
return allChangableProperties; |
||||
} |
||||
|
||||
allChangableProperties.forEach((field) => { |
||||
if ( |
||||
!isUndefined(newModel[field]) && |
||||
!isEqual(newModel[field], oldModel[field]) |
||||
) { |
||||
changedProperties.push(field); |
||||
} |
||||
}); |
||||
|
||||
return changedProperties; |
||||
}; |
||||
|
||||
/**
 * Detail
 *
 * @public
 * @param {Object} operation - Lookup by { type, group }.
 */
||||
FileConfig.get = async (operation) => { |
||||
try { |
||||
const data = await FileConfig.findOne({ |
||||
where: { |
||||
type: operation.type, |
||||
group: operation.group, |
||||
is_active: true |
||||
} |
||||
}); |
||||
if (!data) { |
||||
throw new APIError({ |
||||
status: httpStatus.NOT_FOUND, |
||||
message: 'Không tìm thấy file!' |
||||
}); |
||||
} |
||||
return data; |
||||
} catch (ex) { |
||||
throw ex; |
||||
} |
||||
}; |
||||
|
||||
/**
 * List file configs in descending order of 'created_at' timestamp.
 *
 * @param {number} skip - Number of records to be skipped.
 * @param {number} limit - Limit number of records to be returned.
 * @returns {Promise<FileConfig[]>}
 */
||||
FileConfig.list = async ({ |
||||
name, |
||||
|
||||
// sort
|
||||
sort_by, |
||||
order_by, |
||||
skip = 0, |
||||
limit = 20, |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
const sort = sortConditions({ sort_by, order_by }); |
||||
return FileConfig.findAll({ |
||||
where: options, |
||||
order: [sort], |
||||
offset: skip, |
||||
limit: limit |
||||
}); |
||||
}; |
||||
|
||||
/**
 * Total records.
 *
 * @param {string} name - Optional name filter (partial, case-insensitive).
 * @returns {Promise<Number>}
 */
||||
FileConfig.totalRecords = ({ |
||||
name |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
|
||||
return FileConfig.count({ where: options }); |
||||
}; |
||||
|
||||
/**
 * Filter only allowed fields from FileConfig
 *
 * @param {Object} params
 */
||||
FileConfig.filterParams = (params) => pick(params, PUBLIC_FIELDS); |
||||
|
||||
/**
 * @typedef FileConfig
 */
||||
export default FileConfig; |
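The empty hooks and the Events map above suggest an event-dispatch pattern; the following is a minimal sketch of one way to wire them together. The import path and the publish() helper are assumptions, not part of this diff, and it relies on Sequelize keeping pre-update values on the instance.

import FileConfig from '../models/file-config.model'; // assumed path
import { publish } from '../services/event-bus'; // hypothetical helper

// Emit an update event carrying only the fields that actually changed.
FileConfig.addHook('afterUpdate', (instance) => {
    const changed = FileConfig.getChangedProperties({
        newModel: instance.dataValues,
        oldModel: instance._previousDataValues // Sequelize stores the pre-update values here
    });
    if (changed.length === 0) return;
    publish(FileConfig.Events.FILE_CONFIG_UPDATED, {
        source: FileConfig.EVENT_SOURCE,
        id: instance.id,
        changed
    });
});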
||||
/* eslint-disable camelcase */ |
||||
import httpStatus from 'http-status'; |
||||
import { Model, DataTypes, Op } from 'sequelize'; |
||||
import { isEqual, isNil, isUndefined, omitBy, pick } from 'lodash'; |
||||
import moment from 'moment-timezone'; |
||||
|
||||
import { serviceName } from '../../config/vars'; |
||||
import postgres from '../../config/postgres'; |
||||
import APIError from '../utils/APIException'; |
||||
|
||||
/** |
||||
* Create connection |
||||
*/ |
||||
const { sequelize } = postgres; |
||||
class FileConfig extends Model { } |
||||
|
||||
const PUBLIC_FIELDS = [ |
||||
'name', |
||||
'type', |
||||
'group', |
||||
'config' |
||||
]; |
||||
|
||||
FileConfig.Groups = { |
||||
PRODUCT: 'product', |
||||
PRODUCT_OPTION: 'product-option', |
||||
PRODUCT_PRICE: 'product-price', |
||||
ORDER: 'order', |
||||
ORDER_NESTED: 'order-nested', |
||||
INVOICE: 'invoice', |
||||
INVOICE_NESTED: 'invoice-nested', |
||||
RETURN: 'return', |
||||
RETURN_NESTED: 'return-nested', |
||||
DELIVERY: 'delivery', |
||||
IMPORT: 'import', |
||||
IMPORT_NESTED: 'import-nested', |
||||
STOCK_TAKE: 'stock-take', |
||||
STOCK_TAKE_NESTED: 'stock-take-nested', |
||||
TRANSFER: 'transfer', |
||||
TRANSFER_NESTED: 'transfer-nested', |
||||
EXPORT: 'export', |
||||
EXPORT_NESTED: 'export-nested', |
||||
PAYMENT: 'payment', |
||||
CUSTOMER: 'customer', |
||||
SUPPLIER: 'supplier', |
||||
DELIVERY_PAYMENT: 'delivery-payment', |
||||
DELIVERY_PAYMENT_NESTED: 'delivery-payment-nested', |
||||
/** Sale Report */ |
||||
SALE_REPORT_TIME: 'sale-report-time', |
||||
SALE_REPORT_TIME_NESTED: 'sale-report-time-nested', |
||||
SALE_REPORT_TIME_INVOICE: 'sale-report-time-invoice', |
||||
SALE_REPORT_PROFIT: 'sale-report-profit', |
||||
SALE_REPORT_PROFIT_NESTED: 'sale-report-profit-nested', |
||||
SALE_REPORT_PROFIT_INVOICE: 'sale-report-profit-invoice', |
||||
SALE_REPORT_PROFIT_PRODUCT: 'sale-report-profit-product', |
||||
SALE_REPORT_DISCOUNT: 'sale-report-discount', |
||||
SALE_REPORT_DISCOUNT_NESTED: 'sale-report-discount-nested', |
||||
SALE_REPORT_DISCOUNT_INVOICE: 'sale-report-discount-invoice', |
||||
SALE_REPORT_RETURN: 'sale-report-return', |
||||
SALE_REPORT_RETURN_NESTED: 'sale-report-return-nested', |
||||
SALE_REPORT_RETURN_INVOICE: 'sale-report-return-invoice', |
||||
SALE_REPORT_STAFF: 'sale-report-staff', |
||||
SALE_REPORT_STAFF_NESTED: 'sale-report-staff-nested', |
||||
SALE_REPORT_STAFF_TIME: 'sale-report-staff-time', |
||||
SALE_REPORT_STAFF_TIME_NESTED: 'sale-report-staff-time-nested', |
||||
SALE_REPORT_STAFF_INVOICE: 'sale-report-staff-invoice', |
||||
SALE_REPORT_STORE: 'sale-report-store', |
||||
SALE_REPORT_STORE_NESTED: 'sale-report-store-nested', |
||||
SALE_REPORT_STORE_TIME: 'sale-report-store-time', |
||||
SALE_REPORT_STORE_TIME_NESTED: 'sale-report-store-time-nested', |
||||
SALE_REPORT_STORE_INVOICE: 'sale-report-store-invoice', |
||||
|
||||
/** product report */ |
||||
PRODUCT_REPORT_SALE: 'product-report-sale', |
||||
PRODUCT_REPORT_SALE_SPECIFIC: 'product-report-sale-specific', |
||||
PRODUCT_REPORT_SALE_GROUP_CATEGORIES: 'product-report-sale-group-categories', |
||||
PRODUCT_REPORT_SALE_DETAIL: 'product-report-sale-detail', |
||||
|
||||
PRODUCT_REPORT_PROFIT: 'product-report-profit', |
||||
PRODUCT_REPORT_PROFIT_GROUP_CATEGORIES: 'product-report-profit-group-categories', |
||||
|
||||
PRODUCT_REPORT_STOCK_VALUE: 'product-report-stock-value', |
||||
PRODUCT_REPORT_STOCK_VALUE_GROUP_CATEGORIES: 'product-report-stock-value-group-categories', |
||||
PRODUCT_REPORT_STOCK_VALUE_DETAIL: 'product-report-stock-value-detail', |
||||
PRODUCT_REPORT_STOCK_VALUE_STORE: 'product-report-stock-value-store', |
||||
PRODUCT_REPORT_STOCK_VALUE_GENERAL: 'product-report-stock-value-general', |
||||
|
||||
PRODUCT_REPORT_STOCK: 'product-report-stock', |
||||
PRODUCT_REPORT_STOCK_STORE: 'product-report-stock-store', |
||||
PRODUCT_REPORT_STOCK_ONE_STORE: 'product-report-stock-one-store', |
||||
PRODUCT_REPORT_STOCK_GROUP_CATEGORIES: 'product-report-stock-group-categories', |
||||
PRODUCT_REPORT_STOCK_MORE_STORE: 'product-report-stock-more-store', |
||||
|
||||
PRODUCT_REPORT_STOCK_DETAIL: 'product-report-stock-detail', |
||||
PRODUCT_REPORT_STOCK_DETAIL_STORE: 'product-report-stock-detail-store', |
||||
PRODUCT_REPORT_STOCK_DETAIL_DETAIL: 'product-report-stock-detail-detail', |
||||
PRODUCT_REPORT_STOCK_DETAIL_GROUP_CATEGORIES: 'product-report-stock-detail-group-categories', |
||||
PRODUCT_REPORT_STOCK_DETAIL_GENERAL: 'product-report-stock-detail-general', |
||||
|
||||
PRODUCT_REPORT_STAFF: 'product-report-staff', |
||||
PRODUCT_REPORT_STAFF_SPECIFIC: 'product-report-staff-specific', |
||||
PRODUCT_REPORT_STAFF_DETAIL: 'product-report-staff-detail', |
||||
PRODUCT_REPORT_STAFF_GROUP_CATEGORIES: 'product-report-staff-group-categories', |
||||
|
||||
PRODUCT_REPORT_EXPORT: 'product-report-export', |
||||
PRODUCT_REPORT_EXPORT_SPECIFIC: 'product-report-export-specific', |
||||
PRODUCT_REPORT_EXPORT_DETAIL: 'product-report-export-detail', |
||||
PRODUCT_REPORT_EXPORT_GROUP_CATEGORIES: 'product-report-export-group-categories', |
||||
|
||||
PRODUCT_REPORT_CUSTOMER: 'product-report-customer', |
||||
PRODUCT_REPORT_CUSTOMER_SPECIFIC: 'product-report-customer-specific', |
||||
PRODUCT_REPORT_CUSTOMER_DETAIL: 'product-report-customer-detail', |
||||
PRODUCT_REPORT_CUSTOMER_GROUP_CATEGORIES: 'product-report-customer-group-categories', |
||||
|
||||
PRODUCT_REPORT_SUPPLIER: 'product-report-supplier', |
||||
PRODUCT_REPORT_SUPPLIER_DETAIL: 'product-report-supplier-detail', |
||||
PRODUCT_REPORT_SUPPLIER_SPECIFIC: 'product-report-supplier-specific', |
||||
PRODUCT_REPORT_SUPPLIER_GROUP_CATEGORIES: 'product-report-supplier-group-categories', |
||||
|
||||
PRODUCT_REPORT_STOCK_SPECIFIC: 'product-report-stock-specific', |
||||
PRODUCT_REPORT_STOCK_DETAIL_SPECIFIC: 'product-report-stock-detail-specific', |
||||
PRODUCT_REPORT_CUSTOMER_INVOICE: 'product-report-customer-invoice', |
||||
PRODUCT_REPORT_STAFF_INVOICE: 'product-report-staff-invoice', |
||||
|
||||
/** start end of day report */ |
||||
END_OF_DAY_REPORT_SALE: 'end-of-day-report-sale', |
||||
END_OF_DAY_REPORT_SALE_DETAIL: 'end-of-day-report-sale-detail', |
||||
END_OF_DAY_REPORT_SALE_DETAIL_TIME: 'end-of-day-report-sale-detail-time', |
||||
END_OF_DAY_REPORT_PAYMENT: 'end-of-day-report-payment', |
||||
END_OF_DAY_REPORT_PRODUCT: 'end-of-day-report-product', |
||||
END_OF_DAY_REPORT_PRODUCT_DETAIL: 'end-of-day-report-product-detail', |
||||
}; |
||||
|
||||
FileConfig.Types = {
    IMPORT: 'import',
    EXPORT: 'export'
};

FileConfig.Configs = [
    'name', // file name
    'path', // file path
    'content' // file data
];
||||
|
||||
/** |
||||
* FileConfig Schema |
||||
* @public |
||||
*/ |
||||
FileConfig.init( |
||||
{ |
||||
id: { |
||||
type: DataTypes.INTEGER, |
||||
autoIncrement: true, |
||||
primaryKey: true |
||||
}, |
||||
name: { |
||||
type: DataTypes.STRING(255), |
||||
defaultValue: null |
||||
}, |
||||
type: { |
||||
type: DataTypes.STRING(50), |
||||
allowNull: false |
||||
}, |
||||
group: { |
||||
type: DataTypes.STRING(100), |
||||
defaultValue: null |
||||
}, |
||||
path: { |
||||
type: DataTypes.STRING(155), |
||||
defaultValue: null |
||||
}, |
||||
config: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null |
||||
}, |
||||
|
||||
// manager
|
||||
is_active: { |
||||
type: DataTypes.BOOLEAN, |
||||
defaultValue: true |
||||
}, |
||||
created_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
updated_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
created_by: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null // id | name
|
||||
}, |
||||
|
||||
}, |
||||
{ |
||||
timestamps: false, |
||||
schema: serviceName, |
||||
sequelize: sequelize, |
||||
modelName: 'file_config', |
||||
tableName: 'tbl_file_configs' |
||||
} |
||||
); |
||||
|
||||
/**
 * Register event emitter
 */
||||
FileConfig.Events = { |
||||
FILE_CONFIG_CREATED: `${serviceName}.file-config.created`, |
||||
FILE_CONFIG_UPDATED: `${serviceName}.file-config.updated`, |
||||
FILE_CONFIG_DELETED: `${serviceName}.file-config.deleted`, |
||||
}; |
||||
FileConfig.EVENT_SOURCE = `${serviceName}.file-config`; |
||||
|
||||
/** |
||||
* Add your |
||||
* - pre-save hooks |
||||
* - validations |
||||
* - virtuals |
||||
*/ |
||||
FileConfig.addHook('afterCreate', () => { }); |
||||
|
||||
FileConfig.addHook('afterUpdate', () => { }); |
||||
|
||||
FileConfig.addHook('afterDestroy', () => { }); |
||||
|
||||
/** |
||||
* Load query |
||||
* @param {*} params |
||||
*/ |
||||
function filterConditions(params) { |
||||
const options = omitBy(params, isNil); |
||||
options.is_active = true; |
||||
|
||||
// TODO: load condition
|
||||
if (options.name) { |
||||
options.name = { |
||||
[Op.iLike]: `%${options.name}%` |
||||
}; |
||||
} |
||||
|
||||
return options; |
||||
} |
||||
|
||||
/** |
||||
* Load sort query |
||||
* @param {*} sort_by |
||||
* @param {*} order_by |
||||
*/ |
||||
function sortConditions({ sort_by, order_by }) { |
||||
let sort = null; |
||||
switch (sort_by) { |
||||
case 'created_at': |
||||
sort = ['created_at', order_by]; |
||||
break; |
||||
case 'updated_at': |
||||
sort = ['updated_at', order_by]; |
||||
break; |
||||
default: sort = ['created_at', 'DESC']; |
||||
break; |
||||
} |
||||
return sort; |
||||
} |
||||
|
||||
/** |
||||
* Transform postgres model to expose object |
||||
*/ |
||||
FileConfig.transform = (params) => { |
||||
const transformed = {}; |
||||
const fields = [ |
||||
'id', |
||||
'name', |
||||
'type', |
||||
'group', |
||||
'config', |
||||
'created_by' |
||||
]; |
||||
fields.forEach((field) => { |
||||
transformed[field] = params[field]; |
||||
}); |
||||
|
||||
// pipe date
|
||||
const dateFields = [ |
||||
'created_at', |
||||
'updated_at' |
||||
]; |
||||
dateFields.forEach((field) => { |
||||
if (params[field]) { |
||||
transformed[field] = moment(params[field]).unix(); |
||||
} else { |
||||
transformed[field] = null; |
||||
} |
||||
}); |
||||
|
||||
return transformed; |
||||
}; |
||||
|
||||
/** |
||||
* Get all changed properties |
||||
*/ |
||||
FileConfig.getChangedProperties = ({ newModel, oldModel }) => { |
||||
const changedProperties = []; |
||||
const allChangableProperties = [ |
||||
'name', |
||||
'type', |
||||
'group', |
||||
'config', |
||||
'status', |
||||
'status_name' |
||||
]; |
||||
if (!oldModel) { |
||||
return allChangableProperties; |
||||
} |
||||
|
||||
allChangableProperties.forEach((field) => { |
||||
if ( |
||||
!isUndefined(newModel[field]) && |
||||
!isEqual(newModel[field], oldModel[field]) |
||||
) { |
||||
changedProperties.push(field); |
||||
} |
||||
}); |
||||
|
||||
return changedProperties; |
||||
}; |
||||
|
||||
/**
 * Detail
 *
 * @public
 * @param {Object} operation - Lookup by { type, group }.
 */
||||
FileConfig.get = async (operation) => { |
||||
try { |
||||
const data = await FileConfig.findOne({ |
||||
where: { |
||||
type: operation.type, |
||||
group: operation.group, |
||||
is_active: true |
||||
} |
||||
}); |
||||
if (!data) { |
||||
throw new APIError({ |
||||
status: httpStatus.NOT_FOUND, |
||||
message: 'Không tìm thấy file!' |
||||
}); |
||||
} |
||||
return data; |
||||
} catch (ex) { |
||||
throw ex; |
||||
} |
||||
}; |
||||
|
||||
/**
 * List file configs in descending order of 'created_at' timestamp.
 *
 * @param {number} skip - Number of records to be skipped.
 * @param {number} limit - Limit number of records to be returned.
 * @returns {Promise<FileConfig[]>}
 */
||||
FileConfig.list = async ({ |
||||
name, |
||||
|
||||
// sort
|
||||
sort_by, |
||||
order_by, |
||||
skip = 0, |
||||
limit = 20, |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
const sort = sortConditions({ sort_by, order_by }); |
||||
return FileConfig.findAll({ |
||||
where: options, |
||||
order: [sort], |
||||
offset: skip, |
||||
limit: limit |
||||
}); |
||||
}; |
||||
|
||||
/**
 * Total records.
 *
 * @param {string} name - Optional name filter (partial, case-insensitive).
 * @returns {Promise<Number>}
 */
||||
FileConfig.totalRecords = ({ |
||||
name |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
|
||||
return FileConfig.count({ where: options }); |
||||
}; |
||||
|
||||
/**
 * Filter only allowed fields from FileConfig
 *
 * @param {Object} params
 */
||||
FileConfig.filterParams = (params) => pick(params, PUBLIC_FIELDS); |
||||
|
||||
/**
 * @typedef FileConfig
 */
||||
export default FileConfig; |
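As a consumer-side illustration, an Express-style handler could combine list, totalRecords and transform as below; the route shape, import path and query parameter names are assumptions for the sketch, not part of this diff.

import FileConfig from '../models/file-config.model'; // assumed path

// GET /file-configs?name=...&sort_by=created_at&order_by=DESC&skip=0&limit=20
async function listFileConfigs(req, res, next) {
    try {
        const { name, sort_by, order_by } = req.query;
        const skip = Number(req.query.skip) || 0;
        const limit = Number(req.query.limit) || 20;

        const rows = await FileConfig.list({ name, sort_by, order_by, skip, limit });
        const total = await FileConfig.totalRecords({ name });

        return res.json({
            total,
            data: rows.map((row) => FileConfig.transform(row))
        });
    } catch (err) {
        return next(err);
    }
}

export default listFileConfigs;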
||||
|
@ -1,276 +1,278 @@ |
||||
/* eslint-disable camelcase */ |
||||
import httpStatus from 'http-status'; |
||||
import { Model, DataTypes, Op } from 'sequelize'; |
||||
import { isEqual, isNil, isUndefined, omitBy, pick } from 'lodash'; |
||||
import moment from 'moment-timezone'; |
||||
|
||||
import { serviceName } from '../../config/vars'; |
||||
import postgres from '../../config/postgres'; |
||||
import APIError from '../utils/APIException'; |
||||
|
||||
/** |
||||
* Create connection |
||||
*/ |
||||
const { sequelize } = postgres; |
||||
class File extends Model { } |
||||
|
||||
const PUBLIC_FIELDS = [ |
||||
'name', |
||||
'title', |
||||
'payload' |
||||
]; |
||||
|
||||
/** |
||||
* File Schema |
||||
* @public |
||||
*/ |
||||
File.init( |
||||
{ |
||||
id: { |
||||
type: DataTypes.INTEGER, |
||||
autoIncrement: true, |
||||
primaryKey: true |
||||
}, |
||||
url: { |
||||
type: DataTypes.STRING(255), |
||||
allowNull: false |
||||
}, |
||||
name: { |
||||
type: DataTypes.STRING(255), |
||||
defaultValue: null |
||||
}, |
||||
title: { |
||||
type: DataTypes.STRING(255), |
||||
defaultValue: null |
||||
}, |
||||
payload: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null // id | code | name
|
||||
}, |
||||
|
||||
// manager
|
||||
is_active: { |
||||
type: DataTypes.BOOLEAN, |
||||
defaultValue: true |
||||
}, |
||||
created_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
updated_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
created_by: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null // id | name
|
||||
} |
||||
}, |
||||
{ |
||||
timestamps: false, |
||||
sequelize: sequelize, |
||||
schema: serviceName, |
||||
modelName: 'file', |
||||
tableName: 'tbl_files' |
||||
} |
||||
); |
||||
|
||||
/**
 * Register event emitter
 */
||||
File.Events = { |
||||
File_CREATED: `${serviceName}.file.created`, |
||||
File_UPDATED: `${serviceName}.file.updated`, |
||||
File_DELETED: `${serviceName}.file.deleted`, |
||||
}; |
||||
File.EVENT_SOURCE = `${serviceName}.file`; |
||||
|
||||
/** |
||||
* Add your |
||||
* - pre-save hooks |
||||
* - validations |
||||
* - virtuals |
||||
*/ |
||||
File.addHook('afterCreate', () => { }); |
||||
|
||||
File.addHook('afterUpdate', () => { }); |
||||
|
||||
File.addHook('afterDestroy', () => { }); |
||||
|
||||
/** |
||||
* Load query |
||||
* @param {*} params |
||||
*/ |
||||
function filterConditions(params) { |
||||
const options = omitBy(params, isNil); |
||||
options.is_active = true; |
||||
|
||||
// TODO: load condition
|
||||
if (options.name) { |
||||
options.name = { |
||||
[Op.iLike]: `%${options.name}%` |
||||
}; |
||||
} |
||||
|
||||
return options; |
||||
} |
||||
|
||||
/** |
||||
* Load sort query |
||||
* @param {*} sort_by |
||||
* @param {*} order_by |
||||
*/ |
||||
function sortConditions({ sort_by, order_by }) { |
||||
let sort = null; |
||||
switch (sort_by) { |
||||
case 'created_at': |
||||
sort = ['created_at', order_by]; |
||||
break; |
||||
case 'updated_at': |
||||
sort = ['updated_at', order_by]; |
||||
break; |
||||
default: sort = ['created_at', 'DESC']; |
||||
break; |
||||
} |
||||
return sort; |
||||
} |
||||
|
||||
/** |
||||
* Transform postgres model to expose object |
||||
*/ |
||||
File.transform = (params) => { |
||||
const transformed = {}; |
||||
const fields = [ |
||||
'id', |
||||
'name', |
||||
'payload', |
||||
'created_by' |
||||
]; |
||||
fields.forEach((field) => { |
||||
transformed[field] = params[field]; |
||||
}); |
||||
|
||||
// pipe date
|
||||
const dateFields = [ |
||||
'created_at', |
||||
'updated_at' |
||||
]; |
||||
dateFields.forEach((field) => { |
||||
if (params[field]) { |
||||
transformed[field] = moment(params[field]).unix(); |
||||
} else { |
||||
transformed[field] = null; |
||||
} |
||||
}); |
||||
|
||||
return transformed; |
||||
}; |
||||
|
||||
/** |
||||
* Get all changed properties |
||||
*/ |
||||
File.getChangedProperties = ({ newModel, oldModel }) => { |
||||
const changedProperties = []; |
||||
const allChangableProperties = [ |
||||
'id', |
||||
'name', |
||||
'payload', |
||||
]; |
||||
if (!oldModel) { |
||||
return allChangableProperties; |
||||
} |
||||
|
||||
allChangableProperties.forEach((field) => { |
||||
if ( |
||||
!isUndefined(newModel[field]) && |
||||
!isEqual(newModel[field], oldModel[field]) |
||||
) { |
||||
changedProperties.push(field); |
||||
} |
||||
}); |
||||
|
||||
return changedProperties; |
||||
}; |
||||
|
||||
/** |
||||
* Detail |
||||
* |
||||
* @public |
||||
* @param {string} id |
||||
*/ |
||||
File.get = async (id) => { |
||||
try { |
||||
const data = await File.findOne({ |
||||
where: { |
||||
id, |
||||
is_active: true |
||||
} |
||||
}); |
||||
if (!data) { |
||||
throw new APIError({ |
||||
status: httpStatus.NOT_FOUND, |
||||
message: 'Không tìm thấy file!'
||||
}); |
||||
} |
||||
return data; |
||||
} catch (ex) { |
||||
throw ex; |
||||
} |
||||
}; |
||||
|
||||
/**
 * List files in descending order of 'created_at' timestamp.
 *
 * @param {number} skip - Number of records to be skipped.
 * @param {number} limit - Limit number of records to be returned.
 * @returns {Promise<File[]>}
 */
||||
File.list = async ({ |
||||
name, |
||||
|
||||
// sort
|
||||
sort_by, |
||||
order_by, |
||||
skip = 0, |
||||
limit = 20, |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
const sort = sortConditions({ sort_by, order_by }); |
||||
return File.findAll({ |
||||
where: options, |
||||
order: [sort], |
||||
offset: skip, |
||||
limit: limit |
||||
}); |
||||
}; |
||||
|
||||
/**
 * Total records.
 *
 * @param {string} name - Optional name filter (partial, case-insensitive).
 * @returns {Promise<Number>}
 */
||||
File.totalRecords = ({ |
||||
name |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
|
||||
return File.count({ where: options }); |
||||
}; |
||||
|
||||
/** |
||||
* Filter only allowed fields from File |
||||
* |
||||
* @param {Object} params |
||||
*/ |
||||
File.filterParams = (params) => pick(params, PUBLIC_FIELDS); |
||||
|
||||
/** |
||||
* @typedef File |
||||
*/ |
||||
export default File; |
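A sketch of the write path: filterParams whitelists request input before File.create persists it. The controller shape, req.user and the url handling are assumptions; only filterParams, transform and the schema come from the model above.

import File from '../models/file.model'; // assumed path

async function createFile(req, res, next) {
    try {
        const params = File.filterParams(req.body); // keeps name, title, payload only
        const record = await File.create({
            ...params,
            url: req.body.url, // url is required by the schema but not whitelisted
            created_by: { id: req.user.id, name: req.user.name }
        });
        return res.status(201).json(File.transform(record));
    } catch (err) {
        return next(err);
    }
}

export default createFile;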
||||
/* eslint-disable camelcase */ |
||||
import httpStatus from 'http-status'; |
||||
import { Model, DataTypes, Op } from 'sequelize'; |
||||
import { isEqual, isNil, isUndefined, omitBy, pick } from 'lodash'; |
||||
import moment from 'moment-timezone'; |
||||
import { serviceName } from '../../config/vars'; |
||||
import postgres from '../../config/postgres'; |
||||
import APIError from '../utils/APIException'; |
||||
|
||||
/** |
||||
* Create connection |
||||
*/ |
||||
const { sequelize } = postgres; |
||||
class File extends Model { } |
||||
|
||||
const PUBLIC_FIELDS = [ |
||||
'name', |
||||
'title', |
||||
'payload' |
||||
]; |
||||
|
||||
/** |
||||
* File Schema |
||||
* @public |
||||
*/ |
||||
File.init( |
||||
{ |
||||
id: { |
||||
type: DataTypes.INTEGER, |
||||
autoIncrement: true, |
||||
primaryKey: true |
||||
}, |
||||
url: { |
||||
type: DataTypes.STRING(255), |
||||
allowNull: false |
||||
}, |
||||
name: { |
||||
type: DataTypes.STRING(255), |
||||
defaultValue: null |
||||
}, |
||||
title: { |
||||
type: DataTypes.STRING(255), |
||||
defaultValue: null |
||||
}, |
||||
payload: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null // id | code | name
|
||||
}, |
||||
|
||||
// manager
|
||||
is_active: { |
||||
type: DataTypes.BOOLEAN, |
||||
defaultValue: true |
||||
}, |
||||
created_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
updated_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
created_by: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null // id | name
|
||||
}, |
||||
download_count: { |
||||
type: DataTypes.INTEGER, |
||||
defaultValue: 0 |
||||
} |
||||
}, |
||||
{ |
||||
timestamps: false, |
||||
sequelize: sequelize, |
||||
schema: serviceName, |
||||
modelName: 'file', |
||||
tableName: 'tbl_files' |
||||
} |
||||
); |
||||
|
||||
/**
 * Register event emitter
 */
||||
File.Events = { |
||||
File_CREATED: `${serviceName}.file.created`, |
||||
File_UPDATED: `${serviceName}.file.updated`, |
||||
File_DELETED: `${serviceName}.file.deleted`, |
||||
}; |
||||
File.EVENT_SOURCE = `${serviceName}.file`; |
||||
|
||||
/** |
||||
* Add your |
||||
* - pre-save hooks |
||||
* - validations |
||||
* - virtuals |
||||
*/ |
||||
File.addHook('afterCreate', () => { }); |
||||
|
||||
File.addHook('afterUpdate', () => { }); |
||||
|
||||
File.addHook('afterDestroy', () => { }); |
||||
|
||||
/** |
||||
* Load query |
||||
* @param {*} params |
||||
*/ |
||||
function filterConditions(params) { |
||||
const options = omitBy(params, isNil); |
||||
options.is_active = true; |
||||
|
||||
// TODO: load condition
|
||||
if (options.name) { |
||||
options.name = { |
||||
[Op.iLike]: `%${options.name}%` |
||||
}; |
||||
} |
||||
|
||||
return options; |
||||
} |
||||
|
||||
/** |
||||
* Load sort query |
||||
* @param {*} sort_by |
||||
* @param {*} order_by |
||||
*/ |
||||
function sortConditions({ sort_by, order_by }) { |
||||
let sort = null; |
||||
switch (sort_by) { |
||||
case 'created_at': |
||||
sort = ['created_at', order_by]; |
||||
break; |
||||
case 'updated_at': |
||||
sort = ['updated_at', order_by]; |
||||
break; |
||||
default: sort = ['created_at', 'DESC']; |
||||
break; |
||||
} |
||||
return sort; |
||||
} |
||||
|
||||
/** |
||||
* Transform postgres model to expose object |
||||
*/ |
||||
File.transform = (params) => { |
||||
const transformed = {}; |
||||
const fields = [ |
||||
'id', |
||||
'name', |
||||
'payload', |
||||
'created_by' |
||||
]; |
||||
fields.forEach((field) => { |
||||
transformed[field] = params[field]; |
||||
}); |
||||
|
||||
// pipe date
|
||||
const dateFields = [ |
||||
'created_at', |
||||
'updated_at' |
||||
]; |
||||
dateFields.forEach((field) => { |
||||
if (params[field]) { |
||||
transformed[field] = moment(params[field]).unix(); |
||||
} else { |
||||
transformed[field] = null; |
||||
} |
||||
}); |
||||
|
||||
return transformed; |
||||
}; |
||||
|
||||
/** |
||||
* Get all changed properties |
||||
*/ |
||||
File.getChangedProperties = ({ newModel, oldModel }) => { |
||||
const changedProperties = []; |
||||
const allChangableProperties = [ |
||||
'id', |
||||
'name', |
||||
'payload', |
||||
]; |
||||
if (!oldModel) { |
||||
return allChangableProperties; |
||||
} |
||||
|
||||
allChangableProperties.forEach((field) => { |
||||
if ( |
||||
!isUndefined(newModel[field]) && |
||||
!isEqual(newModel[field], oldModel[field]) |
||||
) { |
||||
changedProperties.push(field); |
||||
} |
||||
}); |
||||
|
||||
return changedProperties; |
||||
}; |
||||
|
||||
/** |
||||
* Detail |
||||
* |
||||
* @public |
||||
* @param {string} id |
||||
*/ |
||||
File.get = async (id) => { |
||||
try { |
||||
const data = await File.findOne({ |
||||
where: { |
||||
id, |
||||
is_active: true |
||||
} |
||||
}); |
||||
if (!data) { |
||||
throw new APIError({ |
||||
status: httpStatus.NOT_FOUND, |
||||
message: 'Không tìm thấy file!'
||||
}); |
||||
} |
||||
return data; |
||||
} catch (ex) { |
||||
throw ex; |
||||
} |
||||
}; |
||||
|
||||
/**
 * List files in descending order of 'created_at' timestamp.
 *
 * @param {number} skip - Number of records to be skipped.
 * @param {number} limit - Limit number of records to be returned.
 * @returns {Promise<File[]>}
 */
||||
File.list = async ({ |
||||
name, |
||||
// sort
|
||||
sort_by, |
||||
order_by, |
||||
skip = 0, |
||||
limit = 20, |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
const sort = sortConditions({ sort_by, order_by }); |
||||
return File.findAll({ |
||||
where: options, |
||||
order: [sort], |
||||
offset: skip, |
||||
limit: limit |
||||
}); |
||||
}; |
||||
|
||||
/**
 * Total records.
 *
 * @param {string} name - Optional name filter (partial, case-insensitive).
 * @returns {Promise<Number>}
 */
||||
File.totalRecords = ({ |
||||
name |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
|
||||
return File.count({ where: options }); |
||||
}; |
||||
|
||||
/** |
||||
* Filter only allowed fields from File |
||||
* |
||||
* @param {Object} params |
||||
*/ |
||||
File.filterParams = (params) => pick(params, PUBLIC_FIELDS); |
||||
|
||||
/** |
||||
* @typedef File |
||||
*/ |
||||
export default File; |
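The new download_count column invites a small helper like the following; the increment call is standard Sequelize instance API, while the import path and helper name are assumptions.

import File from '../models/file.model'; // assumed path

// Resolve a file by id and record one more download.
async function downloadFile(id) {
    const file = await File.get(id); // throws APIError(404) when missing or inactive
    await file.increment('download_count', { by: 1 });
    return File.transform(file);
}

export default downloadFile;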
||||
|
@ -1,292 +1,293 @@ |
||||
/* eslint-disable camelcase */ |
||||
import httpStatus from 'http-status'; |
||||
import { Model, DataTypes, Op } from 'sequelize'; |
||||
import { isEqual, isNil, isUndefined, omitBy, pick } from 'lodash'; |
||||
import moment from 'moment-timezone'; |
||||
|
||||
import { serviceName } from '../../config/vars'; |
||||
import postgres from '../../config/postgres'; |
||||
import APIError from '../utils/APIException'; |
||||
|
||||
/** |
||||
* Create connection |
||||
*/ |
||||
const { sequelize } = postgres; |
||||
class Image extends Model { } |
||||
|
||||
const PUBLIC_FIELDS = [ |
||||
'name', |
||||
'title', |
||||
'payload' |
||||
]; |
||||
|
||||
Image.Groups = { |
||||
USER: 'users', |
||||
STORE: 'stores', |
||||
VOUCHER: 'vouchers', |
||||
STORIES: 'stories', |
||||
CHAPTERS: 'chapters', |
||||
GAMES: 'games', |
||||
CUSTOMER: 'customers', |
||||
PROMOTION: 'promotions', |
||||
PRODUCT: 'products', |
||||
// configuration
||||
BANNER: 'banners', |
||||
CATEGORY: 'categories', |
||||
DEFAULT: 'defaults' |
||||
}; |
||||
|
||||
/** |
||||
* Image Schema |
||||
* @public |
||||
*/ |
||||
Image.init( |
||||
{ |
||||
id: { |
||||
type: DataTypes.INTEGER, |
||||
autoIncrement: true, |
||||
primaryKey: true |
||||
}, |
||||
url: { |
||||
type: DataTypes.STRING(255), |
||||
allowNull: false |
||||
}, |
||||
name: { |
||||
type: DataTypes.STRING(255), |
||||
defaultValue: null |
||||
}, |
||||
title: { |
||||
type: DataTypes.STRING(255), |
||||
defaultValue: null |
||||
}, |
||||
payload: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null // id | code | name
|
||||
}, |
||||
|
||||
// manager
|
||||
is_active: { |
||||
type: DataTypes.BOOLEAN, |
||||
defaultValue: true |
||||
}, |
||||
created_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
updated_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
created_by: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null // id | name
|
||||
} |
||||
}, |
||||
{ |
||||
timestamps: false, |
||||
sequelize: sequelize, |
||||
schema: serviceName, |
||||
modelName: 'image', |
||||
tableName: 'tbl_images' |
||||
} |
||||
); |
||||
|
||||
/**
 * Register event emitter
 */
||||
Image.Events = { |
||||
IMAGE_CREATED: `${serviceName}.image.created`, |
||||
IMAGE_UPDATED: `${serviceName}.image.updated`, |
||||
IMAGE_DELETED: `${serviceName}.image.deleted`, |
||||
}; |
||||
Image.EVENT_SOURCE = `${serviceName}.image`; |
||||
|
||||
/** |
||||
* Add your |
||||
* - pre-save hooks |
||||
* - validations |
||||
* - virtuals |
||||
*/ |
||||
Image.addHook('afterCreate', () => { }); |
||||
|
||||
Image.addHook('afterUpdate', () => { }); |
||||
|
||||
Image.addHook('afterDestroy', () => { }); |
||||
|
||||
/** |
||||
* Load query |
||||
* @param {*} params |
||||
*/ |
||||
function filterConditions(params) { |
||||
const options = omitBy(params, isNil); |
||||
options.is_active = true; |
||||
|
||||
// TODO: load condition
|
||||
if (options.name) { |
||||
options.name = { |
||||
[Op.iLike]: `%${options.name}%` |
||||
}; |
||||
} |
||||
|
||||
return options; |
||||
} |
||||
|
||||
/** |
||||
* Load sort query |
||||
* @param {*} sort_by |
||||
* @param {*} order_by |
||||
*/ |
||||
function sortConditions({ sort_by, order_by }) { |
||||
let sort = null; |
||||
switch (sort_by) { |
||||
case 'created_at': |
||||
sort = ['created_at', order_by]; |
||||
break; |
||||
case 'updated_at': |
||||
sort = ['updated_at', order_by]; |
||||
break; |
||||
default: sort = ['created_at', 'DESC']; |
||||
break; |
||||
} |
||||
return sort; |
||||
} |
||||
|
||||
/** |
||||
* Transform postgres model to expose object |
||||
*/ |
||||
Image.transform = (params) => { |
||||
const transformed = {}; |
||||
const fields = [ |
||||
'id', |
||||
'name', |
||||
'payload', |
||||
'created_by' |
||||
]; |
||||
fields.forEach((field) => { |
||||
transformed[field] = params[field]; |
||||
}); |
||||
|
||||
// pipe date
|
||||
const dateFields = [ |
||||
'created_at', |
||||
'updated_at' |
||||
]; |
||||
dateFields.forEach((field) => { |
||||
if (params[field]) { |
||||
transformed[field] = moment(params[field]).unix(); |
||||
} else { |
||||
transformed[field] = null; |
||||
} |
||||
}); |
||||
|
||||
return transformed; |
||||
}; |
||||
|
||||
/** |
||||
* Get all changed properties |
||||
*/ |
||||
Image.getChangedProperties = ({ newModel, oldModel }) => { |
||||
const changedProperties = []; |
||||
const allChangableProperties = [ |
||||
'id', |
||||
'name', |
||||
'payload', |
||||
]; |
||||
if (!oldModel) { |
||||
return allChangableProperties; |
||||
} |
||||
|
||||
allChangableProperties.forEach((field) => { |
||||
if ( |
||||
!isUndefined(newModel[field]) && |
||||
!isEqual(newModel[field], oldModel[field]) |
||||
) { |
||||
changedProperties.push(field); |
||||
} |
||||
}); |
||||
|
||||
return changedProperties; |
||||
}; |
||||
|
||||
/** |
||||
* Detail |
||||
* |
||||
* @public |
||||
* @param {string} id |
||||
*/ |
||||
Image.get = async (id) => { |
||||
try { |
||||
const data = await Image.findOne({ |
||||
where: { |
||||
id, |
||||
is_active: true |
||||
} |
||||
}); |
||||
if (!data) { |
||||
throw new APIError({ |
||||
status: httpStatus.NOT_FOUND, |
||||
message: 'Không tìm thấy hình ảnh!'
||||
}); |
||||
} |
||||
return data; |
||||
} catch (ex) { |
||||
throw ex; |
||||
} |
||||
}; |
||||
|
||||
/**
 * List images in descending order of 'created_at' timestamp.
 *
 * @param {number} skip - Number of records to be skipped.
 * @param {number} limit - Limit number of records to be returned.
 * @returns {Promise<Image[]>}
 */
||||
Image.list = async ({ |
||||
name, |
||||
|
||||
// sort
|
||||
sort_by, |
||||
order_by, |
||||
skip = 0, |
||||
limit = 20, |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
const sort = sortConditions({ sort_by, order_by }); |
||||
return Image.findAll({ |
||||
where: options, |
||||
order: [sort], |
||||
offset: skip, |
||||
limit: limit |
||||
}); |
||||
}; |
||||
|
||||
/**
 * Total records.
 *
 * @param {string} name - Optional name filter (partial, case-insensitive).
 * @returns {Promise<Number>}
 */
||||
Image.totalRecords = ({ |
||||
name |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
|
||||
return Image.count({ where: options }); |
||||
}; |
||||
|
||||
/**
 * Filter only allowed fields from Image
 *
 * @param {Object} params
 */
||||
Image.filterParams = (params) => pick(params, PUBLIC_FIELDS); |
||||
|
||||
/**
 * @typedef Image
 */
||||
export default Image; |
||||
/* eslint-disable camelcase */ |
||||
import httpStatus from 'http-status'; |
||||
import { Model, DataTypes, Op } from 'sequelize'; |
||||
import { isEqual, isNil, isUndefined, omitBy, pick } from 'lodash'; |
||||
import moment from 'moment-timezone'; |
||||
|
||||
import { serviceName } from '../../config/vars'; |
||||
import postgres from '../../config/postgres'; |
||||
import APIError from '../utils/APIException'; |
||||
|
||||
/** |
||||
* Create connection |
||||
*/ |
||||
const { sequelize } = postgres; |
||||
class Image extends Model { } |
||||
|
||||
const PUBLIC_FIELDS = [ |
||||
'name', |
||||
'title', |
||||
'payload' |
||||
]; |
||||
|
||||
Image.Groups = { |
||||
USER: 'users', |
||||
STORE: 'stores', |
||||
VOUCHER: 'vouchers', |
||||
STORIES: 'stories', |
||||
CHAPTERS: 'chapters', |
||||
GAMES: 'games', |
||||
CUSTOMER: 'customers', |
||||
PROMOTION: 'promotions', |
||||
PRODUCT: 'products', |
||||
// configuration
||||
BANNER: 'banners', |
||||
CATEGORY: 'categories', |
||||
DEFAULT: 'defaults' |
||||
}; |
||||
|
||||
/** |
||||
* Image Schema |
||||
* @public |
||||
*/ |
||||
Image.init( |
||||
{ |
||||
id: { |
||||
type: DataTypes.INTEGER, |
||||
autoIncrement: true, |
||||
primaryKey: true |
||||
}, |
||||
url: { |
||||
type: DataTypes.STRING(255), |
||||
allowNull: false |
||||
}, |
||||
name: { |
||||
type: DataTypes.STRING(255), |
||||
defaultValue: null |
||||
}, |
||||
title: { |
||||
type: DataTypes.STRING(255), |
||||
defaultValue: null |
||||
}, |
||||
payload: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null // id | code | name
|
||||
}, |
||||
|
||||
// manager
|
||||
is_active: { |
||||
type: DataTypes.BOOLEAN, |
||||
defaultValue: true |
||||
}, |
||||
created_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
updated_at: { |
||||
type: DataTypes.DATE, |
||||
defaultValue: DataTypes.NOW |
||||
}, |
||||
created_by: { |
||||
type: DataTypes.JSONB, |
||||
defaultValue: null // id | name
|
||||
} |
||||
}, |
||||
{ |
||||
timestamps: false, |
||||
sequelize: sequelize, |
||||
schema: serviceName, |
||||
modelName: 'image', |
||||
tableName: 'tbl_images' |
||||
} |
||||
); |
||||
|
||||
/**
 * Register event emitter
 */
||||
Image.Events = { |
||||
IMAGE_CREATED: `${serviceName}.image.created`, |
||||
IMAGE_UPDATED: `${serviceName}.image.updated`, |
||||
IMAGE_DELETED: `${serviceName}.image.deleted`, |
||||
}; |
||||
|
||||
Image.EVENT_SOURCE = `${serviceName}.image`; |
||||
|
||||
/** |
||||
* Add your |
||||
* - pre-save hooks |
||||
* - validations |
||||
* - virtuals |
||||
*/ |
||||
Image.addHook('afterCreate', () => { }); |
||||
|
||||
Image.addHook('afterUpdate', () => { }); |
||||
|
||||
Image.addHook('afterDestroy', () => { }); |
||||
|
||||
/** |
||||
* Load query |
||||
* @param {*} params |
||||
*/ |
||||
function filterConditions(params) { |
||||
const options = omitBy(params, isNil); |
||||
options.is_active = true; |
||||
|
||||
// TODO: load condition
|
||||
if (options.name) { |
||||
options.name = { |
||||
[Op.iLike]: `%${options.name}%` |
||||
}; |
||||
} |
||||
|
||||
return options; |
||||
} |
||||
|
||||
/** |
||||
* Load sort query |
||||
* @param {*} sort_by |
||||
* @param {*} order_by |
||||
*/ |
||||
function sortConditions({ sort_by, order_by }) { |
||||
let sort = null; |
||||
switch (sort_by) { |
||||
case 'created_at': |
||||
sort = ['created_at', order_by]; |
||||
break; |
||||
case 'updated_at': |
||||
sort = ['updated_at', order_by]; |
||||
break; |
||||
default: sort = ['created_at', 'DESC']; |
||||
break; |
||||
} |
||||
return sort; |
||||
} |
||||
|
||||
/** |
||||
* Transform postgres model to expose object |
||||
*/ |
||||
Image.transform = (params) => { |
||||
const transformed = {}; |
||||
const fields = [ |
||||
'id', |
||||
'name', |
||||
'payload', |
||||
'created_by' |
||||
]; |
||||
fields.forEach((field) => { |
||||
transformed[field] = params[field]; |
||||
}); |
||||
|
||||
// pipe date
|
||||
const dateFields = [ |
||||
'created_at', |
||||
'updated_at' |
||||
]; |
||||
dateFields.forEach((field) => { |
||||
if (params[field]) { |
||||
transformed[field] = moment(params[field]).unix(); |
||||
} else { |
||||
transformed[field] = null; |
||||
} |
||||
}); |
||||
|
||||
return transformed; |
||||
}; |
||||
|
||||
/** |
||||
* Get all changed properties |
||||
*/ |
||||
Image.getChangedProperties = ({ newModel, oldModel }) => { |
||||
const changedProperties = []; |
||||
const allChangableProperties = [ |
||||
'id', |
||||
'name', |
||||
'payload', |
||||
]; |
||||
if (!oldModel) { |
||||
return allChangableProperties; |
||||
} |
||||
|
||||
allChangableProperties.forEach((field) => { |
||||
if ( |
||||
!isUndefined(newModel[field]) && |
||||
!isEqual(newModel[field], oldModel[field]) |
||||
) { |
||||
changedProperties.push(field); |
||||
} |
||||
}); |
||||
|
||||
return changedProperties; |
||||
}; |
||||
|
||||
/** |
||||
* Detail |
||||
* |
||||
* @public |
||||
* @param {string} id |
||||
*/ |
||||
Image.get = async (id) => { |
||||
try { |
||||
const data = await Image.findOne({ |
||||
where: { |
||||
id, |
||||
is_active: true |
||||
} |
||||
}); |
||||
if (!data) { |
||||
throw new APIError({ |
||||
status: httpStatus.NOT_FOUND, |
||||
message: 'Không tìm thấy hình ảnh!'
||||
}); |
||||
} |
||||
return data; |
||||
} catch (ex) { |
||||
throw ex; |
||||
} |
||||
}; |
||||
|
||||
/**
 * List images in descending order of 'created_at' timestamp.
 *
 * @param {number} skip - Number of records to be skipped.
 * @param {number} limit - Limit number of records to be returned.
 * @returns {Promise<Image[]>}
 */
||||
Image.list = async ({ |
||||
name, |
||||
|
||||
// sort
|
||||
sort_by, |
||||
order_by, |
||||
skip = 0, |
||||
limit = 20, |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
const sort = sortConditions({ sort_by, order_by }); |
||||
return Image.findAll({ |
||||
where: options, |
||||
order: [sort], |
||||
offset: skip, |
||||
limit: limit |
||||
}); |
||||
}; |
||||
|
||||
/**
 * Total records.
 *
 * @param {string} name - Optional name filter (partial, case-insensitive).
 * @returns {Promise<Number>}
 */
||||
Image.totalRecords = ({ |
||||
name |
||||
}) => { |
||||
const options = filterConditions({ |
||||
name |
||||
}); |
||||
|
||||
return Image.count({ where: options }); |
||||
}; |
||||
|
||||
/**
 * Filter only allowed fields from Image
 *
 * @param {Object} params
 */
||||
Image.filterParams = (params) => pick(params, PUBLIC_FIELDS); |
||||
|
||||
/**
 * @typedef Image
 */
||||
export default Image; |
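Image.Groups appears to namespace where uploads are stored (an assumption, since the table itself has no group column); a helper pair like this keeps unknown groups out of the layout and returns search results in the public shape. Paths and helper names are illustrative only.

import Image from '../models/image.model'; // assumed path

// Fall back to the default group rather than accepting arbitrary folder names.
function storagePathFor(group, fileName) {
    const safeGroup = Object.values(Image.Groups).includes(group)
        ? group
        : Image.Groups.DEFAULT;
    return `${safeGroup}/${fileName}`;
}

// Case-insensitive name search, returned as plain transformed objects.
async function searchImages(name) {
    const rows = await Image.list({ name, sort_by: 'created_at', order_by: 'DESC' });
    return rows.map((row) => Image.transform(row));
}

export { storagePathFor, searchImages };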
||||
|
File diff suppressed because it is too large
@ -1,40 +1,40 @@ |
||||
import Sequelize from 'sequelize'; |
||||
import bluebird from 'bluebird'; |
||||
import { postgres, env } from './vars'; |
||||
|
||||
Sequelize.Promise = bluebird; |
||||
|
||||
const defaultErrorHandler = (err) => { |
||||
console.log(`Connection to Postgres error: ${err}`); |
||||
}; |
||||
|
||||
const app = { |
||||
sequelize: new Sequelize( |
||||
postgres.uri, |
||||
{ |
||||
dialect: 'postgres' |
||||
} |
||||
), |
||||
connect(errorHandler = defaultErrorHandler) { |
||||
this.sequelize.authenticate() |
||||
.then(() => { |
||||
console.log('Postgres connection established!'); |
||||
if (env === '1') { |
||||
this.sequelize.sync({ |
||||
alter: true, |
||||
logging: true |
||||
}); |
||||
} |
||||
}).catch((error) => { |
||||
errorHandler(error); |
||||
}); |
||||
return this.sequelize; |
||||
}, |
||||
disconnect() { |
||||
// close connection
|
||||
console.log('Closing postgres connection!'); |
||||
this.sequelize.close(); |
||||
} |
||||
}; |
||||
|
||||
export default app; |
||||
import Sequelize from 'sequelize'; |
||||
import bluebird from 'bluebird'; |
||||
import { postgres, env } from './vars'; |
||||
|
||||
Sequelize.Promise = bluebird; |
||||
|
||||
const defaultErrorHandler = (err) => { |
||||
console.log(`Connection to Postgres error: ${err}`); |
||||
}; |
||||
|
||||
const app = { |
||||
sequelize: new Sequelize( |
||||
postgres.uri, |
||||
{ |
||||
dialect: 'postgres' |
||||
} |
||||
), |
||||
connect(errorHandler = defaultErrorHandler) { |
||||
this.sequelize.authenticate() |
||||
.then(() => { |
||||
console.log('Postgres connection established!'); |
||||
if (env === '1') { |
||||
this.sequelize.sync({ |
||||
alter: true, |
||||
logging: true |
||||
}); |
||||
} |
||||
}).catch((error) => { |
||||
errorHandler(error); |
||||
}); |
||||
return this.sequelize; |
||||
}, |
||||
disconnect() { |
||||
// close connection
|
||||
console.log('Closing postgres connection!'); |
||||
this.sequelize.close(); |
||||
} |
||||
}; |
||||
|
||||
export default app; |
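Typical startup usage for the connector above would look like the sketch below; the import path and the shutdown signal handling are assumptions.

import postgres from './config/postgres'; // assumed path

// connect() logs the result and returns the shared Sequelize instance.
const sequelize = postgres.connect();

// Close the pool on shutdown so the process can exit cleanly.
process.on('SIGTERM', () => {
    postgres.disconnect();
});

export default sequelize;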
||||
|