feat: add news/download endpoint
This commit is contained in:
@@ -5,5 +5,6 @@ const services= require('./services.js');
|
||||
router.get('/', services.getList);
|
||||
router.get('/find', services.findList);
|
||||
router.get('/:id', services.getById);
|
||||
router.get('/download/:image_name', services.getImageByNewId);
|
||||
|
||||
module.exports = router;
|
||||
|
||||
@@ -1,9 +1,15 @@
|
||||
"use strict";
|
||||
const { ROOT_PATH, LIB_PATH, MODELS_PATH, HANDLERS_PATH } = process.env;
|
||||
const { API_CONFIG, ROOT_PATH, LIB_PATH, MODELS_PATH, HANDLERS_PATH } = process.env;
|
||||
const { getPagination } = require( `${ROOT_PATH}/${LIB_PATH}/Misc.js` );
|
||||
const { GenericHandler } = require( `${ROOT_PATH}/${HANDLERS_PATH}/Generic.handler.js` );
|
||||
const Model = require( `${ROOT_PATH}/${MODELS_PATH}/news.model.js` );
|
||||
const { downloadFile } = require(`${ROOT_PATH}/${LIB_PATH}/Misc`);
|
||||
|
||||
const apiConfig = require( `${ROOT_PATH}/${API_CONFIG}` );
|
||||
|
||||
const s3Bucket = apiConfig.S3.bucket;
|
||||
const s3BucketKey = apiConfig.S3.news_key;
|
||||
|
||||
const Model = require( `${ROOT_PATH}/${MODELS_PATH}/news.model.js` );
|
||||
const generic = new GenericHandler( Model );
|
||||
|
||||
const getList = async(req, res) => {
|
||||
@@ -30,4 +36,17 @@ const getById = async(req, res) => {
|
||||
res.send( retVal );
|
||||
};
|
||||
|
||||
module.exports = { getList , findList , getById };
|
||||
const getImageByNewId = async(req, res) => {
|
||||
try{
|
||||
const image_name = req.params.image_name;
|
||||
const file = await downloadFile( s3Bucket, s3BucketKey, image_name );
|
||||
res.attachment( image_name );
|
||||
res.setHeader('Content-Type', file.ContentType );
|
||||
res.send( file.Body );
|
||||
} catch ( err ){
|
||||
console.error( err );
|
||||
return res.status(500).send({ error : "News: Internal error" });
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = { getList , findList , getById, getImageByNewId };
|
||||
|
||||
@@ -1,32 +1,11 @@
|
||||
"use strict";
|
||||
const { ROOT_PATH, LIB_PATH, MODELS_PATH, API_CONFIG } = process.env;
|
||||
const { downloadFile } = require(`${ROOT_PATH}/${LIB_PATH}/Misc`);
|
||||
const apiConfig = require( `${ROOT_PATH}/${API_CONFIG}` );
|
||||
|
||||
const { S3Client, GetObjectCommand } = require('@aws-sdk/client-s3');
|
||||
const s3Client = new S3Client({
|
||||
region : apiConfig.S3.region,
|
||||
credentials : {
|
||||
accessKeyId : apiConfig.S3.accessKeyId,
|
||||
secretAccessKey : apiConfig.S3.secretAccessKey
|
||||
}
|
||||
});
|
||||
const s3Bucket = apiConfig.S3.bucket;
|
||||
const s3BucketKey = apiConfig.S3.key;
|
||||
const s3BucketKey = apiConfig.S3.load_attachments_key;
|
||||
|
||||
async function downloadFile( bucket, key, obj_id ){
|
||||
const params = {
|
||||
Bucket: bucket,
|
||||
Key : `${key}/${obj_id}`
|
||||
};
|
||||
const s3resp = await s3Client.send( new GetObjectCommand( params ) );
|
||||
const chunks = []
|
||||
for await (const chunk of s3resp.Body) {
|
||||
chunks.push(chunk)
|
||||
}
|
||||
const body = Buffer.concat(chunks);
|
||||
s3resp.Body = body;
|
||||
return s3resp;
|
||||
}
|
||||
const getAttachmentFile = async(req, res) => {
|
||||
const attachmentId = req.params.id;
|
||||
const file = await downloadFile( s3Bucket, s3BucketKey, attachmentId );
|
||||
|
||||
@@ -24,7 +24,8 @@
|
||||
"accessKeyId": "AKIAXTQEUF6MLCHTUIKW",
|
||||
"secretAccessKey": "QhM8gQ5O3hVDIf41YeO5/A6Wo58D1xQz8pzxBB2W",
|
||||
"bucket": "enruta",
|
||||
"key":"loadattachments",
|
||||
"load_attachments_key":"loadattachments",
|
||||
"news_key":"news",
|
||||
"region": "us-west-1"
|
||||
},
|
||||
"sendgrid" : {
|
||||
|
||||
@@ -24,6 +24,8 @@
|
||||
"accessKeyId": "AKIAXTQEUF6MLCHTUIKW",
|
||||
"secretAccessKey": "QhM8gQ5O3hVDIf41YeO5/A6Wo58D1xQz8pzxBB2W",
|
||||
"bucket": "enruta",
|
||||
"load_attachments_key":"loadattachments",
|
||||
"news_key":"news",
|
||||
"region": "us-west-1"
|
||||
},
|
||||
"sendgrid" : {
|
||||
|
||||
@@ -3,6 +3,15 @@ const { ROOT_PATH, API_CONFIG } = process.env;
|
||||
const apiConfig = require( `${ROOT_PATH}/${API_CONFIG}` );
|
||||
const crypto = require('crypto');
|
||||
|
||||
const { S3Client, GetObjectCommand } = require('@aws-sdk/client-s3');
|
||||
const s3Client = new S3Client({
|
||||
region : apiConfig.S3.region,
|
||||
credentials : {
|
||||
accessKeyId : apiConfig.S3.accessKeyId,
|
||||
secretAccessKey : apiConfig.S3.secretAccessKey
|
||||
}
|
||||
});
|
||||
|
||||
const secret = apiConfig.authentication.jwtSecret;
|
||||
/**
|
||||
* Convert string to sha256 string in hex
|
||||
@@ -74,4 +83,19 @@ async function queryPage(page, elements, model, filter=null, projection=null){
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { genKey , toSha256, getPagination, getPage, queryPage };
|
||||
async function downloadFile( bucket, key, obj_id ){
|
||||
const params = {
|
||||
Bucket: bucket,
|
||||
Key : `${key}/${obj_id}`
|
||||
};
|
||||
const s3resp = await s3Client.send( new GetObjectCommand( params ) );
|
||||
const chunks = []
|
||||
for await (const chunk of s3resp.Body) {
|
||||
chunks.push(chunk)
|
||||
}
|
||||
const body = Buffer.concat(chunks);
|
||||
s3resp.Body = body;
|
||||
return s3resp;
|
||||
}
|
||||
|
||||
module.exports = { genKey , toSha256, getPagination, getPage, queryPage, downloadFile};
|
||||
Reference in New Issue
Block a user