feat: migrate S3 storage backend from AWS S3 to MinIO

This commit is contained in:
Josepablo C
2024-09-28 21:33:22 -06:00
parent 5b4318fa6e
commit bad210fc0b
7 changed files with 69 additions and 89 deletions

View File

@@ -35,6 +35,7 @@
"jsonschema": "^1.4.1",
"jsonwebtoken": "^9.0.2",
"knex": "^2.5.1",
"minio": "^8.0.1",
"mongodb-core": "^3.2.7",
"mongoose": "^7.5.4",
"morgan": "^1.10.0",

View File

@@ -1,22 +1,14 @@
"use strict";
const { ROOT_PATH, LIB_PATH, MODELS_PATH, API_CONFIG } = process.env;
const { getPagination , getPage } = require( `${ROOT_PATH}/${LIB_PATH}/Misc.js` );
const apiConfig = require( `${ROOT_PATH}/${API_CONFIG}` );
const { getPagination , getPage } = require( '../../../lib/Misc' )
const { uploadFile } = require('../../../lib/3R/S3')
const apiConfig = require( '../../../config/apiConfig.json' )
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
const s3Client = new S3Client({
region : apiConfig.S3.region,
credentials : {
accessKeyId : apiConfig.S3.accessKeyId,
secretAccessKey : apiConfig.S3.secretAccessKey
}
});
const s3Bucket = apiConfig.S3.bucket;
const s3BucketKey = apiConfig.S3.load_attachments_key;
const s3Bucket = apiConfig.S3.bucket
const s3BucketKey = apiConfig.S3.load_attachments_key
const Model = require( `${ROOT_PATH}/${MODELS_PATH}/load-attachments.model.js` );
const UserModel = require( `${ROOT_PATH}/${MODELS_PATH}/users.model.js` );
const LoadsModel = require( `${ROOT_PATH}/${MODELS_PATH}/loads.model.js` );
const Model = require('../../../lib/Models/load-attachments.model')
const UserModel = require( '../../../lib/Models/users.model' );
const LoadsModel = require( '../../../lib/Models/loads.model' );
async function getAuthorizationFilter( userId ){
const user = await UserModel.findById( userId );
@@ -123,22 +115,12 @@ async function createLoadAttachment( type , userId , loadId ){
return attachment;
}
async function uploadFile( bucket, key, file , obj_id ){
const params = {
Bucket: bucket,
Key : `${key}/${obj_id}`,
ContentType : file.mimetype,
Body : file.data
};
const s3resp = await s3Client.send( new PutObjectCommand( params ) );
return s3resp;
}
const postLoadingAttachment = async(req, res) => {
const loadId = req.params.id;
const attachment = await createLoadAttachment( "Loading", req.JWT.payload.sub , loadId );
const file = req.files.attachment;
if( attachment && file ){
const s3resp = await uploadFile( s3Bucket, s3BucketKey, file , attachment._id );
await uploadFile( s3Bucket, s3BucketKey, attachment._id, file );
res.send( attachment );
}else if( !file ){
res.status(400).send({ error : "attachment file not found" , code: 400 });
@@ -152,7 +134,7 @@ const postDownloadingAttachment = async(req, res) => {
const attachment = await createLoadAttachment( "Downloading", req.JWT.payload.sub , loadId );
const file = req.files.attachment;
if( attachment && file ){
const s3resp = await uploadFile( s3Bucket, s3BucketKey, file , attachment._id );
await uploadFile( s3Bucket, s3BucketKey, attachment._id, file );
res.send( attachment );
}else if( !file ){
res.status(400).send({ error : "attachment file not found" , code: 400 });

View File

@@ -1,15 +1,14 @@
"use strict";
const { API_CONFIG, ROOT_PATH, LIB_PATH, MODELS_PATH, HANDLERS_PATH } = process.env;
const { getPagination } = require( `${ROOT_PATH}/${LIB_PATH}/Misc.js` );
const { GenericHandler } = require( `${ROOT_PATH}/${HANDLERS_PATH}/Generic.handler.js` );
const { downloadFile } = require(`${ROOT_PATH}/${LIB_PATH}/Misc`);
const { getPagination } = require( '../../../lib/Misc' );
const { GenericHandler } = require( '../../../lib/Handlers/Generic.handler' );
const { downloadFile } = require('../../../lib/3R/S3/index');
const apiConfig = require( `${ROOT_PATH}/${API_CONFIG}` );
const apiConfig = require( '../../../config/apiConfig.json' );
const s3Bucket = apiConfig.S3.bucket;
const s3BucketKey = apiConfig.S3.news_key;
const Model = require( `${ROOT_PATH}/${MODELS_PATH}/news.model.js` );
const Model = require( '../../../lib/Models/news.model' );
const generic = new GenericHandler( Model );
const getList = async(req, res) => {

View File

@@ -1,7 +1,6 @@
"use strict";
const { ROOT_PATH, LIB_PATH, MODELS_PATH, API_CONFIG } = process.env;
const { downloadFile } = require(`${ROOT_PATH}/${LIB_PATH}/Misc`);
const apiConfig = require( `${ROOT_PATH}/${API_CONFIG}` );
const { downloadFile } = require('../../../lib/3R/S3/index');
const apiConfig = require('../../../config/apiConfig.json');
const s3Bucket = apiConfig.S3.bucket;
const s3BucketKey = apiConfig.S3.load_attachments_key;

View File

@@ -21,12 +21,18 @@
"date":"03/2024"
},
"S3" : {
"accessKeyId": "AKIAXTQEUF6MLCHTUIKW",
"secretAccessKey": "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",
"bucket": "enruta",
"bucket": "etaviaporte",
"load_attachments_key":"loadattachments",
"news_key":"news",
"region": "us-west-1"
"driver":{
"minio":{
"endPoint": "api.minio.etaviaporte.jcruzbaas.com",
"port": 443,
"useSSL": true,
"accessKey": "Ygw68RydX2mvktAwjFEj",
"secretKey": "FERMPPXAu68YWgu4enWUeZPINRcjIrhw8BaQM6ur"
}
}
},
"sendgrid" : {
"HOST": "smtp.sendgrid.net",

View File

@@ -1,48 +0,0 @@
{
"authentication": {
"pwdSecret":"Nx2g_IWo2Zt_LS$+",
"jwtSecret":"9o3BBz0EsrwXliwEJ/SFuywZoN8=",
"jwtTimeout":24,
"jwtRenewalTimeout":720,
"tokenSecret":"9Z'jMt|(h_f(&/S+zv.K",
"jwtOptions": {
"header": {
"typ": "access"
},
"audience": "https://www.etaviaporte.com",
"issuer": "etaviaporte",
"algorithm": "HS256",
"expiresIn": "1d"
}
},
"version" : {
"version" : "1.1.1",
"name": "ETA Beta",
"date":"03/2024"
},
"S3" : {
"accessKeyId": "AKIAXTQEUF6MLCHTUIKW",
"secretAccessKey": "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",
"bucket": "enruta",
"load_attachments_key":"loadattachments",
"news_key":"news",
"region": "us-west-1"
},
"sendgrid" : {
"HOST": "smtp.sendgrid.net",
"PORT": "465",
"username": "apikey",
"API_KEY": "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",
"FROM": "noreply@etaviaporte.com"
},
"email_standalone" : {
"host": "smtp.hostinger.com",
"port": "465",
"secure": true,
"auth": {
"user": "noreply@etaviaporte.com",
"pass": "-)WJt[oP~P$`76Q4"
}
},
"mongodb": "mongodb://localhost/etaviaporte?retryWrites=true&w=majority"
}

41
v1/src/lib/3R/S3/index.js Normal file
View File

@@ -0,0 +1,41 @@
"use strict";
const Minio = require( 'minio' );
const apiConfig = require( '../../../config/apiConfig.json' );
// MinIO connection settings live under the S3.driver.minio section of the API config.
const { endPoint, port, useSSL, accessKey, secretKey } = apiConfig.S3.driver.minio;
// Single shared client instance reused by every upload/download in this module.
const minioClient = new Minio.Client({ endPoint, port, useSSL, accessKey, secretKey });
/**
 * Fetch an object from MinIO and buffer it fully in memory.
 *
 * @param {string} bucket - MinIO bucket name.
 * @param {string} key - Key prefix the object is stored under.
 * @param {string} obj_id - Object identifier; the stored name is `${key}/${obj_id}`.
 * @returns {Promise<{ContentType: string|undefined, Body: Buffer}>} Shaped like the
 *   old AWS SDK GetObject response so existing callers keep working.
 * @throws Propagates MinIO client errors (e.g. object not found) to the caller.
 */
async function downloadFile( bucket, key, obj_id ){
    const objName = `${key}/${obj_id}`;
    // statObject (content type) and getObject (data stream) are independent
    // requests, so issue them in parallel instead of awaiting serially.
    const [ stat, dataStream ] = await Promise.all([
        minioClient.statObject( bucket, objName ),
        minioClient.getObject( bucket, objName )
    ]);
    // Drain the stream into a single Buffer; callers expect the full body,
    // not a stream. NOTE(review): this buffers the whole object in memory —
    // acceptable for small attachments, revisit if large files are expected.
    const chunks = [];
    for await ( const chunk of dataStream ){
        chunks.push( chunk );
    }
    return {
        ContentType : stat.metaData['content-type'],
        Body : Buffer.concat( chunks )
    };
}
/**
 * Upload a file's contents to MinIO under `${key}/${obj_id}`.
 *
 * @param {string} bucket - MinIO bucket name.
 * @param {string} key - Key prefix to store the object under.
 * @param {string} obj_id - Object identifier appended to the key.
 * @param {{data: Buffer, size: number, mimetype: string}} file - Uploaded file
 *   object (presumably express-fileupload's shape — data/size/mimetype; confirm
 *   against the route handlers).
 * @returns {Promise<Object>} The MinIO putObject response (etag/versionId).
 */
async function uploadFile( bucket, key, obj_id, file ){
    const objName = `${key}/${obj_id}`;
    // Bug fix: the response was previously assigned to an unused local and the
    // function returned undefined; the pre-migration AWS implementation returned
    // the upload response, so restore that contract for callers that need it.
    // Store the original mimetype so downloads can serve the correct content type.
    return minioClient.putObject( bucket, objName, file.data, file.size, {
        'content-type' : file.mimetype
    } );
}
module.exports = { downloadFile, uploadFile };