Skip to content

Commit

Permalink
refactor: refactor image processor (#18)
Browse files Browse the repository at this point in the history
* feat(files): add optimize image and set health check to 01:00 every day

* chore: remove console log
  • Loading branch information
nguyenthienthanh authored and duongdev committed Apr 1, 2019
1 parent dcec40a commit 28dcace
Show file tree
Hide file tree
Showing 5 changed files with 193 additions and 63 deletions.
3 changes: 2 additions & 1 deletion .env
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,5 @@ MAX_FILE_SIZE=100 # in MB

REDIS_URI=redis://127.0.0.1:6379
CACHE_TTL=60
DELAY_AFTER_UPLOADED=5000
DELAY_AFTER_UPLOADED=5000
HEALTH_CHECK_CRON=0 1 * * *
152 changes: 139 additions & 13 deletions src/functions/files.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,31 @@ import logger from 'utils/logger'

const Bucket = process.env.AWS_S3_BUCKET

/** Every size variant generated for an uploaded image. */
type SizeName = 'original' | 'large' | 'medium' | 'small' | 'thumb'

/**
 * Maximum width (px) of each generated variant, largest first.
 * 'original' is capped too, so oversized uploads get downscaled.
 */
const imageSizes: Array<{ name: SizeName; maxWidth: number }> = [
  { name: 'original', maxWidth: 1366 },
  { name: 'large', maxWidth: 1024 },
  { name: 'medium', maxWidth: 768 },
  { name: 'small', maxWidth: 448 },
  { name: 'thumb', maxWidth: 128 },
]

export const s3 = new S3({
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
Expand Down Expand Up @@ -85,16 +110,16 @@ const readFileFromS3 = (fileName: string) => {
export const readFileBuffer = async (fileName: string) => {
logger.verbose(`[readFileBuffer][%s] Getting file buffer`, fileName)

const s3Buffer = await readFileFromS3(fileName)
if (s3Buffer) {
return s3Buffer
}

if (fs.existsSync(getFilePath(fileName))) {
logger.verbose(`[readFileBuffer][%s] File found on local`, fileName)
return fs.readFileSync(getFilePath(fileName))
}

const s3Buffer = await readFileFromS3(fileName)
if (s3Buffer) {
return s3Buffer
}

logger.verbose(
`[readFileBuffer][%s] File not found on local or s3. Return null`,
fileName,
Expand Down Expand Up @@ -125,6 +150,47 @@ export const getFileMimeType = async (fileName: string) => {
return await getFileType(fileBuffer)
}

/**
 * Derives the stored file name for a given size variant.
 *
 * 'original' keeps the name untouched; other sizes insert a `-{size}` suffix
 * immediately before the file extension ("photo.jpg" -> "photo-thumb.jpg"),
 * or append it when the name has no extension.
 */
const generateFileNameWithSize = (fileName: string, sizeName: SizeName) => {
  if (sizeName === 'original') {
    return fileName
  }

  // Use the LAST dot so multi-dot names keep their extension intact
  // ("archive.tar.gz" -> "archive.tar-large.gz", not "archive-large.tar.gz").
  const dotIndex = fileName.lastIndexOf('.')

  return dotIndex === -1
    ? `${fileName}-${sizeName}`
    : `${fileName.slice(0, dotIndex)}-${sizeName}${fileName.slice(dotIndex)}`
}

export const resizeImage = (fileName: string, buffer: Buffer) => {
return Promise.all(
imageSizes.map(async size => {
const resizedBuffer = await sharp(buffer)
.resize(size.maxWidth, null, {
withoutEnlargement: true,
})
.jpeg()
.toBuffer()

const filePath = path.resolve(
'uploads/',
generateFileNameWithSize(fileName, size.name),
)

return fs.writeFileSync(filePath, resizedBuffer)
}),
)
}

export const uploadImagesToS3AndRemove = (fileName: string) => {
return Promise.all(
imageSizes.map(async size => {
const fileNameWithSize = generateFileNameWithSize(fileName, size.name)

if (fs.existsSync(getFilePath(fileNameWithSize))) {
await uploadFileToS3(fileNameWithSize)
removeFile(fileNameWithSize)
}
}),
)
}

export const processAndUpload = async (fileName: string) => {
const filePath = getFilePath(fileName)
const imageLocation = await getFileLocation(fileName)
Expand All @@ -149,20 +215,80 @@ export const processAndUpload = async (fileName: string) => {
) {
const originalImageBuffer = await readFileBuffer(fileName)

const convertedImageBuffer = await sharp(originalImageBuffer)
.webp()
.toBuffer()
fs.writeFileSync(filePath, convertedImageBuffer)
await resizeImage(fileName, originalImageBuffer)
}

await uploadFileToS3(fileName)
removeFile(fileName)
await uploadImagesToS3AndRemove(fileName)
}

export const getObjectUrl = (fileName: string) => {
/**
 * Re-runs image resizing for every original (non-suffixed) file already in
 * the upload directory, in batches of `maxJobs` to bound concurrent work.
 *
 * @param startAt index into the directory listing to resume from
 */
export const reProcessLocalFiles = async (startAt: number = 0) => {
  // readdirSync is synchronous — the previous `await` on it was misleading.
  const files = fs.readdirSync(UPLOAD_DIR)
  const maxJobs = 10

  for (let count = startAt; count < files.length; count += maxJobs) {
    const jobFiles = files.slice(count, count + maxJobs)

    await Promise.all(
      jobFiles.map(async (fileName, idx) => {
        // Size variants carry a '-{size}' suffix; only re-process originals.
        if (fileName.includes('-')) {
          return
        }

        logger.verbose(`[${count + idx}]: ${fileName}`)
        const buffer = await readFileBuffer(fileName)
        const fileMimeType = await getFileType(buffer)

        if (fileMimeType && fileMimeType.mime.startsWith('image')) {
          await resizeImage(fileName, buffer)
        }
      }),
    )
  }
}

/**
 * Pushes every locally-present image in the upload directory to S3 and
 * removes the local copy, in batches of `maxJobs`.
 *
 * @param startAt index into the directory listing to resume from
 */
export const reUploadImageToS3AndRemove = async (startAt: number = 0) => {
  // readdirSync is synchronous — the previous `await` on it was misleading.
  const files = fs.readdirSync(UPLOAD_DIR)
  const maxJobs = 10

  for (let count = startAt; count < files.length; count += maxJobs) {
    const jobFiles = files.slice(count, count + maxJobs)

    await Promise.all(
      jobFiles.map(async (fileName, idx) => {
        logger.verbose(`[${count + idx}]: ${fileName}`)
        const buffer = await readFileBuffer(fileName)
        const fileMimeType = await getFileType(buffer)

        if (fileMimeType && fileMimeType.mime.startsWith('image')) {
          // Only upload+remove when the file still exists locally;
          // readFileBuffer may have served the buffer from S3 instead.
          if (fs.existsSync(getFilePath(fileName))) {
            await s3
              .putObject({ Bucket, Key: fileName, Body: buffer })
              .promise()
            removeFile(fileName)
          }
        }
      }),
    )
  }
}

/**
 * Builds a signed S3 GET url for `fileName` at the requested size variant.
 *
 * @param fileName stored file name (without size suffix)
 * @param size     which variant to link to; defaults to 'original'
 */
export const getObjectUrl = (
  fileName: string,
  // Default the whole options object so legacy single-argument calls
  // (`getObjectUrl(fileName)`) don't throw destructuring `undefined`.
  { size = 'original' }: { size?: SizeName } = {},
) => {
  const fileNameWithSize = generateFileNameWithSize(fileName, size)

  return s3.getSignedUrl('getObject', {
    Bucket,
    Key: fileNameWithSize,
    // NOTE(review): Expires is in SECONDS — 60 * 24 * 7 is ~2.8 hours, not a
    // week. Confirm whether 60 * 60 * 24 * 7 was intended.
    Expires: 60 * 24 * 7,
  })
}
3 changes: 2 additions & 1 deletion src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,11 @@ app.use((error: Error, req: Request, res: Response, next: NextFunction) => {
})
})

/** Run cron job at 01:00AM every day */
serverStartingHealthCheck()
.then(() => {
imageHealthCheckQueue.add('clean-uploads-dir', null, {
repeat: { every: 500 },
repeat: { cron: process.env.HEALTH_CHECK_CRON || '0 1 * * *' },
})
app.listen(process.env.PORT, () =>
logger.info('Server has started with %o', {
Expand Down
19 changes: 11 additions & 8 deletions src/jobs/image-processor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,9 @@ import {
getFilePath,
readFileBuffer,
removeFile,
resizeImage,
uploadFileToS3,
uploadImagesToS3AndRemove,
} from 'functions/files'
import sharp = require('sharp')
import logger from 'utils/logger'
Expand Down Expand Up @@ -62,16 +64,17 @@ imageQueue.process(async (job, done) => {

job.progress(50)

const convertedImageBuffer = await sharp(originalImageBuffer)
.webp()
.toBuffer()
fs.writeFileSync(filePath, convertedImageBuffer)
// const convertedImageBuffer = await sharp(originalImageBuffer)
// .webp()
// .toBuffer()
// fs.writeFileSync(filePath, convertedImageBuffer)
await resizeImage(fileName, originalImageBuffer)
job.progress(75)
await uploadImagesToS3AndRemove(fileName)
}

job.progress(75)

await uploadFileToS3(fileName)
removeFile(fileName)
// await uploadFileToS3(fileName)
// removeFile(fileName)

done()
})
Expand Down
79 changes: 39 additions & 40 deletions src/routes/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -61,9 +61,9 @@ router.get(
return next()
}

const imageFormat = req.query.format || 'webp'
// const imageFormat = req.query.format || 'webp'

res.express_redis_cache_name = `${req.originalUrl}-${imageFormat}`
res.express_redis_cache_name = `${req.originalUrl}`
return cache.route({
binary: true,
expire: {
Expand All @@ -75,47 +75,46 @@ router.get(
},
async (req, res, next) => {
const fileName: string = req.params.fileName
const imageFormat = req.query.format
// const imageFormat = req.query.format

logger.verbose('Getting file %s', fileName)

res.redirect(getObjectUrl(fileName), 301)
return

try {
const fileBuffer = await readFileBuffer(fileName)

if (!fileBuffer) {
return res
.header('Cache-Control', 'private')
.status(404)
.sendFile(path.resolve(__dirname, '../../static/empty.webp'))
}

const optimizedFileBuffer = fileType(fileBuffer).mime.startsWith('image/')
? await (await processImage(
fileBuffer,
req.query,
imageFormat === 'jpeg' ? 'jpeg' : 'webp',
)).toBuffer()
: fileBuffer

logger.verbose(
'Downloaded file %s %s',
fileName,
fileType(fileBuffer).mime,
)

logger.info(getObjectUrl(fileName))

res
.header('Cache-Control', 'public, max-age=31536000')
.contentType(fileType(optimizedFileBuffer).mime)
.send(optimizedFileBuffer)
} catch (err) {
logger.error(err)
throw err
}
res.redirect(getObjectUrl(fileName, req.query), 301)

// try {
// const fileBuffer = await readFileBuffer(fileName)

// if (!fileBuffer) {
// return res
// .header('Cache-Control', 'private')
// .status(404)
// .sendFile(path.resolve(__dirname, '../../static/empty.webp'))
// }

// const optimizedFileBuffer = fileType(fileBuffer).mime.startsWith('image/')
// ? await (await processImage(
// fileBuffer,
// req.query,
// imageFormat === 'jpeg' ? 'jpeg' : 'webp',
// )).toBuffer()
// : fileBuffer

// logger.verbose(
// 'Downloaded file %s %s',
// fileName,
// fileType(fileBuffer).mime,
// )

// logger.info(getObjectUrl(fileName))

// res
// .header('Cache-Control', 'public, max-age=31536000')
// .contentType(fileType(optimizedFileBuffer).mime)
// .send(optimizedFileBuffer)
// } catch (err) {
// logger.error(err)
// throw err
// }
},
)

Expand Down

0 comments on commit 28dcace

Please sign in to comment.