feat: set public-read ACL and redirect to empty image if not found (#20)
* feat(files): add optimize image and set health check to 01:00 every day

* chore: remove console log

* feat(files): set public-read ACL by default and fallback default image

* Use @see in comment document

Co-Authored-By: nguyenthienthanh <[email protected]>

* refactor(files): rename and refactor rePutAllObjectsACL function

* fix(routes): getPublicUrl is not defined

* chore: use typescript jsdoc comments

* refactor(files): remove count variables

* chore: remove @param document
nguyenthienthanh authored and duongdev committed Apr 9, 2019
1 parent b76769d commit 7019eab
Showing 4 changed files with 192 additions and 97 deletions.
175 changes: 170 additions & 5 deletions src/functions/files.ts
@@ -7,6 +7,7 @@ import sharp = require('sharp')
import logger from 'utils/logger'

const Bucket = process.env.AWS_S3_BUCKET
const Region = process.env.AWS_REGION

const imageSizes: Array<{ name: SizeName; maxWidth: number }> = [
{
@@ -33,6 +34,15 @@ const imageSizes: Array<{ name: SizeName; maxWidth: number }> = [

type SizeName = 'original' | 'large' | 'medium' | 'small' | 'thumb'

type ACL =
| 'private'
| 'public-read'
| 'public-read-write'
| 'authenticated-read'
| 'aws-exec-read'
| 'bucket-owner-read'
| 'bucket-owner-full-control'

export const s3 = new S3({
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
@@ -41,6 +51,12 @@ export const s3 = new S3({

const UPLOAD_DIR = process.env.UPLOAD_DIR || 'uploads/'

export const FILE_LOCATIONS = {
LOCAL: 'local',
S3: 's3',
NOT_EXIST: 'not_exist',
}

export const getFilePath = (fileName: string) =>
path.resolve(UPLOAD_DIR, fileName)

@@ -56,18 +72,18 @@ export const getFileLocation = async (fileName: string) => {

if (existsOnS3) {
logger.verbose(`[getFileLocation][%s] file location is S3`, fileName)
return 's3'
return FILE_LOCATIONS.S3
}

const existsOnLocal = fs.existsSync(getFilePath(fileName))

if (existsOnLocal) {
logger.verbose(`[getFileLocation][%s] file location is LOCAL`, fileName)
return 'local'
return FILE_LOCATIONS.LOCAL
}

logger.verbose(`[getFileLocation][%s] file location is NOT_EXIST`, fileName)
return 'not_exist'
return FILE_LOCATIONS.NOT_EXIST
}

export const fileExists = async (fileName: string) => {
@@ -107,6 +123,15 @@ const readFileFromS3 = (fileName: string) => {
})
}

export const readFileBufferFromLocal = async (fileName: string) => {
if (fs.existsSync(getFilePath(fileName))) {
logger.verbose(`[readFileBuffer][%s] File found on local`, fileName)
return fs.readFileSync(getFilePath(fileName))
}

return
}

export const readFileBuffer = async (fileName: string) => {
logger.verbose(`[readFileBuffer][%s] Getting file buffer`, fileName)

@@ -137,7 +162,9 @@ export const uploadFileToS3 = async (fileName: string) => {

const fileBuffer = fs.readFileSync(getFilePath(fileName))

return s3.putObject({ Bucket, Key: fileName, Body: fileBuffer }).promise()
return s3
.putObject({ Bucket, Key: fileName, Body: fileBuffer, ACL: 'public-read' })
.promise()
}

export const getFileMimeType = async (fileName: string) => {
@@ -150,7 +177,14 @@ export const getFileMimeType = async (fileName: string) => {
return await getFileType(fileBuffer)
}

const generateFileNameWithSize = (fileName: string, sizeName: SizeName) => {
export const generateFileNameWithSize = (
fileName: string,
sizeName?: SizeName,
) => {
if (!sizeName) {
return fileName
}

const sizeSuffix = sizeName === 'original' ? '' : `-${sizeName}`

return /\./.test(fileName)
@@ -280,6 +314,128 @@ export const reUploadImageToS3AndRemove = async (startAt: number = 0) => {
}
}

/**
* @see https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#listObjects-property
*/
export const listObjects = ({
limit,
marker,
}: {
/** Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more. */
limit?: number
/** Specifies the key to start with when listing objects in a bucket. */
marker?: string
} = {}) => {
return s3
.listObjects({
Bucket,
// up to 1000
MaxKeys: limit,
Marker: marker,
})
.promise()
}

export const listAllObjects = async (
startAt: number = 0,
marker?: string,
prevObjects: S3.Object[] = [],
): Promise<S3.Object[]> => {
logger.verbose(
`[Get objects from ${startAt}000-${startAt + 1}000]: marker ${marker}`,
)
const response: S3.ListObjectsOutput = await listObjects({
marker,
})

const objects = prevObjects.concat(response.Contents)

if (response.IsTruncated) {
return await listAllObjects(
startAt + 1,
response.Contents.slice(-1)[0].Key,
objects,
)
}

return objects
}

export const putObjectACL = ({ key, acl }: { key: string; acl: ACL }) => {
return s3
.putObjectAcl({
Bucket,
Key: key,
ACL: acl,
})
.promise()
}

export const rePutAllErrorObjectsACL = async (
prevObjects: S3.Object[],
): Promise<void> => {
logger.verbose(`[Re put error object ACL]: ${prevObjects.length} objects`)

const objects = (await Promise.all(
prevObjects.map(async item => {
try {
await putObjectACL({ key: item.Key, acl: 'public-read' })
return null
} catch (err) {
logger.error(`[Put object ACL error]: ${item.Key}`, err)
return item
}
}),
)).filter(object => object)

if (objects.length < 1) {
logger.info(`[Re put error object ACL has finished]`)
} else {
return await rePutAllErrorObjectsACL(objects)
}
}

export const rePutAllObjectsACL = async (
startAt: number = 0,
marker?: string,
prevErrorObjects: S3.Object[] = [],
): Promise<void> => {
let errorObjects: S3.Object[] = prevErrorObjects

logger.verbose(
`[Put object ACL from ${startAt}000-${startAt + 1}000]: marker ${marker}`,
)

const response: S3.ListObjectsOutput = await listObjects({
marker,
})

await Promise.all(
response.Contents.map(async item => {
try {
return await putObjectACL({ key: item.Key, acl: 'public-read' })
} catch (err) {
errorObjects = errorObjects.concat(item)
logger.error(`[Put object ACL error]: ${item.Key}`, err)
}
}),
)

if (response.IsTruncated) {
return await rePutAllObjectsACL(
startAt + 1,
response.Contents.slice(-1)[0].Key,
errorObjects,
)
} else {
logger.info(
`[Put object ACL has finished] with ${errorObjects.length} errors`,
)

return await rePutAllErrorObjectsACL(errorObjects)
}
}

export const getObjectUrl = (
fileName: string,
{ size = 'original' }: { size?: SizeName },
@@ -292,3 +448,12 @@ export const getObjectUrl = (
Expires: 60 * 24 * 7,
})
}

export const getObjectPublicUrl = (
fileName: string,
{ size = 'original' }: { size?: SizeName } = {},
) => {
const fileNameWithSize = generateFileNameWithSize(fileName, size)

return `https://${Bucket}.s3.${Region}.amazonaws.com/${fileNameWithSize}`
}
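For reference, a minimal usage sketch of the two new exports follows. It is not part of the commit: the bucket name and region are made-up example values for `AWS_S3_BUCKET` / `AWS_REGION`, and the `functions/files` import assumes the project's existing non-relative module resolution.

```ts
// Hypothetical usage sketch, not part of this commit.
import { getObjectPublicUrl, rePutAllObjectsACL } from 'functions/files'

// With AWS_S3_BUCKET=my-bucket and AWS_REGION=ap-southeast-1 (example values),
// the unsigned public URL for the original size resolves to:
// https://my-bucket.s3.ap-southeast-1.amazonaws.com/8acd942c9940ce0a7df1a8e15d4bad81
const url = getObjectPublicUrl('8acd942c9940ce0a7df1a8e15d4bad81')
console.log(url)

// One-off migration pass: list the bucket page by page, re-apply the
// public-read ACL to every object, then retry the keys that failed.
rePutAllObjectsACL(0, undefined, []).catch(err => console.error(err))
```

Unlike `getObjectUrl`, which signs a URL that expires, the public URL only works because objects are now stored with the `public-read` ACL, which is what the migration pass above re-applies to existing keys.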
114 changes: 22 additions & 92 deletions src/routes/index.ts
@@ -1,42 +1,20 @@
import express from 'express'
import ExpressRedisCache from 'express-redis-cache'
import fileType from 'file-type'
import { getObjectUrl, readFileBuffer } from 'functions/files'
import { processImage } from 'functions/images'
import {
FILE_LOCATIONS,
generateFileNameWithSize,
getFileLocation,
getObjectPublicUrl,
} from 'functions/files'
import {
filesProcessing,
multer,
renameFilesToChecksum,
} from 'middlewares/files'
import path from 'path'
import redis from 'redis'
import logger from 'utils/logger'

const router = express.Router()

const redisClient = redis.createClient({ url: process.env.REDIS_URI })

const DEFAULT_TTL = +(process.env.CACHE_TTL || 60)

const cache = ExpressRedisCache({
client: redisClient,
prefix: 'file',
expire: DEFAULT_TTL, // 1 min,
})

cache.on('message', message => logger.verbose('Cached %s', message))
cache.on('connected', () => logger.verbose('Cache redis server connected'))
cache.on('disconnected', () => logger.verbose('Cache redis server connected'))
cache.on('error', error => logger.error('Cache redis server error %o', error))
cache.on('deprecated', deprecated =>
logger.warning('deprecated warning', {
type: deprecated.type,
name: deprecated.name,
substitute: deprecated.substitute,
file: deprecated.file,
line: deprecated.line,
}),
)
const DEFAULT_IMAGE_NAME = '8acd942c9940ce0a7df1a8e15d4bad81'

router.put(
'/images',
@@ -49,73 +27,25 @@ router.put(
},
)

router.get(
'/:fileName',
(req, res, next) => {
const { cache: enableCache = 'true' } = req.query

if (
enableCache === 'false' ||
process.env.DISABLE_EXPRESS_CACHING === 'true'
) {
return next()
}

// const imageFormat = req.query.format || 'webp'
router.get('/:fileName', async (req, res, next) => {
const fileName: string = req.params.fileName

res.express_redis_cache_name = `${req.originalUrl}`
return cache.route({
binary: true,
expire: {
200: DEFAULT_TTL,
404: 15,
xxx: 1,
},
})(req, res, next)
},
async (req, res, next) => {
const fileName: string = req.params.fileName
// const imageFormat = req.query.format

logger.verbose('Getting file %s', fileName)

res.redirect(getObjectUrl(fileName, req.query), 301)

// try {
// const fileBuffer = await readFileBuffer(fileName)
logger.verbose('Getting file %s', fileName)

// if (!fileBuffer) {
// return res
// .header('Cache-Control', 'private')
// .status(404)
// .sendFile(path.resolve(__dirname, '../../static/empty.webp'))
// }
const fileNameWithSize = generateFileNameWithSize(
fileName,
req.query && req.query.size,
)

// const optimizedFileBuffer = fileType(fileBuffer).mime.startsWith('image/')
// ? await (await processImage(
// fileBuffer,
// req.query,
// imageFormat === 'jpeg' ? 'jpeg' : 'webp',
// )).toBuffer()
// : fileBuffer
const location = await getFileLocation(fileNameWithSize)

// logger.verbose(
// 'Downloaded file %s %s',
// fileName,
// fileType(fileBuffer).mime,
// )
switch (location) {
case FILE_LOCATIONS.S3:
return res.redirect(301, getObjectPublicUrl(fileName, req.query))

// logger.info(getObjectUrl(fileName))

// res
// .header('Cache-Control', 'public, max-age=31536000')
// .contentType(fileType(optimizedFileBuffer).mime)
// .send(optimizedFileBuffer)
// } catch (err) {
// logger.error(err)
// throw err
// }
},
)
default:
return res.redirect(301, getObjectPublicUrl(DEFAULT_IMAGE_NAME))
}
})

export default router
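A rough smoke test of the new handler, sketched under two assumptions: `supertest` is available as a dev dependency (it is not added by this commit), and the project's path-alias imports (`routes`, `functions/files`) resolve in the test environment.

```ts
// Hedged smoke-test sketch; supertest and the 'routes' path alias are assumptions.
import express from 'express'
import request from 'supertest'
import router from 'routes'

const app = express()
app.use('/', router)

const run = async () => {
  // A file found on S3 is answered with a 301 to its public bucket URL
  // ('some-existing-checksum' is a placeholder key).
  await request(app).get('/some-existing-checksum').expect(301)

  // A file that exists nowhere is also redirected (301) to the default image
  // (DEFAULT_IMAGE_NAME) instead of the old 404 + static/empty.webp response.
  const res = await request(app).get('/definitely-missing-checksum')
  console.log(res.status, res.headers.location)
}

run().catch(console.error)
```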
Binary file added static/empty.jpg
Binary file removed static/empty.webp
Binary file not shown.
