Skip to content

Commit

Permalink
Merge pull request #2 from cubos-academy/feature/s3-file-upload
Browse files Browse the repository at this point in the history
feat: add support for S3 file upload
  • Loading branch information
eowfenth authored May 4, 2020
2 parents 0e09977 + 02fbf9d commit 1c2d072
Show file tree
Hide file tree
Showing 4 changed files with 30 additions and 16 deletions.
6 changes: 5 additions & 1 deletion .env-example
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,8 @@ DB_USER=
DB_PASSWORD=
DB_PORT=
JWT_SECRET=
JWT_EXPIRE_TIME=
UPLOAD_STORAGE=
AWS_ACCESS_KEY=
AWS_SECRET_KEY=
AWS_S3_BUCKETNAME=
7 changes: 4 additions & 3 deletions migrations/1588124602515_users-pictures.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,12 @@ exports.shorthands = undefined;
exports.up = pgm => {
pgm.createTable("users_pictures", {
id: { type: "UUID", primaryKey: true, default: pgm.func("uuid_generate_v4()") },
"user_id": { type: "UUID", notNull: true, foreignKey: "true" },
"user_id": { type: "UUID", notNull: true },
url: { type: "TEXT", notNull: true },
created_at: { type: "TIMESTAMP", default: pgm.func("NOW()") },
deleted_at: { type: "TIMESTAMP", default: null },
}, { isNotExists: true });
pgm.addConstraint("users", "user_id", {
pgm.addConstraint("users_pictures", "user_id", {
foreignKeys: {
columns: "id",
references: "users",
Expand All @@ -19,5 +20,5 @@ exports.up = pgm => {

exports.down = pgm => {
pgm.dropTable("users_pictures", { ifExists: true, cascade: true });
pgm.dropConstraint("users", "user_ids", { ifExists: true, cascade: true });
pgm.dropConstraint("users_pictures", "user_ids", { ifExists: true, cascade: true });
};
4 changes: 2 additions & 2 deletions src/user/controller.js
Original file line number Diff line number Diff line change
Expand Up @@ -167,9 +167,9 @@ const disable = async (req, res) => {
};

const upload = async (req, res) => {
const { file, auth_user_id: user_id } = req;
const { file = null, auth_user_id: user_id } = req;

const picture = await User.upload({ url: file.filename, user_id });
const picture = await User.upload({ url: process.env.UPLOAD_STORAGE === "diskstorage" ? file.filename : file.location, user_id });

if (!picture || picture.error) {
return res.json(picture);
Expand Down
29 changes: 19 additions & 10 deletions src/utils/upload/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,21 +13,24 @@ const fileCheck = (req, file, next) => {
};

// S3 client configured from environment variables instead of hard-coded
// (previously empty) credentials. Set AWS_ACCESS_KEY / AWS_SECRET_KEY in .env.
// NOTE(review): no region is passed — presumably the SDK default/env region
// applies; confirm against deployment configuration.
const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY,
  secretAccessKey: process.env.AWS_SECRET_KEY,
});

// Multer options for S3-backed uploads (used when UPLOAD_STORAGE is not
// "diskstorage"). Plain options object so the caller decides which backend
// to hand to multer().
const S3Storage = {
  storage: multerS3({
    s3: s3,
    // NOTE(review): uploaded objects are world-readable with this ACL —
    // confirm "public-read" is intended for user pictures.
    acl: 'public-read',
    bucket: process.env.AWS_S3_BUCKETNAME,
    metadata: (req, file, cb) => { cb(null, { fieldName: file.fieldname }); },
    // Object key: timestamp + random suffix + original extension. Avoids
    // collisions and never trusts the client-supplied filename.
    key: (req, file, cb) => {
      const uniquePrefix = Date.now() + '-' + Math.round(Math.random() * 1E9);
      // assumes `path` is required at the top of this file — TODO confirm
      const fileExtension = path.extname(file.originalname);
      cb(null, uniquePrefix + fileExtension);
    },
  }),
  limits: { fileSize: 5 * 1024 * 1024 }, // 5mb
  fileFilter: fileCheck,
};

const storage = multer.diskStorage({
destination: function (req, file, cb) {
Expand All @@ -40,11 +43,17 @@ const storage = multer.diskStorage({
},
});

// Multer options for local-disk uploads (used when UPLOAD_STORAGE ===
// "diskstorage"). Mirrors S3Storage: same size limit and file filter,
// only the storage engine differs.
const DiskStorage = {
  storage,
  limits: { fileSize: 5 * 1024 * 1024 }, // 5mb
  fileFilter: fileCheck,
};

// Single-file upload middleware (form field "file"); the storage backend is
// selected once at module load from the UPLOAD_STORAGE environment variable.
const useDiskStorage = process.env.UPLOAD_STORAGE === "diskstorage";
const Uploader = multer(useDiskStorage ? DiskStorage : S3Storage).single('file');

const fileUpload = (req, res, next) => {
Uploader(req, res, (err) => {
Expand Down

0 comments on commit 1c2d072

Please sign in to comment.