
coalesce options `bucket` and `getKey` (#5169)

* coalesce options `bucket` and `getKey`

this is a breaking change!
also:
- remove `req` because it's an internal, unstable API that might change at any time and should not be used directly
- use a params object for the `getKey`/`bucket` functions instead of positional arguments, to make them cleaner and easier to extend with more data in the future (see the sketch below)
- add metadata to `bucket` whenever we are aware of it
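
For illustration, a minimal before/after sketch of the new call shape. This is not code from this change; the `s3Options` name and the `'my-bucket'` value are placeholders.

```js
// Hypothetical sketch of the signature change, not code from this commit.
const crypto = require('node:crypto')

// Before (positional arguments):
//   getKey: (req, filename, metadata) => `${crypto.randomUUID()}-${filename}`,
//   bucket: (req, metadata) => 'my-bucket',

// After (a single params object, so more data can be added later):
const s3Options = {
  getKey: ({ req, filename, metadata }) => `${crypto.randomUUID()}-${filename}`,
  bucket: ({ req, filename, metadata }) => 'my-bucket',
}
```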

* fix lint

* Apply suggestions from code review

Co-authored-by: Antoine du Hamel <antoine@transloadit.com>

* add back `Request` object

also add filename to `bucket` for consistency

* add migration steps

* fix lint

* Update docs/guides/migration-guides.md

Co-authored-by: Antoine du Hamel <antoine@transloadit.com>

---------

Co-authored-by: Antoine du Hamel <antoine@transloadit.com>
Mikael Finstad, 10 months ago
parent commit 151da8bdfe

+ 13 - 16
docs/companion.md

@@ -283,7 +283,7 @@ const options = {
 		endpoint: 'https://{service}.{region}.amazonaws.com',
 		conditions: [],
 		useAccelerateEndpoint: false,
-		getKey: (req, filename) => `${crypto.randomUUID()}-${filename}`,
+		getKey: ({ filename }) => `${crypto.randomUUID()}-${filename}`,
 		expires: 800, // seconds
 	},
 	allowLocalUrls: false,
@@ -473,13 +473,14 @@ from the AWS SDK.
 
 The name of the bucket to store uploaded files in.
 
-It can be function that returns the name of the bucket as a `string` and takes
-the following arguments:
+A `string` or function that returns the name of the bucket as a `string` and
+takes one argument which is an object with the following properties:
 
-- [`http.IncomingMessage`][], the HTTP request (will be `null` for remote
-  uploads)
-- metadata provided by the user for the file (will be `undefined` for local
-  uploads)
+- `filename`, the original name of the uploaded file;
- `metadata` provided by the user for the file (will only be provided during the
  initial calls for each uploaded file, otherwise it will be `undefined`);
+- `req`, Express.js `Request` object. Do not use any Companion internals from
+  the req object, as these might change in any minor version of Companion.
 
 ##### `s3.region` `COMPANION_AWS_REGION`
 
@@ -508,18 +509,16 @@ expected, please
 [open an issue on the Uppy repository](https://github.com/transloadit/uppy/issues/new)
 so we can document it here.
 
-##### `s3.getKey(req, filename, metadata)`
+##### `s3.getKey({ filename, metadata, req })`
 
 Get the key name for a file. The key is the file path to which the file will be
 uploaded in your bucket. This option should be a function receiving three
 arguments:
 
-- `req` [`http.IncomingMessage`][], the HTTP request, for _regular_ S3 uploads
-  using the `@uppy/aws-s3` plugin. This parameter is _not_ available for
-  multipart uploads using the `@uppy/aws-s3` or `@uppy/aws-s3-multipart`
-  plugins. This parameter is `null` for remote uploads.
 - `filename`, the original name of the uploaded file;
 - `metadata`, user-provided metadata for the file.
+- `req`, Express.js `Request` object. Do not use any Companion internals from
+  the req object, as these might change in any minor version of Companion.
 
 This function should return a string `key`. The `req` parameter can be used to
 upload to a user-specific folder in your bucket, for example:
@@ -530,7 +529,7 @@ app.use(
 	uppy.app({
 		providerOptions: {
 			s3: {
-				getKey: (req, filename, metadata) => `${req.user.id}/${filename}`,
+				getKey: ({ req, filename, metadata }) => `${req.user.id}/${filename}`,
 				/* auth options */
 			},
 		},
@@ -546,7 +545,7 @@ app.use(
 	uppy.app({
 		providerOptions: {
 			s3: {
-				getKey: (req, filename, metadata) => filename,
+				getKey: ({ filename, metadata }) => filename,
 			},
 		},
 	}),
@@ -909,8 +908,6 @@ This would get the Companion instance running on `http://localhost:3020`. It
 uses [`node --watch`](https://nodejs.org/api/cli.html#--watch) so it will
 automatically restart when files are changed.
 
-[`http.incomingmessage`]:
-	https://nodejs.org/api/http.html#class-httpincomingmessage
 [box]: /docs/box
 [dropbox]: /docs/dropbox
 [facebook]: /docs/facebook
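
The updated `s3.bucket` docs above describe the function form but show no example, so here is a hedged sketch of picking a bucket per user with the new object argument. Illustrative only, not part of this commit: `req.user` is assumed to be set by your own Express auth middleware, and the option/bucket names are placeholders.

```js
// Illustrative sketch: per-user bucket selection with the new object signature.
const crypto = require('node:crypto')

const s3ProviderOptions = {
  // `req.user` is hypothetical (your own middleware); remote uploads may not
  // carry it, so fall back to a default bucket.
  bucket: ({ req, metadata, filename }) =>
    req?.user?.id ? `uploads-${req.user.id}` : 'uploads-default',
  getKey: ({ filename }) => `${crypto.randomUUID()}-${filename}`,
  key: process.env.COMPANION_AWS_KEY,
  secret: process.env.COMPANION_AWS_SECRET,
}
```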

+ 4 - 0
docs/guides/migration-guides.md

@@ -11,6 +11,10 @@ These cover all the major Uppy versions and how to migrate to them.
 - The URL endpoint (used by the `Url`/`Link` plugin) is now turned off by
   default and must be explicitly enabled with
   `COMPANION_ENABLE_URL_ENDPOINT=true` or `enableUrlEndpoint: true`.
+- The option `getKey(req, filename, metadata)` has changed signature to
+  `getKey({ filename, metadata, req })`.
+- The option `bucket(req, metadata)` has changed signature to
+  `bucketOrFn({ req, metadata, filename })`.
 - Custom provider breaking changes. If you have not implemented a custom
   provider, you should not be affected.
   - The static `getExtraConfig` property has been renamed to

+ 1 - 1
examples/aws-companion/server.cjs

@@ -29,7 +29,7 @@ const options = {
     },
   },
   s3: {
-    getKey: (req, filename) => `${crypto.randomUUID()}-${filename}`,
+    getKey: ({ filename }) => `${crypto.randomUUID()}-${filename}`,
     key: process.env.COMPANION_AWS_KEY,
     secret: process.env.COMPANION_AWS_SECRET,
     bucket: process.env.COMPANION_AWS_BUCKET,

+ 1 - 1
examples/digitalocean-spaces/server.cjs

@@ -37,7 +37,7 @@ const { app: companionApp } = companion.app({
   s3: {
     // This is the crucial part; set an endpoint template for the service you want to use.
     endpoint: 'https://{region}.digitaloceanspaces.com',
-    getKey: (req, filename) => `${crypto.randomUUID()}-${filename}`,
+    getKey: ({ filename }) => `${crypto.randomUUID()}-${filename}`,
 
     key: process.env.COMPANION_AWS_KEY,
     secret: process.env.COMPANION_AWS_SECRET,

+ 14 - 11
packages/@uppy/companion/src/server/Uploader.js

@@ -220,7 +220,7 @@ class Uploader {
     if (this.readStream) this.readStream.destroy(err)
   }
 
-  async _uploadByProtocol() {
+  async _uploadByProtocol(req) {
     // todo a default protocol should not be set. We should ensure that the user specifies their protocol.
     // after we drop old versions of uppy client we can remove this
     const protocol = this.options.protocol || PROTOCOLS.multipart
@@ -229,7 +229,7 @@ class Uploader {
       case PROTOCOLS.multipart:
         return this.#uploadMultipart(this.readStream)
       case PROTOCOLS.s3Multipart:
-        return this.#uploadS3Multipart(this.readStream)
+        return this.#uploadS3Multipart(this.readStream, req)
       case PROTOCOLS.tus:
         return this.#uploadTus(this.readStream)
       default:
@@ -271,8 +271,9 @@ class Uploader {
   /**
    *
    * @param {import('stream').Readable} stream
+   * @param {import('express').Request} req
    */
-  async uploadStream(stream) {
+  async uploadStream(stream, req) {
     try {
       if (this.#uploadState !== states.idle) throw new Error('Can only start an upload in the idle state')
       if (this.readStream) throw new Error('Already uploading')
@@ -290,7 +291,7 @@ class Uploader {
       if (this.#uploadState !== states.uploading) return undefined
 
       const { url, extraData } = await Promise.race([
-        this._uploadByProtocol(),
+        this._uploadByProtocol(req),
         // If we don't handle stream errors, we get unhandled error in node.
         new Promise((resolve, reject) => this.readStream.on('error', reject)),
       ])
@@ -310,12 +311,13 @@ class Uploader {
   /**
    *
    * @param {import('stream').Readable} stream
+   * @param {import('express').Request} req
    */
-  async tryUploadStream(stream) {
+  async tryUploadStream(stream, req) {
     try {
       emitter().emit('upload-start', { token: this.token })
 
-      const ret = await this.uploadStream(stream)
+      const ret = await this.uploadStream(stream, req)
       if (!ret) return
       const { url, extraData } = ret
       this.#emitSuccess(url, extraData)
@@ -637,7 +639,7 @@ class Uploader {
   /**
    * Upload the file to S3 using a Multipart upload.
    */
-  async #uploadS3Multipart(stream) {
+  async #uploadS3Multipart(stream, req) {
     if (!this.options.s3) {
       throw new Error('The S3 client is not configured on this companion instance.')
     }
@@ -647,13 +649,14 @@ class Uploader {
      * @type {{client: import('@aws-sdk/client-s3').S3Client, options: Record<string, any>}}
      */
     const s3Options = this.options.s3
+    const { metadata } = this.options
     const { client, options } = s3Options
 
     const params = {
-      Bucket: getBucket(options.bucket, null, this.options.metadata),
-      Key: options.getKey(null, filename, this.options.metadata),
-      ContentType: this.options.metadata.type,
-      Metadata: rfc2047EncodeMetadata(this.options.metadata),
+      Bucket: getBucket({ bucketOrFn: options.bucket, req, metadata }),
+      Key: options.getKey({ req, filename, metadata }),
+      ContentType: metadata.type,
+      Metadata: rfc2047EncodeMetadata(metadata),
       Body: stream,
     }
 

+ 15 - 11
packages/@uppy/companion/src/server/controllers/s3.js

@@ -52,10 +52,11 @@ module.exports = function s3 (config) {
     const client = getS3Client(req, res)
     if (!client) return
 
-    const bucket = getBucket(config.bucket, req)
+    const { metadata = {}, filename } = req.query
 
-    const metadata = req.query.metadata || {}
-    const key = config.getKey(req, req.query.filename, metadata)
+    const bucket = getBucket({ bucketOrFn: config.bucket, req, filename, metadata })
+
+    const key = config.getKey({ req, filename, metadata })
     if (typeof key !== 'string') {
       res.status(500).json({ error: 'S3 uploads are misconfigured: filename returned from `getKey` must be a string' })
       return
@@ -106,8 +107,12 @@ module.exports = function s3 (config) {
     const client = getS3Client(req, res)
     if (!client) return
 
-    const key = config.getKey(req, req.body.filename, req.body.metadata || {})
-    const { type, metadata } = req.body
+    const { type, metadata = {}, filename } = req.body
+
+    const key = config.getKey({ req, filename, metadata })
+
+    const bucket = getBucket({ bucketOrFn: config.bucket, req, filename, metadata })
+
     if (typeof key !== 'string') {
       res.status(500).json({ error: 's3: filename returned from `getKey` must be a string' })
       return
@@ -116,7 +121,6 @@ module.exports = function s3 (config) {
       res.status(400).json({ error: 's3: content type must be a string' })
       return
     }
-    const bucket = getBucket(config.bucket, req)
 
     const params = {
       Bucket: bucket,
@@ -160,7 +164,7 @@ module.exports = function s3 (config) {
       return
     }
 
-    const bucket = getBucket(config.bucket, req)
+    const bucket = getBucket({ bucketOrFn: config.bucket, req })
 
     const parts = []
 
@@ -211,7 +215,7 @@ module.exports = function s3 (config) {
       return
     }
 
-    const bucket = getBucket(config.bucket, req)
+    const bucket = getBucket({ bucketOrFn: config.bucket, req })
 
     getSignedUrl(client, new UploadPartCommand({
       Bucket: bucket,
@@ -260,7 +264,7 @@ module.exports = function s3 (config) {
       return
     }
 
-    const bucket = getBucket(config.bucket, req)
+    const bucket = getBucket({ bucketOrFn: config.bucket, req })
 
     Promise.all(
       partNumbersArray.map((partNumber) => {
@@ -303,7 +307,7 @@ module.exports = function s3 (config) {
       return
     }
 
-    const bucket = getBucket(config.bucket, req)
+    const bucket = getBucket({ bucketOrFn: config.bucket, req })
 
     client.send(new AbortMultipartUploadCommand({
       Bucket: bucket,
@@ -344,7 +348,7 @@ module.exports = function s3 (config) {
       return
     }
 
-    const bucket = getBucket(config.bucket, req)
+    const bucket = getBucket({ bucketOrFn: config.bucket, req })
 
     client.send(new CompleteMultipartUploadCommand({
       Bucket: bucket,

+ 1 - 1
packages/@uppy/companion/src/server/helpers/upload.js

@@ -21,7 +21,7 @@ async function startDownUpload({ req, res, getSize, download }) {
         await uploader.awaitReady(clientSocketConnectTimeout)
         logger.debug('Socket connection received. Starting remote download/upload.', null, req.id)
 
-        await uploader.tryUploadStream(stream)
+        await uploader.tryUploadStream(stream, req)
       })().catch((err) => logger.error(err))
 
     // Respond the request

+ 17 - 3
packages/@uppy/companion/src/server/helpers/utils.js

@@ -146,7 +146,7 @@ module.exports.decrypt = (encrypted, secret) => {
   return decrypted
 }
 
-module.exports.defaultGetKey = (req, filename) => `${crypto.randomUUID()}-${filename}`
+module.exports.defaultGetKey = ({ filename }) => `${crypto.randomUUID()}-${filename}`
 
 class StreamHttpJsonError extends Error {
   statusCode
@@ -207,8 +207,22 @@ module.exports.rfc2047EncodeMetadata = (metadata) => (
   Object.fromEntries(Object.entries(metadata).map((entry) => entry.map(rfc2047Encode)))
 )
 
-module.exports.getBucket = (bucketOrFn, req, metadata) => {
-  const bucket = typeof bucketOrFn === 'function' ? bucketOrFn(req, metadata) : bucketOrFn
+/**
+ * 
+ * @param {{
+ * bucketOrFn: string | ((a: {
+ * req: import('express').Request,
+ * metadata: Record<string, string>,
+ * filename: string | undefined,
+ * }) => string),
+ * req: import('express').Request,
+ * metadata?: Record<string, string>,
+ * filename?: string,
+ * }} param0 
+ * @returns 
+ */
+module.exports.getBucket = ({ bucketOrFn, req, metadata, filename }) => {
+  const bucket = typeof bucketOrFn === 'function' ? bucketOrFn({ req, metadata, filename }) : bucketOrFn
 
   if (typeof bucket !== 'string' || bucket === '') {
     // This means a misconfiguration or bug

+ 5 - 3
packages/@uppy/companion/test/__tests__/uploader.js

@@ -22,6 +22,8 @@ process.env.COMPANION_DATADIR = './test/output'
 process.env.COMPANION_DOMAIN = 'localhost:3020'
 const { companionOptions } = standalone()
 
+const mockReq = {}
+
 describe('uploader with tus protocol', () => {
   test('uploader respects uploadUrls', async () => {
     const opts = {
@@ -87,7 +89,7 @@ describe('uploader with tus protocol', () => {
     })
     socketClient.onUploadSuccess(uploadToken, onUploadSuccess)
     await promise
-    await uploader.tryUploadStream(stream)
+    await uploader.tryUploadStream(stream, mockReq)
 
     expect(firstReceivedProgress).toBe(8)
 
@@ -136,7 +138,7 @@ describe('uploader with tus protocol', () => {
     return new Promise((resolve, reject) => {
       // validate that the test is resolved on socket connection
       uploader.awaitReady(60000).then(() => {
-        uploader.tryUploadStream(stream).then(() => {
+        uploader.tryUploadStream(stream, mockReq).then(() => {
           try {
             expect(fs.existsSync(uploader.path)).toBe(false)
             resolve()
@@ -286,7 +288,7 @@ describe('uploader with tus protocol', () => {
     const uploadToken = uploader.token
 
     // validate that the test is resolved on socket connection
-    uploader.awaitReady(60000).then(() => uploader.tryUploadStream(stream))
+    uploader.awaitReady(60000).then(() => uploader.tryUploadStream(stream, mockReq))
     socketClient.connect(uploadToken)
 
     return new Promise((resolve, reject) => {