Merged
37 changes: 36 additions & 1 deletion src/http/routes/s3/commands/get-object.ts
@@ -1,6 +1,7 @@
import { S3ProtocolHandler } from '@storage/protocols/s3/s3-handler'
import { S3Router } from '../router'
import { ROUTE_OPERATIONS } from '../../operations'
import { ERRORS } from '@internal/errors'

const GetObjectInput = {
summary: 'Get Object',
@@ -20,7 +21,17 @@ const GetObjectInput = {
'if-modified-since': { type: 'string' },
},
},
Querystring: {},
Querystring: {
type: 'object',
properties: {
'response-content-disposition': { type: 'string' },
'response-content-type': { type: 'string' },
'response-cache-control': { type: 'string' },
'response-content-encoding': { type: 'string' },
'response-content-language': { type: 'string' },
'response-expires': { type: 'string' },
},
},
} as const

const GetObjectTagging = {
@@ -42,6 +53,16 @@ const GetObjectTagging = {
},
} as const

function parseDateHeader(input?: string) {
if (input) {
const parsedDate = new Date(input)
if (isNaN(parsedDate.getTime())) {
throw ERRORS.InvalidParameter('response-expires')
}
return parsedDate
}
}

export default function GetObject(s3Router: S3Router) {
s3Router.get(
'/:Bucket/*?tagging',
@@ -63,6 +84,7 @@ export default function GetObject(s3Router: S3Router) {
const s3Protocol = new S3ProtocolHandler(ctx.storage, ctx.tenantId, ctx.owner)
const ifModifiedSince = req.Headers?.['if-modified-since']
const icebergBucket = ctx.req.internalIcebergBucketName
const responseExpires = parseDateHeader(req.Querystring?.['response-expires'])

return s3Protocol.getObject(
{
@@ -71,6 +93,12 @@ export default function GetObject(s3Router: S3Router) {
Range: req.Headers?.['range'],
IfNoneMatch: req.Headers?.['if-none-match'],
IfModifiedSince: ifModifiedSince ? new Date(ifModifiedSince) : undefined,
ResponseContentDisposition: req.Querystring?.['response-content-disposition'],
ResponseContentType: req.Querystring?.['response-content-type'],
ResponseCacheControl: req.Querystring?.['response-cache-control'],
ResponseContentEncoding: req.Querystring?.['response-content-encoding'],
ResponseContentLanguage: req.Querystring?.['response-content-language'],
ResponseExpires: responseExpires,
},
{
skipDbCheck: true,
@@ -86,6 +114,7 @@ export default function GetObject(s3Router: S3Router) {
(req, ctx) => {
const s3Protocol = new S3ProtocolHandler(ctx.storage, ctx.tenantId, ctx.owner)
const ifModifiedSince = req.Headers?.['if-modified-since']
const responseExpires = parseDateHeader(req.Querystring?.['response-expires'])

return s3Protocol.getObject(
{
@@ -94,6 +123,12 @@ export default function GetObject(s3Router: S3Router) {
Range: req.Headers?.['range'],
IfNoneMatch: req.Headers?.['if-none-match'],
IfModifiedSince: ifModifiedSince ? new Date(ifModifiedSince) : undefined,
ResponseContentDisposition: req.Querystring?.['response-content-disposition'],
ResponseContentType: req.Querystring?.['response-content-type'],
ResponseCacheControl: req.Querystring?.['response-cache-control'],
ResponseContentEncoding: req.Querystring?.['response-content-encoding'],
ResponseContentLanguage: req.Querystring?.['response-content-language'],
ResponseExpires: responseExpires,
},
{
signal: ctx.signals.response,
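
For reference, the new Querystring entries map onto the standard S3 response-header override parameters, and response-expires is validated by parseDateHeader above (an unparseable value is rejected with ERRORS.InvalidParameter). A minimal sketch of the kind of request the route now accepts; the endpoint, bucket and key below are hypothetical:

// Hypothetical storage endpoint, bucket and key.
const url = new URL('https://storage.example.com/s3/my-bucket/photos/report.jpg')

// Each response-* parameter overrides the matching header on the GET response.
url.searchParams.set('response-content-disposition', 'attachment; filename="report.pdf"')
url.searchParams.set('response-content-type', 'application/pdf')
url.searchParams.set('response-cache-control', 'no-cache')

// Must be a value new Date() can parse, otherwise parseDateHeader() throws
// InvalidParameter('response-expires').
url.searchParams.set('response-expires', new Date('2030-01-01T00:00:00Z').toUTCString())
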
58 changes: 40 additions & 18 deletions src/storage/protocols/s3/s3-handler.ts
@@ -24,7 +24,7 @@ import { PassThrough, Readable } from 'stream'
import stream from 'stream/promises'
import { getFileSizeLimit, mustBeValidBucketName, mustBeValidKey } from '../../limits'
import { ERRORS } from '@internal/errors'
import { S3MultipartUpload, Obj } from '../../schemas'
import { S3MultipartUpload } from '../../schemas'
import { decrypt, encrypt } from '@internal/auth'
import { ByteLimitTransformStream } from './byte-limit-stream'
import { logger, logSchema } from '@internal/monitoring'
@@ -810,7 +810,7 @@ export class S3ProtocolHandler {
throw ERRORS.NoSuchKey(Key)
}

let metadataHeaders: Record<string, any> = {}
let metadataHeaders: Record<string, unknown> = {}

if (object.user_metadata) {
metadataHeaders = toAwsMeatadataHeaders(object.user_metadata)
@@ -873,7 +873,7 @@ export class S3ProtocolHandler {
const key = command.Key as string

let version: string | undefined
let userMetadata: Record<string, any> | undefined | null
let userMetadata: Record<string, unknown> | undefined | null

if (!options?.skipDbCheck) {
const object = await this.storage.from(bucket).findObject(key, 'version,user_metadata')
@@ -897,22 +897,44 @@ export class S3ProtocolHandler {
options?.signal
)

let metadataHeaders: Record<string, any> = {}
let metadataHeaders: Record<string, unknown> = {}

if (userMetadata) {
metadataHeaders = toAwsMeatadataHeaders(userMetadata)
}

const headers: Record<string, string> = {
'cache-control': response.metadata.cacheControl,
'content-length': response.metadata.contentLength?.toString() || '0',
'content-range': response.metadata.contentRange?.toString() || '',
'content-type': response.metadata.mimetype,
etag: response.metadata.eTag,
'last-modified': response.metadata.lastModified?.toUTCString() || '',
...metadataHeaders,
}

// Handle response header overrides
if (command.ResponseContentDisposition) {
headers['content-disposition'] = command.ResponseContentDisposition
}
if (command.ResponseContentType) {
headers['content-type'] = command.ResponseContentType
}
if (command.ResponseCacheControl) {
headers['cache-control'] = command.ResponseCacheControl
}
if (command.ResponseContentEncoding) {
headers['content-encoding'] = command.ResponseContentEncoding
}
if (command.ResponseContentLanguage) {
headers['content-language'] = command.ResponseContentLanguage
}
if (command.ResponseExpires) {
headers['expires'] = command.ResponseExpires.toUTCString()
}

return {
headers: {
'cache-control': response.metadata.cacheControl,
'content-length': response.metadata.contentLength?.toString() || '0',
'content-range': response.metadata.contentRange?.toString() || '',
'content-type': response.metadata.mimetype,
etag: response.metadata.eTag,
'last-modified': response.metadata.lastModified?.toUTCString() || '',
...metadataHeaders,
},
headers,
responseBody: response.body,
statusCode: command.Range ? 206 : 200,
}
@@ -1254,8 +1276,8 @@ export class S3ProtocolHandler {
}
}

parseMetadataHeaders(headers: Record<string, any>): Record<string, any> | undefined {
let metadata: Record<string, any> | undefined = undefined
parseMetadataHeaders(headers: Record<string, unknown>): Record<string, string> | undefined {
let metadata: Record<string, unknown> | undefined = undefined

Object.keys(headers)
.filter((key) => key.startsWith('x-amz-meta-'))
@@ -1334,14 +1356,14 @@ export function isValidHeader(name: string, value: string | string[]): boolean {
)
}

function toAwsMeatadataHeaders(records: Record<string, any>) {
const metadataHeaders: Record<string, any> = {}
function toAwsMeatadataHeaders(records: Record<string, unknown>) {
const metadataHeaders: Record<string, unknown> = {}
let missingCount = 0

if (records) {
Object.keys(records).forEach((key) => {
const value = records[key]
if (value && isUSASCII(value) && isValidHeader(key, value)) {
if (value && typeof value === 'string' && isUSASCII(value) && isValidHeader(key, value)) {
metadataHeaders['x-amz-meta-' + key.toLowerCase()] = value
} else {
missingCount++
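
Because the override assignments in getObject run after the base header object is assembled from the stored metadata, any response-* parameter supplied on the query string wins over the stored value. A minimal sketch of that precedence in isolation (the literal values here are hypothetical, not taken from the handler):

// Base headers derived from the stored object metadata.
const headers: Record<string, string> = {
  'cache-control': 'max-age=3600',
  'content-type': 'image/jpeg',
}

// Hypothetical overrides, as parsed from the query string.
const responseCacheControl: string | undefined = 'no-cache, no-store'
const responseExpires: Date | undefined = new Date('2030-01-01T00:00:00Z')

if (responseCacheControl) {
  headers['cache-control'] = responseCacheControl // replaces the stored value
}
if (responseExpires) {
  // Date#toUTCString() yields the RFC 1123 form expected for Expires,
  // e.g. 'Tue, 01 Jan 2030 00:00:00 GMT'.
  headers['expires'] = responseExpires.toUTCString()
}
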
119 changes: 118 additions & 1 deletion src/test/s3-protocol.test.ts
@@ -60,7 +60,7 @@ async function uploadFile(
bucketName: string,
key: string,
mb: number,
headers?: Record<string, any>
headers?: Record<string, string>
) {
const uploader = new Upload({
client: client,
@@ -1500,6 +1500,123 @@ describe('S3 Protocol', () => {

expect(resp.ok).toBeTruthy()
})

it('supports response-content-disposition override', async () => {
const bucket = await createBucket(client)
const key = 'test-disposition.jpg'

await uploadFile(client, bucket, key, 2)

const response = await client.send(
new GetObjectCommand({
Bucket: bucket,
Key: key,
ResponseContentDisposition: 'attachment; filename="custom-name.txt"',
})
)

expect(response.ContentDisposition).toBe('attachment; filename="custom-name.txt"')
})

it('supports response-content-disposition override via presigned URL', async () => {
const bucket = await createBucket(client)
const key = 'test-presigned-disposition.jpg'

await uploadFile(client, bucket, key, 2)

const getUrl = await getSignedUrl(
client,
new GetObjectCommand({
Bucket: bucket,
Key: key,
ResponseContentDisposition: 'attachment; filename="presigned.pdf"',
}),
{ expiresIn: 100 }
)

const resp = await fetch(getUrl)

expect(resp.ok).toBeTruthy()
expect(resp.headers.get('content-disposition')).toBe('attachment; filename="presigned.pdf"')
})

it('supports response-content-type override', async () => {
const bucket = await createBucket(client)
const key = 'test-content-type.jpg'

await uploadFile(client, bucket, key, 2)

const response = await client.send(
new GetObjectCommand({
Bucket: bucket,
Key: key,
ResponseContentType: 'text/plain',
})
)

expect(response.ContentType).toBe('text/plain')
})

it('supports response-cache-control override', async () => {
const bucket = await createBucket(client)
const key = 'test-cache-control.jpg'

await uploadFile(client, bucket, key, 2)

const response = await client.send(
new GetObjectCommand({
Bucket: bucket,
Key: key,
ResponseCacheControl: 'no-cache, no-store',
})
)

expect(response.CacheControl).toBe('no-cache, no-store')
})

it('supports multiple response overrides simultaneously', async () => {
const bucket = await createBucket(client)
const key = 'test-multiple-overrides.jpg'

await uploadFile(client, bucket, key, 2)

const response = await client.send(
new GetObjectCommand({
Bucket: bucket,
Key: key,
ResponseContentDisposition: 'inline; filename="test.txt"',
ResponseContentType: 'application/octet-stream',
ResponseCacheControl: 'max-age=0',
ResponseContentLanguage: 'en-US',
ResponseContentEncoding: 'gzip',
})
)

expect(response.ContentDisposition).toBe('inline; filename="test.txt"')
expect(response.ContentType).toBe('application/octet-stream')
expect(response.CacheControl).toBe('max-age=0')
expect(response.ContentLanguage).toBe('en-US')
expect(response.ContentEncoding).toBe('gzip')
})

it('supports response-expires override', async () => {
const bucket = await createBucket(client)
const key = 'test-expires.jpg'

await uploadFile(client, bucket, key, 2)

const expiresDate = new Date('2030-01-01T00:00:00Z')

const response = await client.send(
new GetObjectCommand({
Bucket: bucket,
Key: key,
ResponseExpires: expiresDate,
})
)

expect(response.ExpiresString).toEqual(expiresDate.toUTCString())
})
})
})
})
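
One usage note on the presigned path (hedged; this follows from how SigV4 query presigning works in general rather than from anything new in this diff): the response-* parameters are part of the signed query string, so they must be passed to getSignedUrl as command inputs, as the presigned test above does. A short sketch of the failure mode, reusing the client, bucket and key setup from the tests above:

// Presign without any override...
const plainUrl = await getSignedUrl(
  client,
  new GetObjectCommand({ Bucket: bucket, Key: key }),
  { expiresIn: 100 }
)

// ...then append an override after the fact. The extra parameter was not part
// of the signed canonical request, so the signature check is expected to fail.
const tampered = plainUrl + '&response-content-type=text%2Fplain'
const resp = await fetch(tampered)
// expect(resp.ok).toBeFalsy()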