diff --git a/.github/integ-config/integ-all.yml b/.github/integ-config/integ-all.yml index 82827def957..262b59a4b44 100644 --- a/.github/integ-config/integ-all.yml +++ b/.github/integ-config/integ-all.yml @@ -730,6 +730,15 @@ tests: spec: multi-part-copy browser: *minimal_browser_list + # GEN2 STORAGE + - test_name: integ_react_storage + desc: 'React Storage Gen2' + framework: react + category: storage + sample_name: [storage-gen2] + spec: storage-gen2 + browser: *minimal_browser_list + # INAPPMESSAGING - test_name: integ_in_app_messaging desc: 'React InApp Messaging' diff --git a/.github/workflows/callable-canary-e2e.yml b/.github/workflows/callable-canary-e2e.yml index 7c820c601a5..29ae6e6115c 100644 --- a/.github/workflows/callable-canary-e2e.yml +++ b/.github/workflows/callable-canary-e2e.yml @@ -15,7 +15,7 @@ jobs: prebuild-macos: uses: ./.github/workflows/callable-prebuild-amplify-js.yml with: - runs_on: macos-latest + runs_on: macos-12 prebuild-samples-staging: secrets: inherit uses: ./.github/workflows/callable-prebuild-samples-staging.yml diff --git a/.github/workflows/callable-e2e-test-detox.yml b/.github/workflows/callable-e2e-test-detox.yml index 8d430bb72c1..168f960c048 100644 --- a/.github/workflows/callable-e2e-test-detox.yml +++ b/.github/workflows/callable-e2e-test-detox.yml @@ -17,7 +17,7 @@ on: jobs: e2e-test: name: E2E-Detox ${{ inputs.test_name }} - runs-on: macos-latest + runs-on: macos-12 timeout-minutes: ${{ inputs.timeout_minutes }} steps: diff --git a/.github/workflows/callable-release-verification.yml b/.github/workflows/callable-release-verification.yml index 30fd4b0c7e0..d5288a615b2 100644 --- a/.github/workflows/callable-release-verification.yml +++ b/.github/workflows/callable-release-verification.yml @@ -10,7 +10,7 @@ jobs: prebuild-macos: uses: ./.github/workflows/callable-prebuild-amplify-js.yml with: - runs_on: macos-latest + runs_on: macos-12 prebuild-samples-staging: secrets: inherit uses: 
./.github/workflows/callable-prebuild-samples-staging.yml diff --git a/packages/adapter-nextjs/__tests__/api/generateServerClient.test.ts b/packages/adapter-nextjs/__tests__/api/generateServerClient.test.ts index 31e28946721..d60112a9c2c 100644 --- a/packages/adapter-nextjs/__tests__/api/generateServerClient.test.ts +++ b/packages/adapter-nextjs/__tests__/api/generateServerClient.test.ts @@ -1,13 +1,11 @@ import { ResourcesConfig } from '@aws-amplify/core'; +import { parseAmplifyConfig } from '@aws-amplify/core/internals/utils'; import { generateServerClientUsingCookies, generateServerClientUsingReqRes, } from '../../src/api'; -import { - createRunWithAmplifyServerContext, - getAmplifyConfig, -} from '../../src/utils'; +import { createRunWithAmplifyServerContext } from '../../src/utils'; import { NextApiRequestMock, NextApiResponseMock } from '../mocks/headers'; import { createServerRunnerForAPI } from '../../src/api/createServerRunnerForAPI'; @@ -34,13 +32,16 @@ const mockAmplifyConfig: ResourcesConfig = { jest.mock('../../src/utils', () => ({ createRunWithAmplifyServerContext: jest.fn(() => jest.fn()), - getAmplifyConfig: jest.fn(() => mockAmplifyConfig), createCookieStorageAdapterFromNextServerContext: jest.fn(), })); +jest.mock('@aws-amplify/core/internals/utils', () => ({ + ...jest.requireActual('@aws-amplify/core/internals/utils'), + parseAmplifyConfig: jest.fn(() => mockAmplifyConfig), +})); jest.mock('aws-amplify/adapter-core'); -const mockGetAmplifyConfig = getAmplifyConfig as jest.Mock; +const mockParseAmplifyConfig = parseAmplifyConfig as jest.Mock; const mockCreateRunWithAmplifyServerContext = createRunWithAmplifyServerContext as jest.Mock; @@ -77,7 +78,7 @@ describe('generateServerClient', () => { it('should call getAmlifyConfig', async () => { generateServerClientUsingReqRes({ config: mockAmplifyConfig }); - expect(mockGetAmplifyConfig).toHaveBeenCalled(); + expect(mockParseAmplifyConfig).toHaveBeenCalled(); }); // TODO: figure out proper mocks and 
unskip diff --git a/packages/adapter-nextjs/__tests__/createServerRunner.test.ts b/packages/adapter-nextjs/__tests__/createServerRunner.test.ts index 678d1fbf42c..5dbaa9e0071 100644 --- a/packages/adapter-nextjs/__tests__/createServerRunner.test.ts +++ b/packages/adapter-nextjs/__tests__/createServerRunner.test.ts @@ -31,7 +31,7 @@ jest.mock( describe('createServerRunner', () => { let createServerRunner: any; - const mockParseAWSExports = jest.fn(); + const mockParseAmplifyConfig = jest.fn(); const mockCreateAWSCredentialsAndIdentityIdProvider = jest.fn(); const mockCreateKeyValueStorageFromCookieStorageAdapter = jest.fn(); const mockCreateUserPoolsTokenProvider = jest.fn(); @@ -48,23 +48,23 @@ describe('createServerRunner', () => { runWithAmplifyServerContext: mockRunWithAmplifyServerContextCore, })); jest.doMock('@aws-amplify/core/internals/utils', () => ({ - parseAWSExports: mockParseAWSExports, + parseAmplifyConfig: mockParseAmplifyConfig, })); ({ createServerRunner } = require('../src')); }); afterEach(() => { - mockParseAWSExports.mockClear(); + mockParseAmplifyConfig.mockClear(); mockCreateAWSCredentialsAndIdentityIdProvider.mockClear(); mockCreateKeyValueStorageFromCookieStorageAdapter.mockClear(); mockCreateUserPoolsTokenProvider.mockClear(); mockRunWithAmplifyServerContextCore.mockClear(); }); - it('calls parseAWSExports when the config object is imported from amplify configuration file', () => { + it('calls parseAmplifyConfig when the config object is imported from amplify configuration file', () => { createServerRunner({ config: { aws_project_region: 'us-west-2' } }); - expect(mockParseAWSExports).toHaveBeenCalled(); + expect(mockParseAmplifyConfig).toHaveBeenCalled(); }); it('returns runWithAmplifyServerContext function', () => { @@ -85,6 +85,9 @@ describe('createServerRunner', () => { }, }, }; + + mockParseAmplifyConfig.mockReturnValue(mockAmplifyConfigWithoutAuth); + const { runWithAmplifyServerContext } = createServerRunner({ config: 
mockAmplifyConfigWithoutAuth, }); @@ -99,6 +102,10 @@ describe('createServerRunner', () => { }); describe('when amplifyConfig.Auth is defined', () => { + beforeEach(() => { + mockParseAmplifyConfig.mockReturnValue(mockAmplifyConfig); + }); + describe('when nextServerContext is null (opt-in unauthenticated role)', () => { it('should create auth providers with sharedInMemoryStorage', () => { const { runWithAmplifyServerContext } = createServerRunner({ diff --git a/packages/adapter-nextjs/__tests__/utils/getAmplifyConfig.test.ts b/packages/adapter-nextjs/__tests__/utils/getAmplifyConfig.test.ts deleted file mode 100644 index 14532cec5b3..00000000000 --- a/packages/adapter-nextjs/__tests__/utils/getAmplifyConfig.test.ts +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -import { getAmplifyConfig } from '../../src/utils/getAmplifyConfig'; - -describe('getAmplifyConfig', () => { - const mockLegacyConfig = { - aws_project_region: 'us-west-2', - aws_cognito_identity_pool_id: '123', - aws_cognito_region: 'aws_cognito_region', - aws_user_pools_id: 'abc', - aws_user_pools_web_client_id: 'def', - oauth: {}, - aws_cognito_username_attributes: [], - aws_cognito_social_providers: [], - aws_cognito_signup_attributes: [], - aws_cognito_mfa_configuration: 'OFF', - aws_cognito_mfa_types: ['SMS'], - aws_cognito_password_protection_settings: { - passwordPolicyMinLength: 8, - passwordPolicyCharacters: [], - }, - aws_cognito_verification_mechanisms: ['PHONE_NUMBER'], - aws_user_files_s3_bucket: 'bucket', - aws_user_files_s3_bucket_region: 'us-east-1', - }; - const mockAmplifyConfig = { - Auth: { - Cognito: { - identityPoolId: '123', - userPoolId: 'abc', - userPoolClientId: 'def', - }, - }, - Storage: { - S3: { - bucket: 'bucket', - region: 'us-east-1', - }, - }, - }; - - it('returns config object that conforms to ResourcesConfig', () => { - 
expect(getAmplifyConfig(mockLegacyConfig)).toMatchObject(mockAmplifyConfig); - }); -}); diff --git a/packages/adapter-nextjs/src/api/createServerRunnerForAPI.ts b/packages/adapter-nextjs/src/api/createServerRunnerForAPI.ts index 94434a8a481..3c5ae6ad97a 100644 --- a/packages/adapter-nextjs/src/api/createServerRunnerForAPI.ts +++ b/packages/adapter-nextjs/src/api/createServerRunnerForAPI.ts @@ -2,8 +2,9 @@ // SPDX-License-Identifier: Apache-2.0 import { ResourcesConfig } from '@aws-amplify/core'; +import { parseAmplifyConfig } from '@aws-amplify/core/internals/utils'; -import { createRunWithAmplifyServerContext, getAmplifyConfig } from '../utils'; +import { createRunWithAmplifyServerContext } from '../utils'; import { NextServer } from '../types'; export const createServerRunnerForAPI = ({ @@ -11,7 +12,7 @@ export const createServerRunnerForAPI = ({ }: NextServer.CreateServerRunnerInput): NextServer.CreateServerRunnerOutput & { resourcesConfig: ResourcesConfig; } => { - const amplifyConfig = getAmplifyConfig(config); + const amplifyConfig = parseAmplifyConfig(config); return { runWithAmplifyServerContext: createRunWithAmplifyServerContext({ diff --git a/packages/adapter-nextjs/src/api/generateServerClient.ts b/packages/adapter-nextjs/src/api/generateServerClient.ts index 7d8723fd716..e1c5ab09816 100644 --- a/packages/adapter-nextjs/src/api/generateServerClient.ts +++ b/packages/adapter-nextjs/src/api/generateServerClient.ts @@ -11,10 +11,12 @@ import { V6ClientSSRCookies, V6ClientSSRRequest, } from '@aws-amplify/api-graphql'; -import { GraphQLAuthMode } from '@aws-amplify/core/internals/utils'; +import { + GraphQLAuthMode, + parseAmplifyConfig, +} from '@aws-amplify/core/internals/utils'; import { NextServer } from '../types'; -import { getAmplifyConfig } from '../utils'; import { createServerRunnerForAPI } from './createServerRunnerForAPI'; @@ -98,7 +100,7 @@ export function generateServerClientUsingCookies< export function generateServerClientUsingReqRes< T 
extends Record = never, >({ config, authMode, authToken }: ReqClientParams): V6ClientSSRRequest { - const amplifyConfig = getAmplifyConfig(config); + const amplifyConfig = parseAmplifyConfig(config); return generateClient({ config: amplifyConfig, diff --git a/packages/adapter-nextjs/src/createServerRunner.ts b/packages/adapter-nextjs/src/createServerRunner.ts index 78d419af99b..576356fba3e 100644 --- a/packages/adapter-nextjs/src/createServerRunner.ts +++ b/packages/adapter-nextjs/src/createServerRunner.ts @@ -2,8 +2,9 @@ // SPDX-License-Identifier: Apache-2.0 import { ResourcesConfig } from 'aws-amplify'; +import { parseAmplifyConfig } from '@aws-amplify/core/internals/utils'; -import { createRunWithAmplifyServerContext, getAmplifyConfig } from './utils'; +import { createRunWithAmplifyServerContext } from './utils'; import { NextServer } from './types'; /** @@ -27,7 +28,7 @@ import { NextServer } from './types'; export const createServerRunner: NextServer.CreateServerRunner = ({ config, }) => { - const amplifyConfig = getAmplifyConfig(config); + const amplifyConfig = parseAmplifyConfig(config); return { runWithAmplifyServerContext: createRunWithAmplifyServerContext({ diff --git a/packages/adapter-nextjs/src/types/NextServer.ts b/packages/adapter-nextjs/src/types/NextServer.ts index 107bab823e7..5c3d093b795 100644 --- a/packages/adapter-nextjs/src/types/NextServer.ts +++ b/packages/adapter-nextjs/src/types/NextServer.ts @@ -4,7 +4,7 @@ import { GetServerSidePropsContext as NextGetServerSidePropsContext } from 'next'; import { NextRequest, NextResponse } from 'next/server.js'; import { cookies } from 'next/headers.js'; -import { LegacyConfig } from 'aws-amplify/adapter-core'; +import { AmplifyOutputs, LegacyConfig } from 'aws-amplify/adapter-core'; import { AmplifyServer } from '@aws-amplify/core/internals/adapter-core'; import { ResourcesConfig } from '@aws-amplify/core'; @@ -74,7 +74,7 @@ export declare namespace NextServer { ) => Promise; export interface 
CreateServerRunnerInput { - config: ResourcesConfig | LegacyConfig; + config: ResourcesConfig | LegacyConfig | AmplifyOutputs; } export interface CreateServerRunnerOutput { diff --git a/packages/adapter-nextjs/src/utils/getAmplifyConfig.ts b/packages/adapter-nextjs/src/utils/getAmplifyConfig.ts deleted file mode 100644 index a8ab9f1d22b..00000000000 --- a/packages/adapter-nextjs/src/utils/getAmplifyConfig.ts +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -import { ResourcesConfig } from 'aws-amplify'; -import { LegacyConfig } from 'aws-amplify/adapter-core'; -import { parseAWSExports } from '@aws-amplify/core/internals/utils'; - -export const getAmplifyConfig = ( - config: ResourcesConfig | LegacyConfig, -): ResourcesConfig => - Object.keys(config).some(key => key.startsWith('aws_')) - ? parseAWSExports(config) - : (config as ResourcesConfig); diff --git a/packages/adapter-nextjs/src/utils/index.ts b/packages/adapter-nextjs/src/utils/index.ts index e5fd9bb5f87..68ab6cdf55c 100644 --- a/packages/adapter-nextjs/src/utils/index.ts +++ b/packages/adapter-nextjs/src/utils/index.ts @@ -1,5 +1,4 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -export { getAmplifyConfig } from './getAmplifyConfig'; export { createRunWithAmplifyServerContext } from './createRunWithAmplifyServerContext'; diff --git a/packages/api-graphql/__tests__/GraphQLAPI.test.ts b/packages/api-graphql/__tests__/GraphQLAPI.test.ts index 1dcec2bfc23..43779fee5d6 100644 --- a/packages/api-graphql/__tests__/GraphQLAPI.test.ts +++ b/packages/api-graphql/__tests__/GraphQLAPI.test.ts @@ -3,7 +3,9 @@ import { graphql, cancel, isCancelError } from '../src/internals/v6'; import { Amplify } from 'aws-amplify'; import { Amplify as AmplifyCore } from '@aws-amplify/core'; import * as typedQueries from './fixtures/with-types/queries'; +import * as typedSubscriptions from './fixtures/with-types/subscriptions'; import { expectGet } from './utils/expects'; +import { InternalGraphQLAPIClass } from '../src/internals/InternalGraphQLAPI'; import { __amplify, @@ -668,6 +670,75 @@ describe('API test', () => { ); }); + test('multi-auth default case api-key, using identityPool as auth mode', async () => { + Amplify.configure({ + API: { + GraphQL: { + defaultAuthMode: 'apiKey', + apiKey: 'FAKE-KEY', + endpoint: 'https://localhost/graphql', + region: 'local-host-h4x', + }, + }, + }); + + const threadToGet = { + id: 'some-id', + topic: 'something reasonably interesting', + }; + + const graphqlVariables = { id: 'some-id' }; + + const graphqlResponse = { + data: { + getThread: { + __typename: 'Thread', + ...serverManagedFields, + ...threadToGet, + }, + }, + }; + + const spy = jest + .spyOn((raw.GraphQLAPI as any)._api, 'post') + .mockReturnValue({ + body: { + json: () => graphqlResponse, + }, + }); + + const result: GraphQLResult = await client.graphql({ + query: typedQueries.getThread, + variables: graphqlVariables, + authMode: 'identityPool', + }); + + const thread: GetThreadQuery['getThread'] = result.data?.getThread; + const errors = result.errors; + + expect(errors).toBe(undefined); + 
expect(thread).toEqual(graphqlResponse.data.getThread); + + expect(spy).toHaveBeenCalledWith( + expect.objectContaining({ + Auth: expect.any(Object), + configure: expect.any(Function), + getConfig: expect.any(Function), + }), + { + abortController: expect.any(AbortController), + url: new URL('https://localhost/graphql'), + options: expect.objectContaining({ + headers: expect.not.objectContaining({ 'X-Api-Key': 'FAKE-KEY' }), + signingServiceInfo: expect.objectContaining({ + region: 'local-host-h4x', + service: 'appsync', + }), + }), + }, + ); + }); + test('multi-auth default case api-key, using AWS_LAMBDA as auth mode', async () => { Amplify.configure({ API: { @@ -1395,5 +1466,96 @@ describe('API test', () => { }), ); }); + + test('identityPool alias with query', async () => { + Amplify.configure({ + API: { + GraphQL: { + defaultAuthMode: 'apiKey', + apiKey: 'FAKE-KEY', + endpoint: 'https://localhost/graphql', + region: 'local-host-h4x', + }, + }, + }); + + const graphqlVariables = { id: 'some-id' }; + + const graphqlResponse = { + data: { + getThread: {}, + }, + }; + + const spy = jest.spyOn( + InternalGraphQLAPIClass.prototype as any, + '_headerBasedAuth', + ); + + const spy2 = jest + .spyOn((raw.GraphQLAPI as any)._api, 'post') + .mockReturnValue({ + body: { + json: () => graphqlResponse, + }, + }); + + await client.graphql({ + query: typedQueries.getThread, + variables: graphqlVariables, + authMode: 'identityPool', + }); + + expect(spy).toHaveBeenCalledWith( + expect.objectContaining({ + Auth: expect.any(Object), + configure: expect.any(Function), + getConfig: expect.any(Function), + }), + 'iam', + {}, + ); + }); + + test('identityPool alias with subscription', async () => { + Amplify.configure({ + API: { + GraphQL: { + defaultAuthMode: 'apiKey', + apiKey: 'FAKE-KEY', + endpoint: 'https://localhost/graphql', + region: 'local-host-h4x', + }, + }, + }); + + const graphqlResponse = { + data: { + getThread: {}, + }, + }; + + const spy = 
jest.spyOn(AWSAppSyncRealTimeProvider.prototype, 'subscribe'); + + const _spy2 = jest + .spyOn((raw.GraphQLAPI as any)._api, 'post') + .mockReturnValue({ + body: { + json: () => graphqlResponse, + }, + }); + + await client.graphql({ + query: typedSubscriptions.onCreateThread, + authMode: 'identityPool', + }); + + expect(spy).toHaveBeenCalledWith( + expect.objectContaining({ + authenticationType: 'iam', + }), + expect.objectContaining({}), + ); + }); }); }); diff --git a/packages/api-graphql/__tests__/fixtures/modeled/amplifyconfiguration.ts b/packages/api-graphql/__tests__/fixtures/modeled/amplifyconfiguration.ts index ab3ce689644..0a8fe6e2c44 100644 --- a/packages/api-graphql/__tests__/fixtures/modeled/amplifyconfiguration.ts +++ b/packages/api-graphql/__tests__/fixtures/modeled/amplifyconfiguration.ts @@ -1041,7 +1041,7 @@ const amplifyConfig = { type: 'key', properties: { name: 'secondaryIndexModelsByTitle', - queryField: 'listByTitle', + queryField: 'listSecondaryIndexModelByTitle', fields: ['title'], }, }, @@ -1049,7 +1049,7 @@ const amplifyConfig = { type: 'key', properties: { name: 'secondaryIndexModelsByDescriptionAndViewCount', - queryField: 'listByDescriptionAndViewCount', + queryField: 'listSecondaryIndexModelByDescriptionAndViewCount', fields: ['description', 'viewCount'], }, }, diff --git a/packages/api-graphql/__tests__/internals/__snapshots__/generateClient.test.ts.snap b/packages/api-graphql/__tests__/internals/__snapshots__/generateClient.test.ts.snap index 7550d94648c..ac5ab4860d6 100644 --- a/packages/api-graphql/__tests__/internals/__snapshots__/generateClient.test.ts.snap +++ b/packages/api-graphql/__tests__/internals/__snapshots__/generateClient.test.ts.snap @@ -5292,7 +5292,7 @@ exports[`generateClient index queries PK and SK index query 1`] = ` "options": { "body": { "query": "query ($description: String!, $viewCount: ModelIntKeyConditionInput, $filter: ModelSecondaryIndexModelFilterInput, $sortDirection: ModelSortDirection, $limit: Int, 
$nextToken: String) { - listByDescriptionAndViewCount( + listSecondaryIndexModelByDescriptionAndViewCount( description: $description viewCount: $viewCount filter: $filter @@ -5344,7 +5344,7 @@ exports[`generateClient index queries PK and SK index query, with sort direction "options": { "body": { "query": "query ($description: String!, $viewCount: ModelIntKeyConditionInput, $filter: ModelSecondaryIndexModelFilterInput, $sortDirection: ModelSortDirection, $limit: Int, $nextToken: String) { - listByDescriptionAndViewCount( + listSecondaryIndexModelByDescriptionAndViewCount( description: $description viewCount: $viewCount filter: $filter @@ -5397,7 +5397,7 @@ exports[`generateClient index queries PK and SK index query, with sort direction "options": { "body": { "query": "query ($description: String!, $viewCount: ModelIntKeyConditionInput, $filter: ModelSecondaryIndexModelFilterInput, $sortDirection: ModelSortDirection, $limit: Int, $nextToken: String) { - listByDescriptionAndViewCount( + listSecondaryIndexModelByDescriptionAndViewCount( description: $description viewCount: $viewCount filter: $filter @@ -5450,7 +5450,7 @@ exports[`generateClient index queries PK-only index query 1`] = ` "options": { "body": { "query": "query ($title: String!, $filter: ModelSecondaryIndexModelFilterInput, $sortDirection: ModelSortDirection, $limit: Int, $nextToken: String) { - listByTitle( + listSecondaryIndexModelByTitle( title: $title filter: $filter sortDirection: $sortDirection diff --git a/packages/api-graphql/__tests__/internals/generateClient.test.ts b/packages/api-graphql/__tests__/internals/generateClient.test.ts index 629e37efed3..af1ecd7db17 100644 --- a/packages/api-graphql/__tests__/internals/generateClient.test.ts +++ b/packages/api-graphql/__tests__/internals/generateClient.test.ts @@ -5337,9 +5337,10 @@ describe('generateClient', () => { const client = generateClient({ amplify: Amplify }); - const { data } = await client.models.SecondaryIndexModel.listByTitle({ - title: 
'Hello World', - }); + const { data } = + await client.models.SecondaryIndexModel.listSecondaryIndexModelByTitle({ + title: 'Hello World', + }); expect(normalizePostGraphqlCalls(spy)).toMatchSnapshot(); @@ -5374,10 +5375,12 @@ describe('generateClient', () => { const client = generateClient({ amplify: Amplify }); const { data } = - await client.models.SecondaryIndexModel.listByDescriptionAndViewCount({ - description: 'something something', - viewCount: { gt: 4 }, - }); + await client.models.SecondaryIndexModel.listSecondaryIndexModelByDescriptionAndViewCount( + { + description: 'something something', + viewCount: { gt: 4 }, + }, + ); expect(normalizePostGraphqlCalls(spy)).toMatchSnapshot(); @@ -5427,7 +5430,7 @@ describe('generateClient', () => { const client = generateClient({ amplify: Amplify }); const { data } = - await client.models.SecondaryIndexModel.listByDescriptionAndViewCount( + await client.models.SecondaryIndexModel.listSecondaryIndexModelByDescriptionAndViewCount( { description: 'match', viewCount: { lt: 4 }, @@ -5476,7 +5479,7 @@ describe('generateClient', () => { const client = generateClient({ amplify: Amplify }); const { data } = - await client.models.SecondaryIndexModel.listByDescriptionAndViewCount( + await client.models.SecondaryIndexModel.listSecondaryIndexModelByDescriptionAndViewCount( { description: 'match', viewCount: { lt: 4 }, diff --git a/packages/api-graphql/package.json b/packages/api-graphql/package.json index f3ce9e0cc0e..7ba5727b314 100644 --- a/packages/api-graphql/package.json +++ b/packages/api-graphql/package.json @@ -86,7 +86,7 @@ "dependencies": { "@aws-amplify/api-rest": "4.0.28", "@aws-amplify/core": "6.0.28", - "@aws-amplify/data-schema": "^0.17.0", + "@aws-amplify/data-schema": "^1.0.0", "@aws-sdk/types": "3.387.0", "graphql": "15.8.0", "rxjs": "^7.8.1", diff --git a/packages/api-graphql/src/Providers/AWSAppSyncRealTimeProvider/index.ts b/packages/api-graphql/src/Providers/AWSAppSyncRealTimeProvider/index.ts index 
3abde3f469d..37cafbf719e 100644 --- a/packages/api-graphql/src/Providers/AWSAppSyncRealTimeProvider/index.ts +++ b/packages/api-graphql/src/Providers/AWSAppSyncRealTimeProvider/index.ts @@ -61,6 +61,8 @@ const dispatchApiEvent = (payload: HubPayload) => { Hub.dispatch('api', payload, 'PubSub', AMPLIFY_SYMBOL); }; +// resolved/actual AuthMode values. identityPool gets resolves to IAM upstream in InternalGraphQLAPI._graphqlSubscribe +type ResolvedGraphQLAuthModes = Exclude; export interface ObserverQuery { observer: PubSubContentObserver; query: string; @@ -96,7 +98,7 @@ interface ParsedMessagePayload { export interface AWSAppSyncRealTimeProviderOptions { appSyncGraphqlEndpoint?: string; - authenticationType?: GraphQLAuthMode; + authenticationType?: ResolvedGraphQLAuthModes; query?: string; variables?: Record; apiKey?: string; @@ -935,7 +937,7 @@ export class AWSAppSyncRealTimeProvider { Record | undefined > { const headerHandler: { - [key in GraphQLAuthMode]: ( + [key in ResolvedGraphQLAuthModes]: ( arg0: AWSAppSyncRealTimeAuthInput, ) => Promise> | Record; } = { diff --git a/packages/api-graphql/src/internals/InternalGraphQLAPI.ts b/packages/api-graphql/src/internals/InternalGraphQLAPI.ts index 75dfb2e0f4e..15b89670314 100644 --- a/packages/api-graphql/src/internals/InternalGraphQLAPI.ts +++ b/packages/api-graphql/src/internals/InternalGraphQLAPI.ts @@ -259,7 +259,10 @@ export class InternalGraphQLAPIClass { defaultAuthMode, } = resolveConfig(amplify); - const authMode = explicitAuthMode || defaultAuthMode || 'iam'; + const initialAuthMode = explicitAuthMode || defaultAuthMode || 'iam'; + // identityPool is an alias for iam. TODO: remove 'iam' in v7 + const authMode = + initialAuthMode === 'identityPool' ? 'iam' : initialAuthMode; /** * Retrieve library options from Amplify configuration. 
@@ -425,13 +428,19 @@ export class InternalGraphQLAPIClass { private _graphqlSubscribe( amplify: AmplifyClassV6, - { query, variables, authMode }: GraphQLOptions, + { query, variables, authMode: explicitAuthMode }: GraphQLOptions, additionalHeaders: CustomHeaders = {}, customUserAgentDetails?: CustomUserAgentDetails, authToken?: string, ): Observable { const config = resolveConfig(amplify); + const initialAuthMode = + explicitAuthMode || config?.defaultAuthMode || 'iam'; + // identityPool is an alias for iam. TODO: remove 'iam' in v7 + const authMode = + initialAuthMode === 'identityPool' ? 'iam' : initialAuthMode; + /** * Retrieve library options from Amplify configuration. * `libraryConfigHeaders` are from the Amplify configuration options, @@ -449,7 +458,7 @@ export class InternalGraphQLAPIClass { variables, appSyncGraphqlEndpoint: config?.endpoint, region: config?.region, - authenticationType: authMode || config?.defaultAuthMode, + authenticationType: authMode, apiKey: config?.apiKey, additionalHeaders, authToken, diff --git a/packages/aws-amplify/__tests__/initSingleton.test.ts b/packages/aws-amplify/__tests__/initSingleton.test.ts index 2aa93209569..dd2973b8f9d 100644 --- a/packages/aws-amplify/__tests__/initSingleton.test.ts +++ b/packages/aws-amplify/__tests__/initSingleton.test.ts @@ -1,3 +1,4 @@ +/* eslint-disable camelcase */ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 @@ -7,6 +8,7 @@ import { ResourcesConfig, defaultStorage, } from '@aws-amplify/core'; +import { AmplifyOutputs } from '@aws-amplify/core/internals/utils'; import { cognitoCredentialsProvider, @@ -68,6 +70,94 @@ describe('initSingleton (DefaultAmplify)', () => { mockAmplifySingletonGetConfig.mockReset(); }); + describe('Amplify configure with AmplifyOutputs format', () => { + it('should use AmplifyOutputs config type', () => { + const amplifyOutputs: AmplifyOutputs = { + version: '1', + storage: { + aws_region: 'us-east-1', + bucket_name: 'my-bucket-name', + }, + auth: { + user_pool_id: 'us-east-1:', + user_pool_client_id: 'xxxx', + aws_region: 'us-east-1', + identity_pool_id: 'test', + }, + analytics: { + amazon_pinpoint: { + app_id: 'xxxxx', + aws_region: 'us-east-1', + }, + }, + geo: { + aws_region: 'us-east-1', + maps: { + items: { map1: { name: 'map1', style: 'color' } }, + default: 'map1', + }, + geofence_collections: { + items: ['a', 'b', 'c'], + default: 'a', + }, + search_indices: { + items: ['a', 'b', 'c'], + default: 'a', + }, + }, + }; + + Amplify.configure(amplifyOutputs); + + expect(AmplifySingleton.configure).toHaveBeenCalledWith( + { + Storage: { + S3: { + bucket: 'my-bucket-name', + region: 'us-east-1', + }, + }, + Auth: { + Cognito: { + identityPoolId: 'test', + userPoolId: 'us-east-1:', + userPoolClientId: 'xxxx', + }, + }, + Analytics: { + Pinpoint: { + appId: 'xxxxx', + region: 'us-east-1', + }, + }, + Geo: { + LocationService: { + geofenceCollections: { + default: 'a', + items: ['a', 'b', 'c'], + }, + maps: { + default: 'map1', + items: { + map1: { + name: 'map1', + style: 'color', + }, + }, + }, + region: 'us-east-1', + searchIndices: { + default: 'a', + items: ['a', 'b', 'c'], + }, + }, + }, + }, + expect.anything(), + ); + }); + }); + describe('DefaultAmplify.configure()', () => { it('should take the legacy CLI shaped config object for configuring the underlying Amplify Singleton', () => { const 
mockLegacyConfig = { diff --git a/packages/aws-amplify/jest.config.js b/packages/aws-amplify/jest.config.js index 7365a413e7c..5254f524623 100644 --- a/packages/aws-amplify/jest.config.js +++ b/packages/aws-amplify/jest.config.js @@ -3,7 +3,7 @@ module.exports = { coverageThreshold: { global: { branches: 85, - functions: 66, + functions: 65.5, lines: 90, statements: 91, }, diff --git a/packages/aws-amplify/package.json b/packages/aws-amplify/package.json index 2d808a2c0f8..b2ab1341bd1 100644 --- a/packages/aws-amplify/package.json +++ b/packages/aws-amplify/package.json @@ -293,31 +293,31 @@ "name": "[Analytics] record (Pinpoint)", "path": "./dist/esm/analytics/index.mjs", "import": "{ record }", - "limit": "16.50 kB" + "limit": "17.02 kB" }, { "name": "[Analytics] record (Kinesis)", "path": "./dist/esm/analytics/kinesis/index.mjs", "import": "{ record }", - "limit": "45.50 kB" + "limit": "48.56 kB" }, { "name": "[Analytics] record (Kinesis Firehose)", "path": "./dist/esm/analytics/kinesis-firehose/index.mjs", "import": "{ record }", - "limit": "42.50 kB" + "limit": "45.68 kB" }, { "name": "[Analytics] record (Personalize)", "path": "./dist/esm/analytics/personalize/index.mjs", "import": "{ record }", - "limit": "46.50 kB" + "limit": "49.50 kB" }, { "name": "[Analytics] identifyUser (Pinpoint)", "path": "./dist/esm/analytics/index.mjs", "import": "{ identifyUser }", - "limit": "15.00 kB" + "limit": "15.52 kB" }, { "name": "[Analytics] enable", @@ -335,7 +335,7 @@ "name": "[API] generateClient (AppSync)", "path": "./dist/esm/api/index.mjs", "import": "{ generateClient }", - "limit": "39.5 kB" + "limit": "40.09 kB" }, { "name": "[API] REST API handlers", @@ -353,13 +353,13 @@ "name": "[Auth] resetPassword (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ resetPassword }", - "limit": "9.02 kB" + "limit": "12.44 kB" }, { "name": "[Auth] confirmResetPassword (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ confirmResetPassword }", - "limit": 
"9.00 kB" + "limit": "12.39 kB" }, { "name": "[Auth] signIn (Cognito)", @@ -371,7 +371,7 @@ "name": "[Auth] resendSignUpCode (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ resendSignUpCode }", - "limit": "9.00 kB" + "limit": "12.40 kB" }, { "name": "[Auth] confirmSignUp (Cognito)", @@ -383,31 +383,31 @@ "name": "[Auth] confirmSignIn (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ confirmSignIn }", - "limit": "26.50 kB" + "limit": "28.10 kB" }, { "name": "[Auth] updateMFAPreference (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ updateMFAPreference }", - "limit": "8.6 kB" + "limit": "11.74 kB" }, { "name": "[Auth] fetchMFAPreference (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ fetchMFAPreference }", - "limit": "8.4 kB" + "limit": "11.78 kB" }, { "name": "[Auth] verifyTOTPSetup (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ verifyTOTPSetup }", - "limit": "9.18 kB" + "limit": "12.59 kB" }, { "name": "[Auth] updatePassword (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ updatePassword }", - "limit": "9.19 kB" + "limit": "12.63 kB" }, { "name": "[Auth] setUpTOTP (Cognito)", @@ -419,85 +419,85 @@ "name": "[Auth] updateUserAttributes (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ updateUserAttributes }", - "limit": "8.46 kB" + "limit": "11.87 kB" }, { "name": "[Auth] getCurrentUser (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ getCurrentUser }", - "limit": "4.32 kB" + "limit": "7.75 kB" }, { "name": "[Auth] confirmUserAttribute (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ confirmUserAttribute }", - "limit": "9.19 kB" + "limit": "12.61 kB" }, { "name": "[Auth] signInWithRedirect (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ signInWithRedirect }", - "limit": "19.44 kB" + "limit": "21.10 kB" }, { "name": "[Auth] fetchUserAttributes (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ fetchUserAttributes }", - 
"limit": "8.27 kB" + "limit": "11.69 kB" }, { "name": "[Auth] Basic Auth Flow (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ signIn, signOut, fetchAuthSession, confirmSignIn }", - "limit": "28.50 kB" + "limit": "29.90 kB" }, { "name": "[Auth] OAuth Auth Flow (Cognito)", "path": "./dist/esm/auth/index.mjs", "import": "{ signInWithRedirect, signOut, fetchAuthSession }", - "limit": "19.90 kB" + "limit": "21.47 kB" }, { "name": "[Storage] copy (S3)", "path": "./dist/esm/storage/index.mjs", "import": "{ copy }", - "limit": "13.50 kB" + "limit": "14.54 kB" }, { "name": "[Storage] downloadData (S3)", "path": "./dist/esm/storage/index.mjs", "import": "{ downloadData }", - "limit": "14.00 kB" + "limit": "15.17 kB" }, { "name": "[Storage] getProperties (S3)", "path": "./dist/esm/storage/index.mjs", "import": "{ getProperties }", - "limit": "13.50 kB" + "limit": "14.43 kB" }, { "name": "[Storage] getUrl (S3)", "path": "./dist/esm/storage/index.mjs", "import": "{ getUrl }", - "limit": "14.50 kB" + "limit": "15.51 kB" }, { "name": "[Storage] list (S3)", "path": "./dist/esm/storage/index.mjs", "import": "{ list }", - "limit": "14.00 kB" + "limit": "14.94 kB" }, { "name": "[Storage] remove (S3)", "path": "./dist/esm/storage/index.mjs", "import": "{ remove }", - "limit": "13.50 kB" + "limit": "14.29 kB" }, { "name": "[Storage] uploadData (S3)", "path": "./dist/esm/storage/index.mjs", "import": "{ uploadData }", - "limit": "18.50 kB" + "limit": "19.64 kB" } ] } diff --git a/packages/aws-amplify/src/adapter-core/index.ts b/packages/aws-amplify/src/adapter-core/index.ts index 81f099b9353..755f8c12b42 100644 --- a/packages/aws-amplify/src/adapter-core/index.ts +++ b/packages/aws-amplify/src/adapter-core/index.ts @@ -7,7 +7,10 @@ export { createAWSCredentialsAndIdentityIdProvider, createUserPoolsTokenProvider, } from './authProvidersFactories/cognito'; -export { LegacyConfig } from '@aws-amplify/core/internals/utils'; +export { + LegacyConfig, + AmplifyOutputs, +} from 
'@aws-amplify/core/internals/utils'; export { AmplifyServer, CookieStorage, diff --git a/packages/aws-amplify/src/initSingleton.ts b/packages/aws-amplify/src/initSingleton.ts index 785ed9fd214..b5de7deb56a 100644 --- a/packages/aws-amplify/src/initSingleton.ts +++ b/packages/aws-amplify/src/initSingleton.ts @@ -8,8 +8,9 @@ import { defaultStorage, } from '@aws-amplify/core'; import { + AmplifyOutputs, LegacyConfig, - parseAWSExports, + parseAmplifyConfig, } from '@aws-amplify/core/internals/utils'; import { @@ -31,16 +32,10 @@ export const DefaultAmplify = { * Amplify.configure(config); */ configure( - resourceConfig: ResourcesConfig | LegacyConfig, + resourceConfig: ResourcesConfig | LegacyConfig | AmplifyOutputs, libraryOptions?: LibraryOptions, ): void { - let resolvedResourceConfig: ResourcesConfig; - - if (Object.keys(resourceConfig).some(key => key.startsWith('aws_'))) { - resolvedResourceConfig = parseAWSExports(resourceConfig); - } else { - resolvedResourceConfig = resourceConfig as ResourcesConfig; - } + const resolvedResourceConfig = parseAmplifyConfig(resourceConfig); // If no Auth config is provided, no special handling will be required, configure as is. // Otherwise, we can assume an Auth config is provided from here on. 
diff --git a/packages/aws-amplify/src/utils/index.ts b/packages/aws-amplify/src/utils/index.ts index 52367f4911a..35093e8e864 100644 --- a/packages/aws-amplify/src/utils/index.ts +++ b/packages/aws-amplify/src/utils/index.ts @@ -18,4 +18,4 @@ export { KeyValueStorageInterface, } from '@aws-amplify/core'; -export { parseAWSExports as parseAmplifyConfig } from '@aws-amplify/core/internals/utils'; +export { parseAmplifyConfig } from '@aws-amplify/core/internals/utils'; diff --git a/packages/core/__tests__/amplify_outputs.json b/packages/core/__tests__/amplify_outputs.json new file mode 100644 index 00000000000..3ab7633d1a7 --- /dev/null +++ b/packages/core/__tests__/amplify_outputs.json @@ -0,0 +1,72 @@ +{ + "$schema": "adipisicing cillum", + "version": "1", + "auth": { + "aws_region": "non proident exercitation anim fugiat", + "user_pool_id": "sit velit dolor magna est", + "user_pool_client_id": "voluptate", + "identity_pool_id": "Lorem", + "oauth": { + "identity_providers": ["FACEBOOK", "SIGN_IN_WITH_APPLE", "GOOGLE"], + "domain": "proident dolore do mollit ad", + "scopes": ["incididunt proident"], + "redirect_sign_in_uri": ["Duis", "ipsum velit in dolore"], + "redirect_sign_out_uri": [ + "Excepteur pariatur cillum officia", + "incididunt in Ut Excepteur commodo" + ], + "response_type": "token" + }, + "standard_required_attributes": [ + "address", + "locale", + "family_name", + "sub", + "email" + ], + "username_attributes": ["phone_number", "email"], + "user_verification_types": ["email", "email"], + "unauthenticated_identities_enabled": true, + "mfa_configuration": "OPTIONAL", + "mfa_methods": ["TOTP", "TOTP", "SMS", "TOTP", "TOTP"] + }, + "data": { + "aws_region": "regasd", + "url": "dolore dolor do cillum nulla", + "api_key": "non", + "default_authorization_type": "API_KEY", + "authorization_types": [] + }, + "geo": { + "aws_region": "tempor", + "search_indices": { + "items": [ + "commodo Lorem", + "reprehenderit consequat", + "amet", + "aliquip deserunt", + "ea 
dolor in proident" + ], + "default": "exercitation fugiat ut dolor sed" + }, + "geofence_collections": { + "items": [ + "fugiat ea irure dolor", + "Ut", + "culpa ut enim exercitation", + "labore", + "ex pariatur est ullamco" + ], + "default": "ullamco incididunt aliquip" + } + }, + "custom": { + "occaecat_4_": -51806024, + "dolorc": 87599986 + }, + "notifications": { + "aws_region": "labore nisi ad", + "amazon_pinpoint_app_id": "in dolor veniam reprehenderit", + "channels": ["EMAIL"] + } +} diff --git a/packages/core/__tests__/parseAWSExports.test.ts b/packages/core/__tests__/parseAWSExports.test.ts index 4e26fc34b2d..1ad4520f7d5 100644 --- a/packages/core/__tests__/parseAWSExports.test.ts +++ b/packages/core/__tests__/parseAWSExports.test.ts @@ -27,9 +27,13 @@ describe('parseAWSExports', () => { items: ['geoJSSearchExample'], default: 'geoJSSearchExample', }, + geofenceCollections: { + items: ['geofenceCollection-dev'], + default: 'geofenceCollection-dev', + }, region, }; - const amazonLocationServiceV4 = { + const expectedAmazonLocationServiceV4 = { maps: { items: { geoJsExampleMap1: { @@ -41,14 +45,14 @@ describe('parseAWSExports', () => { }, default: 'geoJsExampleMap1', }, - search_indices: { - items: ['geoJSSearchExample'], - default: 'geoJSSearchExample', - }, searchIndices: { items: ['geoJSSearchExample'], default: 'geoJSSearchExample', }, + geofenceCollections: { + items: ['geofenceCollection-dev'], + default: 'geofenceCollection-dev', + }, region, }; const restEndpoint1 = { @@ -114,7 +118,7 @@ describe('parseAWSExports', () => { }, }, Geo: { - LocationService: amazonLocationServiceV4, + LocationService: expectedAmazonLocationServiceV4, }, Storage: { S3: { diff --git a/packages/core/__tests__/parseAmplifyOutputs.test.ts b/packages/core/__tests__/parseAmplifyOutputs.test.ts new file mode 100644 index 00000000000..53496954be5 --- /dev/null +++ b/packages/core/__tests__/parseAmplifyOutputs.test.ts @@ -0,0 +1,323 @@ +/* eslint-disable camelcase */ +import { 
AmplifyOutputs, parseAmplifyOutputs } from '../src/libraryUtils'; + +import mockAmplifyOutputs from './amplify_outputs.json'; + +describe('parseAmplifyOutputs tests', () => { + describe('auth tests', () => { + it('should parse from amplify-outputs.json', async () => { + const result = parseAmplifyOutputs(mockAmplifyOutputs); + + expect(result).toEqual({ + API: { + GraphQL: { + apiKey: 'non', + defaultAuthMode: 'apiKey', + endpoint: 'dolore dolor do cillum nulla', + modelIntrospection: undefined, + region: 'regasd', + }, + }, + Auth: { + Cognito: { + allowGuestAccess: true, + identityPoolId: 'Lorem', + loginWith: { + email: true, + oauth: { + domain: 'proident dolore do mollit ad', + providers: ['Facebook', 'Apple', 'Google'], + redirectSignIn: ['Duis', 'ipsum velit in dolore'], + redirectSignOut: [ + 'Excepteur pariatur cillum officia', + 'incididunt in Ut Excepteur commodo', + ], + responseType: 'token', + scopes: ['incididunt proident'], + }, + phone: true, + }, + mfa: { + smsEnabled: true, + status: 'optional', + totpEnabled: true, + }, + userAttributes: { + address: { + required: true, + }, + email: { + required: true, + }, + family_name: { + required: true, + }, + locale: { + required: true, + }, + sub: { + required: true, + }, + }, + userPoolClientId: 'voluptate', + userPoolId: 'sit velit dolor magna est', + }, + }, + Geo: { + LocationService: { + geofenceCollections: { + default: 'ullamco incididunt aliquip', + items: [ + 'fugiat ea irure dolor', + 'Ut', + 'culpa ut enim exercitation', + 'labore', + 'ex pariatur est ullamco', + ], + }, + maps: undefined, + region: 'tempor', + searchIndices: { + default: 'exercitation fugiat ut dolor sed', + items: [ + 'commodo Lorem', + 'reprehenderit consequat', + 'amet', + 'aliquip deserunt', + 'ea dolor in proident', + ], + }, + }, + }, + }); + }); + + it('should parse auth happy path (all enabled)', () => { + const amplifyOutputs = { + version: '1', + auth: { + user_pool_id: 'us-east-1:', + user_pool_client_id: 'xxxx', + 
aws_region: 'us-east-1', + identity_pool_id: 'test', + oauth: { + domain: 'https://cognito.com...', + redirect_sign_in_uri: ['http://localhost:3000/welcome'], + redirect_sign_out_uri: ['http://localhost:3000/come-back-soon'], + response_type: 'code', + scopes: ['profile', '...'], + identity_providers: ['GOOGLE'], + }, + password_policy: { + min_length: 8, + require_lowercase: true, + require_uppercase: true, + require_symbols: true, + require_numbers: true, + }, + standard_required_attributes: ['email'], + username_attributes: ['email'], + user_verification_types: ['email'], + unauthenticated_identities_enabled: true, + mfa_configuration: 'OPTIONAL', + mfa_methods: ['SMS'], + }, + }; + + const result = parseAmplifyOutputs(amplifyOutputs); + expect(result).toEqual({ + Auth: { + Cognito: { + allowGuestAccess: true, + identityPoolId: 'test', + mfa: { + smsEnabled: true, + status: 'optional', + totpEnabled: false, + }, + passwordFormat: { + minLength: 8, + requireLowercase: true, + requireNumbers: true, + requireSpecialCharacters: true, + requireUppercase: true, + }, + userAttributes: { + email: { + required: true, + }, + }, + userPoolClientId: 'xxxx', + userPoolId: 'us-east-1:', + loginWith: { + email: true, + oauth: { + domain: 'https://cognito.com...', + providers: ['Google'], + redirectSignIn: ['http://localhost:3000/welcome'], + redirectSignOut: ['http://localhost:3000/come-back-soon'], + responseType: 'code', + scopes: ['profile', '...'], + }, + }, + }, + }, + }); + }); + }); + + describe('storage tests', () => { + it('should parse storage happy path', () => { + const amplifyOutputs: AmplifyOutputs = { + version: '1', + storage: { + aws_region: 'us-west-2', + bucket_name: 'storage-bucket-test', + }, + }; + + const result = parseAmplifyOutputs(amplifyOutputs); + + expect(result).toEqual({ + Storage: { + S3: { + bucket: 'storage-bucket-test', + region: 'us-west-2', + }, + }, + }); + }); + }); + + describe('analytics tests', () => { + it('should parse all 
providers', () => { + const amplifyOutputs: AmplifyOutputs = { + version: '1', + analytics: { + amazon_pinpoint: { + app_id: 'xxxxx', + aws_region: 'us-east-1', + }, + }, + }; + + const result = parseAmplifyOutputs(amplifyOutputs); + + expect(result).toEqual({ + Analytics: { + Pinpoint: { + appId: 'xxxxx', + region: 'us-east-1', + }, + }, + }); + }); + }); + + describe('geo tests', () => { + it('should parse LocationService config', () => { + const amplifyOutputs: AmplifyOutputs = { + version: '1', + geo: { + aws_region: 'us-east-1', + maps: { + items: { + map1: { style: 'color' }, + }, + default: 'map1', + }, + geofence_collections: { + items: ['a', 'b', 'c'], + default: 'a', + }, + search_indices: { + items: ['a', 'b', 'c'], + default: 'a', + }, + }, + }; + const result = parseAmplifyOutputs(amplifyOutputs); + expect(result).toEqual({ + Geo: { + LocationService: { + geofenceCollections: { + default: 'a', + items: ['a', 'b', 'c'], + }, + maps: { + default: 'map1', + items: { + map1: { + style: 'color', + }, + }, + }, + region: 'us-east-1', + searchIndices: { + default: 'a', + items: ['a', 'b', 'c'], + }, + }, + }, + }); + }); + }); + + describe('data tests', () => { + it('should configure data', () => { + const amplifyOutputs: AmplifyOutputs = { + version: '1', + data: { + aws_region: 'us-west-2', + url: 'https://api.appsyncaws.com/graphql', + authorization_types: ['API_KEY'], + default_authorization_type: 'API_KEY', + api_key: 'da-xxxx', + }, + }; + + const result = parseAmplifyOutputs(amplifyOutputs); + expect(result).toEqual({ + API: { + GraphQL: { + endpoint: 'https://api.appsyncaws.com/graphql', + region: 'us-west-2', + apiKey: 'da-xxxx', + defaultAuthMode: 'apiKey', + }, + }, + }); + }); + + describe('notifications tests', () => { + it('should configure notifications', () => { + const amplifyOutputs: AmplifyOutputs = { + version: '1', + notifications: { + aws_region: 'us-west-2', + amazon_pinpoint_app_id: 'appid123', + channels: ['APNS', 'EMAIL', 'FCM', 
'IN_APP_MESSAGING', 'SMS'], + }, + }; + + const result = parseAmplifyOutputs(amplifyOutputs); + expect(result).toEqual({ + Notifications: { + InAppMessaging: { + Pinpoint: { + appId: 'appid123', + region: 'us-west-2', + }, + }, + PushNotification: { + Pinpoint: { + appId: 'appid123', + region: 'us-west-2', + }, + }, + }, + }); + }); + }); + }); +}); diff --git a/packages/core/__tests__/utils/parseAmplifyConfig.test.ts b/packages/core/__tests__/utils/parseAmplifyConfig.test.ts new file mode 100644 index 00000000000..1b3022c8753 --- /dev/null +++ b/packages/core/__tests__/utils/parseAmplifyConfig.test.ts @@ -0,0 +1,76 @@ +/* eslint-disable camelcase */ +import { ResourcesConfig } from '../../src'; +import { parseAmplifyConfig } from '../../src/libraryUtils'; +import { parseAWSExports } from '../../src/parseAWSExports'; +import { + isAmplifyOutputs, + parseAmplifyOutputs, +} from '../../src/parseAmplifyOutputs'; + +jest.mock('../../src/parseAWSExports'); +jest.mock('../../src/parseAmplifyOutputs'); + +const testAmplifyOutputs = { + version: '1', + auth: { + user_pool_id: 'us-east-1:', + user_pool_client_id: 'xxxx', + aws_region: 'us-east-1', + }, +}; + +const testLegacyConfig = { + aws_project_region: 'us-west-2', + aws_user_pools_id: 'user-pool-id', + aws_user_pools_web_client_id: 'user-pool-client-id', +}; + +const testResourcesConfig: ResourcesConfig = { + Auth: { + Cognito: { + userPoolId: 'us-east-1:xxx', + userPoolClientId: 'xxxx', + identityPoolId: 'test', + }, + }, +}; + +describe('parseAmplifyConfig', () => { + const mockParseAWSExports = parseAWSExports as jest.Mock; + const mockParseAmplifyOutputs = parseAmplifyOutputs as jest.Mock; + const mockIsAmplifyOutputs = isAmplifyOutputs as unknown as jest.Mock; + + beforeEach(() => { + jest.clearAllMocks(); + mockParseAWSExports.mockReturnValue(testResourcesConfig); + mockParseAmplifyOutputs.mockReturnValue(testResourcesConfig); + mockIsAmplifyOutputs.mockReturnValue(false); + }); + + it('returns a ResourceConfig 
when one is provided', () => { + const parsedConfig = parseAmplifyConfig(testResourcesConfig); + + // Verify that a provided ResourceConfig is returned back unmodified + expect(parsedConfig).toEqual(testResourcesConfig); + }); + + it('parses legacy config objects into ResourcesConfig', () => { + const parsedConfig = parseAmplifyConfig(testLegacyConfig); + + // Verify that a provided legacy config is parsed into a ResourcesConfig + expect(parsedConfig).toEqual(testResourcesConfig); + expect(mockParseAWSExports).toHaveBeenCalledTimes(1); + expect(mockParseAWSExports).toHaveBeenCalledWith(testLegacyConfig); + }); + + it('parses Gen2 config objects into ResourcesConfig', () => { + mockIsAmplifyOutputs.mockReturnValueOnce(true); + const parsedConfig = parseAmplifyConfig(testAmplifyOutputs); + + // Verify that a provided Gen2 config is parsed into a ResourcesConfig + expect(parsedConfig).toEqual(testResourcesConfig); + expect(mockParseAmplifyOutputs).toHaveBeenCalledTimes(1); + expect(mockIsAmplifyOutputs).toHaveBeenCalledTimes(1); + expect(mockParseAmplifyOutputs).toHaveBeenCalledWith(testAmplifyOutputs); + }); +}); diff --git a/packages/core/src/libraryUtils.ts b/packages/core/src/libraryUtils.ts index 623fb12b11e..1eadf9d5c5a 100644 --- a/packages/core/src/libraryUtils.ts +++ b/packages/core/src/libraryUtils.ts @@ -21,11 +21,15 @@ export { deDupeAsyncFunction, } from './utils'; export { parseAWSExports } from './parseAWSExports'; +export { isAmplifyOutputs, parseAmplifyOutputs } from './parseAmplifyOutputs'; export { LegacyConfig } from './singleton/types'; +export { AmplifyOutputs } from './singleton/AmplifyOutputs/types'; export { ADD_OAUTH_LISTENER } from './singleton/constants'; export { amplifyUuid } from './utils/amplifyUuid'; export { AmplifyUrl, AmplifyUrlSearchParams } from './utils/amplifyUrl'; +export { parseAmplifyConfig } from './utils/parseAmplifyConfig'; export { getClientInfo } from './utils'; + // Auth utilities export { decodeJWT, diff --git 
a/packages/core/src/parseAWSExports.ts b/packages/core/src/parseAWSExports.ts index 5bb29e1be37..295c9e211e8 100644 --- a/packages/core/src/parseAWSExports.ts +++ b/packages/core/src/parseAWSExports.ts @@ -249,15 +249,14 @@ export const parseAWSExports = ( // Geo if (geo) { const { amazon_location_service } = geo; - (amplifyConfig as any).Geo = amazon_location_service - ? { - LocationService: { - ...amazon_location_service, - searchIndices: amazon_location_service.search_indices, - region: amazon_location_service.region, - }, - } - : { ...geo }; + amplifyConfig.Geo = { + LocationService: { + maps: amazon_location_service.maps, + geofenceCollections: amazon_location_service.geofenceCollections, + searchIndices: amazon_location_service.search_indices, + region: amazon_location_service.region, + }, + }; } // REST API diff --git a/packages/core/src/parseAmplifyOutputs.ts b/packages/core/src/parseAmplifyOutputs.ts new file mode 100644 index 00000000000..930f36df2de --- /dev/null +++ b/packages/core/src/parseAmplifyOutputs.ts @@ -0,0 +1,333 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +/* This is because JSON schema contains keys with snake_case */ +/* eslint-disable camelcase */ + +/* Does not like exahaustive checks */ +/* eslint-disable no-case-declarations */ + +import { + APIConfig, + APIGraphQLConfig, + GraphQLAuthMode, + ModelIntrospectionSchema, +} from './singleton/API/types'; +import { + CognitoUserPoolConfigMfaStatus, + OAuthProvider, +} from './singleton/Auth/types'; +import { NotificationsConfig } from './singleton/Notifications/types'; +import { + AmplifyOutputs, + AmplifyOutputsAnalyticsProperties, + AmplifyOutputsAuthProperties, + AmplifyOutputsDataProperties, + AmplifyOutputsGeoProperties, + AmplifyOutputsNotificationsProperties, + AmplifyOutputsStorageProperties, +} from './singleton/AmplifyOutputs/types'; +import { + AnalyticsConfig, + AuthConfig, + GeoConfig, + LegacyConfig, + ResourcesConfig, + StorageConfig, +} from './singleton/types'; + +export function isAmplifyOutputs( + config: ResourcesConfig | LegacyConfig | AmplifyOutputs, +): config is AmplifyOutputs { + // version format initially will be '1' but is expected to be something like x.y where x is major and y minor version + const { version } = config as AmplifyOutputs; + + if (!version) { + return false; + } + + return version.startsWith('1'); +} + +function parseStorage( + amplifyOutputsStorageProperties?: AmplifyOutputsStorageProperties, +): StorageConfig | undefined { + if (!amplifyOutputsStorageProperties) { + return undefined; + } + + const { bucket_name, aws_region } = amplifyOutputsStorageProperties; + + return { + S3: { + bucket: bucket_name, + region: aws_region, + }, + }; +} + +function parseAuth( + amplifyOutputsAuthProperties?: AmplifyOutputsAuthProperties, +): AuthConfig | undefined { + if (!amplifyOutputsAuthProperties) { + return undefined; + } + + const { + user_pool_id, + user_pool_client_id, + identity_pool_id, + password_policy, + mfa_configuration, + mfa_methods, + unauthenticated_identities_enabled, + 
oauth, + username_attributes, + standard_required_attributes, + } = amplifyOutputsAuthProperties; + + const authConfig = { + Cognito: { + userPoolId: user_pool_id, + userPoolClientId: user_pool_client_id, + }, + } as AuthConfig; + + if (identity_pool_id) { + authConfig.Cognito = { + ...authConfig.Cognito, + identityPoolId: identity_pool_id, + }; + } + + if (password_policy) { + authConfig.Cognito.passwordFormat = { + requireLowercase: password_policy.require_lowercase, + requireNumbers: password_policy.require_numbers, + requireUppercase: password_policy.require_uppercase, + requireSpecialCharacters: password_policy.require_symbols, + minLength: password_policy.min_length ?? 6, + }; + } + + if (mfa_configuration) { + authConfig.Cognito.mfa = { + status: getMfaStatus(mfa_configuration), + smsEnabled: mfa_methods?.includes('SMS'), + totpEnabled: mfa_methods?.includes('TOTP'), + }; + } + + if (unauthenticated_identities_enabled) { + authConfig.Cognito.allowGuestAccess = unauthenticated_identities_enabled; + } + + if (oauth) { + authConfig.Cognito.loginWith = { + oauth: { + domain: oauth.domain, + redirectSignIn: oauth.redirect_sign_in_uri, + redirectSignOut: oauth.redirect_sign_out_uri, + responseType: oauth.response_type === 'token' ? 
'token' : 'code', + scopes: oauth.scopes, + providers: getOAuthProviders(oauth.identity_providers), + }, + }; + } + + if (username_attributes?.includes('email')) { + authConfig.Cognito.loginWith = { + ...authConfig.Cognito.loginWith, + email: true, + }; + } + + if (username_attributes?.includes('phone_number')) { + authConfig.Cognito.loginWith = { + ...authConfig.Cognito.loginWith, + phone: true, + }; + } + + if (standard_required_attributes) { + authConfig.Cognito.userAttributes = standard_required_attributes.reduce( + (acc, curr) => ({ ...acc, [curr]: { required: true } }), + {}, + ); + } + + return authConfig; +} + +export function parseAnalytics( + amplifyOutputsAnalyticsProperties?: AmplifyOutputsAnalyticsProperties, +): AnalyticsConfig | undefined { + if (!amplifyOutputsAnalyticsProperties?.amazon_pinpoint) { + return undefined; + } + + const { amazon_pinpoint } = amplifyOutputsAnalyticsProperties; + + return { + Pinpoint: { + appId: amazon_pinpoint.app_id, + region: amazon_pinpoint.aws_region, + }, + }; +} + +function parseGeo( + amplifyOutputsAnalyticsProperties?: AmplifyOutputsGeoProperties, +): GeoConfig | undefined { + if (!amplifyOutputsAnalyticsProperties) { + return undefined; + } + + const { aws_region, geofence_collections, maps, search_indices } = + amplifyOutputsAnalyticsProperties; + + return { + LocationService: { + region: aws_region, + searchIndices: search_indices, + geofenceCollections: geofence_collections, + maps, + }, + }; +} + +function parseData( + amplifyOutputsDataProperties?: AmplifyOutputsDataProperties, +): APIConfig | undefined { + if (!amplifyOutputsDataProperties) { + return undefined; + } + + const { + aws_region, + default_authorization_type, + url, + api_key, + model_introspection, + } = amplifyOutputsDataProperties; + + const GraphQL: APIGraphQLConfig = { + endpoint: url, + defaultAuthMode: getGraphQLAuthMode(default_authorization_type), + region: aws_region, + apiKey: api_key, + modelIntrospection: model_introspection as 
ModelIntrospectionSchema, + }; + + return { + GraphQL, + }; +} + +function parseNotifications( + amplifyOutputsNotificationsProperties?: AmplifyOutputsNotificationsProperties, +): NotificationsConfig | undefined { + if (!amplifyOutputsNotificationsProperties) { + return undefined; + } + + const { aws_region, channels, amazon_pinpoint_app_id } = + amplifyOutputsNotificationsProperties; + + const hasInAppMessaging = channels.includes('IN_APP_MESSAGING'); + const hasPushNotification = + channels.includes('APNS') || channels.includes('FCM'); + + if (!(hasInAppMessaging || hasPushNotification)) { + return undefined; + } + + // At this point, we know the Amplify outputs contains at least one supported channel + const notificationsConfig: NotificationsConfig = {} as NotificationsConfig; + + if (hasInAppMessaging) { + notificationsConfig.InAppMessaging = { + Pinpoint: { + appId: amazon_pinpoint_app_id, + region: aws_region, + }, + }; + } + + if (hasPushNotification) { + notificationsConfig.PushNotification = { + Pinpoint: { + appId: amazon_pinpoint_app_id, + region: aws_region, + }, + }; + } + + return notificationsConfig; +} + +export function parseAmplifyOutputs( + amplifyOutputs: AmplifyOutputs, +): ResourcesConfig { + const resourcesConfig: ResourcesConfig = {}; + + if (amplifyOutputs.storage) { + resourcesConfig.Storage = parseStorage(amplifyOutputs.storage); + } + + if (amplifyOutputs.auth) { + resourcesConfig.Auth = parseAuth(amplifyOutputs.auth); + } + + if (amplifyOutputs.analytics) { + resourcesConfig.Analytics = parseAnalytics(amplifyOutputs.analytics); + } + + if (amplifyOutputs.geo) { + resourcesConfig.Geo = parseGeo(amplifyOutputs.geo); + } + + if (amplifyOutputs.data) { + resourcesConfig.API = parseData(amplifyOutputs.data); + } + + if (amplifyOutputs.notifications) { + resourcesConfig.Notifications = parseNotifications( + amplifyOutputs.notifications, + ); + } + + return resourcesConfig; +} + +const authModeNames: Record = { + AMAZON_COGNITO_USER_POOLS: 
'userPool', + API_KEY: 'apiKey', + AWS_IAM: 'iam', + AWS_LAMBDA: 'lambda', + OPENID_CONNECT: 'oidc', +}; + +function getGraphQLAuthMode(authType: string): GraphQLAuthMode { + return authModeNames[authType]; +} + +const providerNames: Record = { + GOOGLE: 'Google', + LOGIN_WITH_AMAZON: 'Amazon', + FACEBOOK: 'Facebook', + SIGN_IN_WITH_APPLE: 'Apple', +}; + +function getOAuthProviders(providers: string[] = []): OAuthProvider[] { + return providers.map(provider => providerNames[provider]); +} + +function getMfaStatus( + mfaConfiguration: string, +): CognitoUserPoolConfigMfaStatus { + if (mfaConfiguration === 'OPTIONAL') return 'optional'; + if (mfaConfiguration === 'REQUIRED') return 'on'; + + return 'off'; +} diff --git a/packages/core/src/singleton/API/types.ts b/packages/core/src/singleton/API/types.ts index fb4bcf8f222..20dd5bd3dfc 100644 --- a/packages/core/src/singleton/API/types.ts +++ b/packages/core/src/singleton/API/types.ts @@ -18,7 +18,7 @@ export interface LibraryAPIOptions { }; } -interface APIGraphQLConfig { +export interface APIGraphQLConfig { /** * Required GraphQL endpoint, must be a valid URL string. */ @@ -47,7 +47,7 @@ interface APIGraphQLConfig { modelIntrospection?: ModelIntrospectionSchema; } -interface APIRestConfig { +export interface APIRestConfig { /** * Required REST endpoint, must be a valid URL string. */ @@ -81,7 +81,9 @@ export type GraphQLAuthMode = | 'apiKey' | 'oidc' | 'userPool' + // @deprecated; use 'identityPool' | 'iam' + | 'identityPool' | 'lambda' | 'none'; diff --git a/packages/core/src/singleton/Amplify.ts b/packages/core/src/singleton/Amplify.ts index 87611d6c4cd..d3bcd33a2a4 100644 --- a/packages/core/src/singleton/Amplify.ts +++ b/packages/core/src/singleton/Amplify.ts @@ -1,10 +1,11 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 import { AMPLIFY_SYMBOL, Hub } from '../Hub'; -import { parseAWSExports } from '../parseAWSExports'; import { deepFreeze } from '../utils'; +import { parseAmplifyConfig } from '../libraryUtils'; import { + AmplifyOutputs, AuthConfig, LegacyConfig, LibraryOptions, @@ -48,16 +49,10 @@ export class AmplifyClass { * @param libraryOptions - Additional options for customizing the behavior of the library. */ configure( - resourcesConfig: ResourcesConfig | LegacyConfig, + resourcesConfig: ResourcesConfig | LegacyConfig | AmplifyOutputs, libraryOptions?: LibraryOptions, ): void { - let resolvedResourceConfig: ResourcesConfig; - - if (Object.keys(resourcesConfig).some(key => key.startsWith('aws_'))) { - resolvedResourceConfig = parseAWSExports(resourcesConfig); - } else { - resolvedResourceConfig = resourcesConfig as ResourcesConfig; - } + const resolvedResourceConfig = parseAmplifyConfig(resourcesConfig); this.resourcesConfig = resolvedResourceConfig; diff --git a/packages/core/src/singleton/AmplifyOutputs/types.ts b/packages/core/src/singleton/AmplifyOutputs/types.ts new file mode 100644 index 00000000000..9f03f49a7fb --- /dev/null +++ b/packages/core/src/singleton/AmplifyOutputs/types.ts @@ -0,0 +1,99 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +export type AmplifyOutputsOAuthIdentityProvider = + | 'GOOGLE' + | 'FACEBOOK' + | 'LOGIN_WITH_AMAZON' + | 'SIGN_IN_WITH_APPLE'; + +export type AmplifyOutputsAuthMFAConfiguration = + | 'OPTIONAL' + | 'REQUIRED' + | 'NONE'; + +export type AmplifyOutputsAuthMFAMethod = 'SMS' | 'TOTP'; + +export interface AmplifyOutputsAuthProperties { + aws_region: string; + authentication_flow_type?: 'USER_SRP_AUTH' | 'CUSTOM_AUTH'; + user_pool_id: string; + user_pool_client_id: string; + identity_pool_id?: string; + password_policy?: { + min_length: number; + require_numbers: boolean; + require_lowercase: boolean; + require_uppercase: boolean; + require_symbols: boolean; + }; + oauth?: { + identity_providers: string[]; + domain: string; + scopes: string[]; + redirect_sign_in_uri: string[]; + redirect_sign_out_uri: string[]; + response_type: string; + }; + standard_required_attributes?: string[]; + username_attributes?: string[]; + user_verification_types?: string[]; + unauthenticated_identities_enabled?: boolean; + mfa_configuration?: string; + mfa_methods?: string[]; +} + +export interface AmplifyOutputsStorageProperties { + aws_region: string; + bucket_name: string; +} + +export interface AmplifyOutputsGeoProperties { + aws_region: string; + maps?: { + items: Record; + default: string; + }; + search_indices?: { items: string[]; default: string }; + geofence_collections?: { items: string[]; default: string }; +} + +export interface AmplifyOutputsAnalyticsProperties { + amazon_pinpoint?: { + aws_region: string; + app_id: string; + }; +} + +export type AuthType = + | 'AMAZON_COGNITO_USER_POOLS' + | 'API_KEY' + | 'AWS_IAM' + | 'AWS_LAMBDA' + | 'OPENID_CONNECT'; + +export interface AmplifyOutputsDataProperties { + aws_region: string; + url: string; + default_authorization_type: string; + authorization_types: string[]; + model_introspection?: object; + api_key?: string; + conflict_resolution_mode?: string; +} + +export interface 
AmplifyOutputsNotificationsProperties { + aws_region: string; + amazon_pinpoint_app_id: string; + channels: string[]; +} + +export interface AmplifyOutputs { + version?: string; + storage?: AmplifyOutputsStorageProperties; + auth?: AmplifyOutputsAuthProperties; + analytics?: AmplifyOutputsAnalyticsProperties; + geo?: AmplifyOutputsGeoProperties; + data?: AmplifyOutputsDataProperties; + notifications?: AmplifyOutputsNotificationsProperties; +} diff --git a/packages/core/src/singleton/Auth/types.ts b/packages/core/src/singleton/Auth/types.ts index f449e1f08e3..239810e8771 100644 --- a/packages/core/src/singleton/Auth/types.ts +++ b/packages/core/src/singleton/Auth/types.ts @@ -145,6 +145,8 @@ export interface AuthUserPoolConfig { }; } +export type CognitoUserPoolConfigMfaStatus = 'on' | 'off' | 'optional'; + export interface CognitoUserPoolConfig { userPoolClientId: string; userPoolId: string; @@ -158,7 +160,7 @@ export interface CognitoUserPoolConfig { }; userAttributes?: AuthConfigUserAttributes; mfa?: { - status?: 'on' | 'off' | 'optional'; + status?: CognitoUserPoolConfigMfaStatus; totpEnabled?: boolean; smsEnabled?: boolean; }; diff --git a/packages/core/src/singleton/Storage/types.ts b/packages/core/src/singleton/Storage/types.ts index 3245a008989..b21413a797a 100644 --- a/packages/core/src/singleton/Storage/types.ts +++ b/packages/core/src/singleton/Storage/types.ts @@ -3,6 +3,7 @@ import { AtLeastOne } from '../types'; +/** @deprecated This may be removed in the next major version. */ export type StorageAccessLevel = 'guest' | 'protected' | 'private'; export interface S3ProviderConfig { @@ -20,6 +21,7 @@ export interface S3ProviderConfig { export type StorageConfig = AtLeastOne; +/** @deprecated This may be removed in the next major version. 
*/ type StoragePrefixResolver = (params: { accessLevel: StorageAccessLevel; targetIdentityId?: string; @@ -27,7 +29,15 @@ type StoragePrefixResolver = (params: { export interface LibraryStorageOptions { S3: { + /** + * @deprecated This may be removed in the next major version. + * This is currently used for Storage API signature using key as input parameter. + * */ prefixResolver?: StoragePrefixResolver; + /** + * @deprecated This may be removed in the next major version. + * This is currently used for Storage API signature using key as input parameter. + * */ defaultAccessLevel?: StorageAccessLevel; isObjectLockEnabled?: boolean; }; diff --git a/packages/core/src/singleton/types.ts b/packages/core/src/singleton/types.ts index 423ac96d23f..e2acbeb6611 100644 --- a/packages/core/src/singleton/types.ts +++ b/packages/core/src/singleton/types.ts @@ -26,6 +26,8 @@ import { import { NotificationsConfig } from './Notifications/types'; import { InteractionsConfig } from './Interactions/types'; +export { AmplifyOutputs } from './AmplifyOutputs/types'; + /** * Compatibility type representing the Amplify Gen 1 configuration file schema. This type should not be used directly. */ diff --git a/packages/core/src/utils/parseAmplifyConfig.ts b/packages/core/src/utils/parseAmplifyConfig.ts new file mode 100644 index 00000000000..424f71a7102 --- /dev/null +++ b/packages/core/src/utils/parseAmplifyConfig.ts @@ -0,0 +1,26 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { ResourcesConfig } from '../index'; +import { AmplifyOutputs } from '../singleton/AmplifyOutputs/types'; +import { LegacyConfig } from '../singleton/types'; +import { parseAWSExports } from '../parseAWSExports'; +import { isAmplifyOutputs, parseAmplifyOutputs } from '../parseAmplifyOutputs'; + +/** + * Parses the variety of configuration shapes that Amplify can accept into a ResourcesConfig. 
+ * + * @param amplifyConfig An Amplify configuration object conforming to one of the supported schemas. + * @return A ResourcesConfig for the provided configuration object. + */ +export const parseAmplifyConfig = ( + amplifyConfig: ResourcesConfig | LegacyConfig | AmplifyOutputs, +): ResourcesConfig => { + if (Object.keys(amplifyConfig).some(key => key.startsWith('aws_'))) { + return parseAWSExports(amplifyConfig); + } else if (isAmplifyOutputs(amplifyConfig)) { + return parseAmplifyOutputs(amplifyConfig); + } else { + return amplifyConfig as ResourcesConfig; + } +}; diff --git a/packages/geo/src/providers/location-service/AmazonLocationServiceProvider.ts b/packages/geo/src/providers/location-service/AmazonLocationServiceProvider.ts index 1e466020357..b5248d5ccbc 100644 --- a/packages/geo/src/providers/location-service/AmazonLocationServiceProvider.ts +++ b/packages/geo/src/providers/location-service/AmazonLocationServiceProvider.ts @@ -151,7 +151,7 @@ export class AmazonLocationServiceProvider implements GeoProvider { */ let locationServiceInput: SearchPlaceIndexForTextCommandInput = { Text: text, - IndexName: this._config.search_indices.default, + IndexName: this._config.searchIndices.default, }; /** @@ -217,7 +217,7 @@ export class AmazonLocationServiceProvider implements GeoProvider { */ let locationServiceInput: SearchPlaceIndexForSuggestionsCommandInput = { Text: text, - IndexName: this._config.search_indices.default, + IndexName: this._config.searchIndices.default, }; /** @@ -286,8 +286,7 @@ export class AmazonLocationServiceProvider implements GeoProvider { const searchByPlaceIdInput: GetPlaceCommandInput = { PlaceId: placeId, - IndexName: - options?.searchIndexName || this._config.search_indices.default, + IndexName: options?.searchIndexName || this._config.searchIndices.default, }; const command = new GetPlaceCommand(searchByPlaceIdInput); @@ -325,7 +324,7 @@ export class AmazonLocationServiceProvider implements GeoProvider { const locationServiceInput: 
SearchPlaceIndexForPositionCommandInput = { Position: coordinates, - IndexName: this._config.search_indices.default, + IndexName: this._config.searchIndices.default, }; if (options) { diff --git a/packages/interactions/package.json b/packages/interactions/package.json index 83ac1116125..9e67fa5113d 100644 --- a/packages/interactions/package.json +++ b/packages/interactions/package.json @@ -89,19 +89,19 @@ "name": "Interactions (default to Lex v2)", "path": "./dist/esm/index.mjs", "import": "{ Interactions }", - "limit": "52.00 kB" + "limit": "52.52 kB" }, { "name": "Interactions (Lex v2)", "path": "./dist/esm/lex-v2/index.mjs", "import": "{ Interactions }", - "limit": "52.00 kB" + "limit": "52.52 kB" }, { "name": "Interactions (Lex v1)", "path": "./dist/esm/lex-v1/index.mjs", "import": "{ Interactions }", - "limit": "47.00 kB" + "limit": "47.33 kB" } ] } diff --git a/packages/storage/__tests__/providers/s3/apis/copy.test.ts b/packages/storage/__tests__/providers/s3/apis/copy.test.ts index e7c61ef8a3a..52eaf7c902f 100644 --- a/packages/storage/__tests__/providers/s3/apis/copy.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/copy.test.ts @@ -2,12 +2,16 @@ // SPDX-License-Identifier: Apache-2.0 import { AWSCredentials } from '@aws-amplify/core/internals/utils'; -import { Amplify } from '@aws-amplify/core'; +import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; +import { StorageError } from '../../../../src/errors/StorageError'; +import { StorageValidationErrorCode } from '../../../../src/errors/types/validation'; import { copyObject } from '../../../../src/providers/s3/utils/client'; import { copy } from '../../../../src/providers/s3/apis'; import { - CopySourceOptions, - CopyDestinationOptions, + CopyInput, + CopyWithPathInput, + CopyOutput, + CopyWithPathOutput, } from '../../../../src/providers/s3/types'; jest.mock('../../../../src/providers/s3/utils/client'); @@ -32,7 +36,6 @@ const bucket = 'bucket'; const region = 'region'; const 
targetIdentityId = 'targetIdentityId'; const defaultIdentityId = 'defaultIdentityId'; -const copyResult = { key: destinationKey }; const credentials: AWSCredentials = { accessKeyId: 'accessKeyId', sessionToken: 'sessionToken', @@ -63,121 +66,189 @@ describe('copy API', () => { }, }); }); - describe('Happy Path Cases:', () => { - beforeEach(() => { - mockCopyObject.mockImplementation(() => { - return { - Metadata: { key: 'value' }, - }; + + describe('Happy Cases', () => { + describe('With key', () => { + const copyWrapper = async (input: CopyInput): Promise => + copy(input); + beforeEach(() => { + mockCopyObject.mockImplementation(() => { + return { + Metadata: { key: 'value' }, + }; + }); }); - }); - afterEach(() => { - jest.clearAllMocks(); - }); - [ - { - source: { accessLevel: 'guest' }, - destination: { accessLevel: 'guest' }, - expectedSourceKey: `${bucket}/public/${sourceKey}`, - expectedDestinationKey: `public/${destinationKey}`, - }, - { - source: { accessLevel: 'guest' }, - destination: { accessLevel: 'private' }, - expectedSourceKey: `${bucket}/public/${sourceKey}`, - expectedDestinationKey: `private/${defaultIdentityId}/${destinationKey}`, - }, - { - source: { accessLevel: 'guest' }, - destination: { accessLevel: 'protected' }, - expectedSourceKey: `${bucket}/public/${sourceKey}`, - expectedDestinationKey: `protected/${defaultIdentityId}/${destinationKey}`, - }, - { - source: { accessLevel: 'private' }, - destination: { accessLevel: 'guest' }, - expectedSourceKey: `${bucket}/private/${defaultIdentityId}/${sourceKey}`, - expectedDestinationKey: `public/${destinationKey}`, - }, - { - source: { accessLevel: 'private' }, - destination: { accessLevel: 'private' }, - expectedSourceKey: `${bucket}/private/${defaultIdentityId}/${sourceKey}`, - expectedDestinationKey: `private/${defaultIdentityId}/${destinationKey}`, - }, - { - source: { accessLevel: 'private' }, - destination: { accessLevel: 'protected' }, - expectedSourceKey: 
`${bucket}/private/${defaultIdentityId}/${sourceKey}`, - expectedDestinationKey: `protected/${defaultIdentityId}/${destinationKey}`, - }, - { - source: { accessLevel: 'protected' }, - destination: { accessLevel: 'guest' }, - expectedSourceKey: `${bucket}/protected/${defaultIdentityId}/${sourceKey}`, - expectedDestinationKey: `public/${destinationKey}`, - }, - { - source: { accessLevel: 'protected' }, - destination: { accessLevel: 'private' }, - expectedSourceKey: `${bucket}/protected/${defaultIdentityId}/${sourceKey}`, - expectedDestinationKey: `private/${defaultIdentityId}/${destinationKey}`, - }, - { - source: { accessLevel: 'protected' }, - destination: { accessLevel: 'protected' }, - expectedSourceKey: `${bucket}/protected/${defaultIdentityId}/${sourceKey}`, - expectedDestinationKey: `protected/${defaultIdentityId}/${destinationKey}`, - }, - { - source: { accessLevel: 'protected', targetIdentityId }, - destination: { accessLevel: 'guest' }, - expectedSourceKey: `${bucket}/protected/${targetIdentityId}/${sourceKey}`, - expectedDestinationKey: `public/${destinationKey}`, - }, - { - source: { accessLevel: 'protected', targetIdentityId }, - destination: { accessLevel: 'private' }, - expectedSourceKey: `${bucket}/protected/${targetIdentityId}/${sourceKey}`, - expectedDestinationKey: `private/${defaultIdentityId}/${destinationKey}`, - }, - { - source: { accessLevel: 'protected', targetIdentityId }, - destination: { accessLevel: 'protected' }, - expectedSourceKey: `${bucket}/protected/${targetIdentityId}/${sourceKey}`, - expectedDestinationKey: `protected/${defaultIdentityId}/${destinationKey}`, - }, - ].forEach( - ({ source, destination, expectedSourceKey, expectedDestinationKey }) => { - const targetIdentityIdMsg = source?.targetIdentityId - ? 
`with targetIdentityId` - : ''; - it(`should copy ${source.accessLevel} ${targetIdentityIdMsg} -> ${destination.accessLevel}`, async () => { - expect.assertions(3); - expect( - await copy({ + afterEach(() => { + jest.clearAllMocks(); + }); + const testCases: Array<{ + source: { accessLevel?: StorageAccessLevel; targetIdentityId?: string }; + destination: { + accessLevel?: StorageAccessLevel; + }; + expectedSourceKey: string; + expectedDestinationKey: string; + }> = [ + { + source: { accessLevel: 'guest' }, + destination: { accessLevel: 'guest' }, + expectedSourceKey: `${bucket}/public/${sourceKey}`, + expectedDestinationKey: `public/${destinationKey}`, + }, + { + source: { accessLevel: 'guest' }, + destination: { accessLevel: 'private' }, + expectedSourceKey: `${bucket}/public/${sourceKey}`, + expectedDestinationKey: `private/${defaultIdentityId}/${destinationKey}`, + }, + { + source: { accessLevel: 'guest' }, + destination: { accessLevel: 'protected' }, + expectedSourceKey: `${bucket}/public/${sourceKey}`, + expectedDestinationKey: `protected/${defaultIdentityId}/${destinationKey}`, + }, + { + source: { accessLevel: 'private' }, + destination: { accessLevel: 'guest' }, + expectedSourceKey: `${bucket}/private/${defaultIdentityId}/${sourceKey}`, + expectedDestinationKey: `public/${destinationKey}`, + }, + { + source: { accessLevel: 'private' }, + destination: { accessLevel: 'private' }, + expectedSourceKey: `${bucket}/private/${defaultIdentityId}/${sourceKey}`, + expectedDestinationKey: `private/${defaultIdentityId}/${destinationKey}`, + }, + { + source: { accessLevel: 'private' }, + destination: { accessLevel: 'protected' }, + expectedSourceKey: `${bucket}/private/${defaultIdentityId}/${sourceKey}`, + expectedDestinationKey: `protected/${defaultIdentityId}/${destinationKey}`, + }, + { + source: { accessLevel: 'protected' }, + destination: { accessLevel: 'guest' }, + expectedSourceKey: `${bucket}/protected/${defaultIdentityId}/${sourceKey}`, + 
expectedDestinationKey: `public/${destinationKey}`, + }, + { + source: { accessLevel: 'protected' }, + destination: { accessLevel: 'private' }, + expectedSourceKey: `${bucket}/protected/${defaultIdentityId}/${sourceKey}`, + expectedDestinationKey: `private/${defaultIdentityId}/${destinationKey}`, + }, + { + source: { accessLevel: 'protected' }, + destination: { accessLevel: 'protected' }, + expectedSourceKey: `${bucket}/protected/${defaultIdentityId}/${sourceKey}`, + expectedDestinationKey: `protected/${defaultIdentityId}/${destinationKey}`, + }, + { + source: { accessLevel: 'protected', targetIdentityId }, + destination: { accessLevel: 'guest' }, + expectedSourceKey: `${bucket}/protected/${targetIdentityId}/${sourceKey}`, + expectedDestinationKey: `public/${destinationKey}`, + }, + { + source: { accessLevel: 'protected', targetIdentityId }, + destination: { accessLevel: 'private' }, + expectedSourceKey: `${bucket}/protected/${targetIdentityId}/${sourceKey}`, + expectedDestinationKey: `private/${defaultIdentityId}/${destinationKey}`, + }, + { + source: { accessLevel: 'protected', targetIdentityId }, + destination: { accessLevel: 'protected' }, + expectedSourceKey: `${bucket}/protected/${targetIdentityId}/${sourceKey}`, + expectedDestinationKey: `protected/${defaultIdentityId}/${destinationKey}`, + }, + ]; + testCases.forEach( + ({ + source, + destination, + expectedSourceKey, + expectedDestinationKey, + }) => { + const targetIdentityIdMsg = source?.targetIdentityId + ? 
`with targetIdentityId` + : ''; + it(`should copy ${source.accessLevel} ${targetIdentityIdMsg} -> ${destination.accessLevel}`, async () => { + const { key } = await copyWrapper({ source: { - ...(source as CopySourceOptions), + ...source, key: sourceKey, }, destination: { - ...(destination as CopyDestinationOptions), + ...destination, key: destinationKey, }, - }), - ).toEqual(copyResult); + }); + expect(key).toEqual(destinationKey); + expect(copyObject).toHaveBeenCalledTimes(1); + expect(copyObject).toHaveBeenCalledWith(copyObjectClientConfig, { + ...copyObjectClientBaseParams, + CopySource: expectedSourceKey, + Key: expectedDestinationKey, + }); + }); + }, + ); + }); + + describe('With path', () => { + const copyWrapper = async ( + input: CopyWithPathInput, + ): Promise => copy(input); + + beforeEach(() => { + mockCopyObject.mockImplementation(() => { + return { + Metadata: { key: 'value' }, + }; + }); + }); + afterEach(() => { + jest.clearAllMocks(); + }); + + test.each([ + { + sourcePath: 'sourcePathAsString', + expectedSourcePath: 'sourcePathAsString', + destinationPath: 'destinationPathAsString', + expectedDestinationPath: 'destinationPathAsString', + }, + { + sourcePath: () => 'sourcePathAsFunction', + expectedSourcePath: 'sourcePathAsFunction', + destinationPath: () => 'destinationPathAsFunction', + expectedDestinationPath: 'destinationPathAsFunction', + }, + ])( + 'should copy $sourcePath -> $destinationPath', + async ({ + sourcePath, + expectedSourcePath, + destinationPath, + expectedDestinationPath, + }) => { + const { path } = await copyWrapper({ + source: { path: sourcePath }, + destination: { path: destinationPath }, + }); + expect(path).toEqual(expectedDestinationPath); expect(copyObject).toHaveBeenCalledTimes(1); expect(copyObject).toHaveBeenCalledWith(copyObjectClientConfig, { ...copyObjectClientBaseParams, - CopySource: expectedSourceKey, - Key: expectedDestinationKey, + CopySource: `${bucket}/${expectedSourcePath}`, + Key: expectedDestinationPath, 
}); - }); - }, - ); + }, + ); + }); }); - describe('Error Path Cases:', () => { + describe('Error Cases:', () => { afterEach(() => { jest.clearAllMocks(); }); @@ -206,5 +277,34 @@ describe('copy API', () => { expect(error.$metadata.httpStatusCode).toBe(404); } }); + + it('should return a path not found error when source uses path and destination uses key', async () => { + expect.assertions(2); + try { + // @ts-expect-error + await copy({ + source: { path: 'sourcePath' }, + destination: { key: 'destinationKey' }, + }); + } catch (error: any) { + expect(error).toBeInstanceOf(StorageError); + // source uses path so destination expects path as well + expect(error.name).toBe(StorageValidationErrorCode.NoDestinationPath); + } + }); + + it('should return a key not found error when source uses key and destination uses path', async () => { + expect.assertions(2); + try { + // @ts-expect-error + await copy({ + source: { key: 'sourcePath' }, + destination: { path: 'destinationKey' }, + }); + } catch (error: any) { + expect(error).toBeInstanceOf(StorageError); + expect(error.name).toBe(StorageValidationErrorCode.NoDestinationKey); + } + }); }); }); diff --git a/packages/storage/__tests__/providers/s3/apis/downloadData.test.ts b/packages/storage/__tests__/providers/s3/apis/downloadData.test.ts index 62f1704398b..0c9c4a3d007 100644 --- a/packages/storage/__tests__/providers/s3/apis/downloadData.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/downloadData.test.ts @@ -2,11 +2,26 @@ // SPDX-License-Identifier: Apache-2.0 import { AWSCredentials } from '@aws-amplify/core/internals/utils'; -import { Amplify } from '@aws-amplify/core'; +import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; import { getObject } from '../../../../src/providers/s3/utils/client'; import { downloadData } from '../../../../src/providers/s3'; -import { createDownloadTask } from '../../../../src/providers/s3/utils'; -import { DownloadDataOptions } from 
'../../../../src/providers/s3/types'; +import { + createDownloadTask, + validateStorageOperationInput, +} from '../../../../src/providers/s3/utils'; +import { + DownloadDataInput, + DownloadDataWithPathInput, +} from '../../../../src/providers/s3/types'; +import { + STORAGE_INPUT_KEY, + STORAGE_INPUT_PATH, +} from '../../../../src/providers/s3/utils/constants'; +import { StorageDownloadDataOutput } from '../../../../src/types'; +import { + ItemWithKey, + ItemWithPath, +} from '../../../../src/providers/s3/types/outputs'; jest.mock('../../../../src/providers/s3/utils/client'); jest.mock('../../../../src/providers/s3/utils'); @@ -26,17 +41,28 @@ const credentials: AWSCredentials = { sessionToken: 'sessionToken', secretAccessKey: 'secretAccessKey', }; -const key = 'key'; +const inputKey = 'key'; +const inputPath = 'path'; const bucket = 'bucket'; const region = 'region'; const targetIdentityId = 'targetIdentityId'; const defaultIdentityId = 'defaultIdentityId'; +const mockDownloadResultBase = { + body: 'body', + lastModified: 'lastModified', + size: 'contentLength', + eTag: 'eTag', + metadata: 'metadata', + versionId: 'versionId', + contentType: 'contentType', +}; const mockFetchAuthSession = Amplify.Auth.fetchAuthSession as jest.Mock; const mockCreateDownloadTask = createDownloadTask as jest.Mock; +const mockValidateStorageInput = validateStorageOperationInput as jest.Mock; const mockGetConfig = Amplify.getConfig as jest.Mock; -describe('downloadData', () => { +describe('downloadData with key', () => { beforeAll(() => { mockFetchAuthSession.mockResolvedValue({ credentials, @@ -51,56 +77,69 @@ describe('downloadData', () => { }, }); }); - mockCreateDownloadTask.mockReturnValue('downloadTask'); beforeEach(() => { jest.clearAllMocks(); + + mockCreateDownloadTask.mockReturnValue('downloadTask'); + mockValidateStorageInput.mockReturnValue({ + inputType: STORAGE_INPUT_KEY, + objectKey: inputKey, + }); }); - it('should return a download task', async () => { - 
expect(downloadData({ key: 'key' })).toBe('downloadTask'); + it('should return a download task with key', async () => { + const mockDownloadInput: DownloadDataInput = { + key: inputKey, + options: { accessLevel: 'protected', targetIdentityId: targetIdentityId }, + }; + expect(downloadData(mockDownloadInput)).toBe('downloadTask'); }); - [ + const testCases: Array<{ + expectedKey: string; + options?: { accessLevel?: StorageAccessLevel; targetIdentityId?: string }; + }> = [ { - expectedKey: `public/${key}`, + expectedKey: `public/${inputKey}`, }, { options: { accessLevel: 'guest' }, - expectedKey: `public/${key}`, + expectedKey: `public/${inputKey}`, }, { options: { accessLevel: 'private' }, - expectedKey: `private/${defaultIdentityId}/${key}`, + expectedKey: `private/${defaultIdentityId}/${inputKey}`, }, { options: { accessLevel: 'protected' }, - expectedKey: `protected/${defaultIdentityId}/${key}`, + expectedKey: `protected/${defaultIdentityId}/${inputKey}`, }, { options: { accessLevel: 'protected', targetIdentityId }, - expectedKey: `protected/${targetIdentityId}/${key}`, + expectedKey: `protected/${targetIdentityId}/${inputKey}`, }, - ].forEach(({ options, expectedKey }) => { - const accessLevelMsg = options?.accessLevel ?? 'default'; - const targetIdentityIdMsg = options?.targetIdentityId - ? 
`and targetIdentityId` - : ''; - - it(`should supply the correct parameters to getObject API handler with ${accessLevelMsg} accessLevel ${targetIdentityIdMsg}`, async () => { - expect.assertions(2); + ]; + + test.each(testCases)( + 'should supply the correct parameters to getObject API handler with $expectedKey accessLevel', + async ({ options, expectedKey }) => { (getObject as jest.Mock).mockResolvedValueOnce({ Body: 'body' }); const onProgress = jest.fn(); downloadData({ - key, + key: inputKey, options: { ...options, useAccelerateEndpoint: true, onProgress, - } as DownloadDataOptions, + }, }); const job = mockCreateDownloadTask.mock.calls[0][0].job; - await job(); + const { key, body }: StorageDownloadDataOutput = await job(); + expect({ key, body }).toEqual({ + key: inputKey, + body: 'body', + }); expect(getObject).toHaveBeenCalledTimes(1); expect(getObject).toHaveBeenCalledWith( { @@ -116,41 +155,190 @@ describe('downloadData', () => { Key: expectedKey, }, ); - }); - }); + }, + ); - it('should assign the getObject API handler response to the result', async () => { - expect.assertions(2); - const lastModified = 'lastModified'; - const contentLength = 'contentLength'; - const eTag = 'eTag'; - const metadata = 'metadata'; - const versionId = 'versionId'; - const contentType = 'contentType'; - const body = 'body'; - const key = 'key'; + it('should assign the getObject API handler response to the result with key', async () => { (getObject as jest.Mock).mockResolvedValueOnce({ - Body: body, - LastModified: lastModified, - ContentLength: contentLength, - ETag: eTag, - Metadata: metadata, - VersionId: versionId, - ContentType: contentType, + Body: 'body', + LastModified: 'lastModified', + ContentLength: 'contentLength', + ETag: 'eTag', + Metadata: 'metadata', + VersionId: 'versionId', + ContentType: 'contentType', }); - downloadData({ key }); + downloadData({ key: inputKey }); const job = mockCreateDownloadTask.mock.calls[0][0].job; - const result = await job(); + 
const { + key, + body, + contentType, + eTag, + lastModified, + metadata, + size, + versionId, + }: StorageDownloadDataOutput = await job(); expect(getObject).toHaveBeenCalledTimes(1); - expect(result).toEqual({ + expect({ key, body, + contentType, + eTag, lastModified, - size: contentLength, + metadata, + size, + versionId, + }).toEqual({ + key: inputKey, + ...mockDownloadResultBase, + }); + }); + + it('should forward the bytes range option to the getObject API', async () => { + const start = 1; + const end = 100; + (getObject as jest.Mock).mockResolvedValueOnce({ Body: 'body' }); + + downloadData({ + key: inputKey, + options: { + bytesRange: { start, end }, + }, + }); + + const job = mockCreateDownloadTask.mock.calls[0][0].job; + await job(); + + expect(getObject).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ + Range: `bytes=${start}-${end}`, + }), + ); + }); +}); + +describe('downloadData with path', () => { + beforeAll(() => { + mockFetchAuthSession.mockResolvedValue({ + credentials, + identityId: defaultIdentityId, + }); + mockGetConfig.mockReturnValue({ + Storage: { + S3: { + bucket, + region, + }, + }, + }); + mockCreateDownloadTask.mockReturnValue('downloadTask'); + mockValidateStorageInput.mockReturnValue({ + inputType: STORAGE_INPUT_PATH, + objectKey: inputPath, + }); + }); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should return a download task with path', async () => { + const mockDownloadInput: DownloadDataWithPathInput = { + path: inputPath, + options: { useAccelerateEndpoint: true }, + }; + expect(downloadData(mockDownloadInput)).toBe('downloadTask'); + }); + + test.each([ + { + path: inputPath, + expectedKey: inputPath, + }, + { + path: () => inputPath, + expectedKey: inputPath, + }, + ])( + 'should call getObject API with $expectedKey when path provided is $path', + async ({ path, expectedKey }) => { + (getObject as jest.Mock).mockResolvedValueOnce({ Body: 'body' }); + const onProgress = jest.fn(); + 
downloadData({ + path, + options: { + useAccelerateEndpoint: true, + onProgress, + }, + }); + const job = mockCreateDownloadTask.mock.calls[0][0].job; + const { + path: resultPath, + body, + }: StorageDownloadDataOutput = await job(); + expect({ + path: resultPath, + body, + }).toEqual({ + path: expectedKey, + body: 'body', + }); + expect(getObject).toHaveBeenCalledTimes(1); + expect(getObject).toHaveBeenCalledWith( + { + credentials, + region, + useAccelerateEndpoint: true, + onDownloadProgress: onProgress, + abortSignal: expect.any(AbortSignal), + userAgentValue: expect.any(String), + }, + { + Bucket: bucket, + Key: expectedKey, + }, + ); + }, + ); + + it('should assign the getObject API handler response to the result with path', async () => { + (getObject as jest.Mock).mockResolvedValueOnce({ + Body: 'body', + LastModified: 'lastModified', + ContentLength: 'contentLength', + ETag: 'eTag', + Metadata: 'metadata', + VersionId: 'versionId', + ContentType: 'contentType', + }); + downloadData({ path: inputPath }); + const job = mockCreateDownloadTask.mock.calls[0][0].job; + const { + path, + body, + contentType, eTag, + lastModified, metadata, + size, versionId, + }: StorageDownloadDataOutput = await job(); + expect(getObject).toHaveBeenCalledTimes(1); + expect({ + path, + body, contentType, + eTag, + lastModified, + metadata, + size, + versionId, + }).toEqual({ + path: inputPath, + ...mockDownloadResultBase, }); }); @@ -160,7 +348,7 @@ describe('downloadData', () => { (getObject as jest.Mock).mockResolvedValueOnce({ Body: 'body' }); downloadData({ - key: 'mockKey', + path: inputPath, options: { bytesRange: { start, end }, }, diff --git a/packages/storage/__tests__/providers/s3/apis/getProperties.test.ts b/packages/storage/__tests__/providers/s3/apis/getProperties.test.ts index 6ee31b031ed..191802f04f9 100644 --- a/packages/storage/__tests__/providers/s3/apis/getProperties.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/getProperties.test.ts @@ -4,8 +4,13 
@@ import { headObject } from '../../../../src/providers/s3/utils/client'; import { getProperties } from '../../../../src/providers/s3'; import { AWSCredentials } from '@aws-amplify/core/internals/utils'; -import { Amplify } from '@aws-amplify/core'; -import { GetPropertiesOptions } from '../../../../src/providers/s3/types'; +import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; +import { + GetPropertiesInput, + GetPropertiesWithPathInput, + GetPropertiesOutput, + GetPropertiesWithPathOutput, +} from '../../../../src/providers/s3/types'; jest.mock('../../../../src/providers/s3/utils/client'); jest.mock('@aws-amplify/core', () => ({ @@ -19,7 +24,7 @@ jest.mock('@aws-amplify/core', () => ({ }, }, })); -const mockHeadObject = headObject as jest.Mock; +const mockHeadObject = headObject as jest.MockedFunction; const mockFetchAuthSession = Amplify.Auth.fetchAuthSession as jest.Mock; const mockGetConfig = Amplify.getConfig as jest.Mock; @@ -30,10 +35,24 @@ const credentials: AWSCredentials = { sessionToken: 'sessionToken', secretAccessKey: 'secretAccessKey', }; +const inputKey = 'key'; +const inputPath = 'path'; const targetIdentityId = 'targetIdentityId'; const defaultIdentityId = 'defaultIdentityId'; -describe('getProperties api', () => { +const expectedResult = { + size: 100, + contentType: 'text/plain', + eTag: 'etag', + metadata: { key: 'value' }, + lastModified: new Date('01-01-1980'), + versionId: 'version-id', +}; + +describe('getProperties with key', () => { + const getPropertiesWrapper = ( + input: GetPropertiesInput, + ): Promise => getProperties(input); beforeAll(() => { mockFetchAuthSession.mockResolvedValue({ credentials, @@ -48,79 +67,209 @@ describe('getProperties api', () => { }, }); }); - describe('getProperties happy path ', () => { - const expected = { - key: 'key', - size: '100', - contentType: 'text/plain', - eTag: 'etag', - metadata: { key: 'value' }, - lastModified: 'last-modified', - versionId: 'version-id', - }; + describe('Happy 
cases: With key', () => { const config = { credentials, region: 'region', userAgentValue: expect.any(String), }; - const key = 'key'; beforeEach(() => { - mockHeadObject.mockReturnValueOnce({ - ContentLength: '100', + mockHeadObject.mockResolvedValue({ + ContentLength: 100, ContentType: 'text/plain', ETag: 'etag', - LastModified: 'last-modified', + LastModified: new Date('01-01-1980'), Metadata: { key: 'value' }, VersionId: 'version-id', + $metadata: {} as any, }); }); afterEach(() => { jest.clearAllMocks(); }); - [ + + const testCases: Array<{ + expectedKey: string; + options?: { accessLevel?: StorageAccessLevel; targetIdentityId?: string }; + }> = [ { - expectedKey: `public/${key}`, + expectedKey: `public/${inputKey}`, }, { options: { accessLevel: 'guest' }, - expectedKey: `public/${key}`, + expectedKey: `public/${inputKey}`, }, { options: { accessLevel: 'private' }, - expectedKey: `private/${defaultIdentityId}/${key}`, + expectedKey: `private/${defaultIdentityId}/${inputKey}`, }, { options: { accessLevel: 'protected' }, - expectedKey: `protected/${defaultIdentityId}/${key}`, + expectedKey: `protected/${defaultIdentityId}/${inputKey}`, }, { options: { accessLevel: 'protected', targetIdentityId }, - expectedKey: `protected/${targetIdentityId}/${key}`, + expectedKey: `protected/${targetIdentityId}/${inputKey}`, }, - ].forEach(({ options, expectedKey }) => { - const accessLevelMsg = options?.accessLevel ?? 'default'; - const targetIdentityIdMsg = options?.targetIdentityId - ? 
`and targetIdentityId` - : ''; - it(`should getProperties with ${accessLevelMsg} accessLevel ${targetIdentityIdMsg}`, async () => { + ]; + test.each(testCases)( + 'should getProperties with key $expectedKey', + async ({ options, expectedKey }) => { const headObjectOptions = { Bucket: 'bucket', Key: expectedKey, }; - expect.assertions(3); - expect( - await getProperties({ - key, - options: options as GetPropertiesOptions, - }), - ).toEqual(expected); + const { + key, + contentType, + eTag, + lastModified, + metadata, + size, + versionId, + } = await getPropertiesWrapper({ + key: inputKey, + options, + }); + expect({ + key, + contentType, + eTag, + lastModified, + metadata, + size, + versionId, + }).toEqual({ + key: inputKey, + ...expectedResult, + }); expect(headObject).toHaveBeenCalledTimes(1); expect(headObject).toHaveBeenCalledWith(config, headObjectOptions); + }, + ); + }); + + describe('Error cases : With key', () => { + afterEach(() => { + jest.clearAllMocks(); + }); + it('getProperties should return a not found error', async () => { + mockHeadObject.mockRejectedValueOnce( + Object.assign(new Error(), { + $metadata: { httpStatusCode: 404 }, + name: 'NotFound', + }), + ); + expect.assertions(3); + try { + await getPropertiesWrapper({ key: inputKey }); + } catch (error: any) { + expect(headObject).toHaveBeenCalledTimes(1); + expect(headObject).toHaveBeenCalledWith( + { + credentials, + region: 'region', + userAgentValue: expect.any(String), + }, + { + Bucket: 'bucket', + Key: `public/${inputKey}`, + }, + ); + expect(error.$metadata.httpStatusCode).toBe(404); + } + }); + }); +}); + +describe('Happy cases: With path', () => { + const getPropertiesWrapper = ( + input: GetPropertiesWithPathInput, + ): Promise => getProperties(input); + beforeAll(() => { + mockFetchAuthSession.mockResolvedValue({ + credentials, + identityId: defaultIdentityId, + }); + mockGetConfig.mockReturnValue({ + Storage: { + S3: { + bucket, + region, + }, + }, + }); + }); + 
describe('getProperties with path', () => { + const config = { + credentials, + region: 'region', + useAccelerateEndpoint: true, + userAgentValue: expect.any(String), + }; + beforeEach(() => { + mockHeadObject.mockResolvedValue({ + ContentLength: 100, + ContentType: 'text/plain', + ETag: 'etag', + LastModified: new Date('01-01-1980'), + Metadata: { key: 'value' }, + VersionId: 'version-id', + $metadata: {} as any, }); }); + afterEach(() => { + jest.clearAllMocks(); + }); + test.each([ + { + testPath: inputPath, + expectedPath: inputPath, + }, + { + testPath: () => inputPath, + expectedPath: inputPath, + }, + ])( + 'should getProperties with path $path and expectedPath $expectedPath', + async ({ testPath, expectedPath }) => { + const headObjectOptions = { + Bucket: 'bucket', + Key: expectedPath, + }; + const { + path, + contentType, + eTag, + lastModified, + metadata, + size, + versionId, + } = await getPropertiesWrapper({ + path: testPath, + options: { + useAccelerateEndpoint: true, + }, + }); + expect({ + path, + contentType, + eTag, + lastModified, + metadata, + size, + versionId, + }).toEqual({ + path: expectedPath, + ...expectedResult, + }); + expect(headObject).toHaveBeenCalledTimes(1); + expect(headObject).toHaveBeenCalledWith(config, headObjectOptions); + }, + ); }); - describe('getProperties error path', () => { + describe('Error cases : With path', () => { afterEach(() => { jest.clearAllMocks(); }); @@ -133,7 +282,7 @@ describe('getProperties api', () => { ); expect.assertions(3); try { - await getProperties({ key: 'keyed' }); + await getPropertiesWrapper({ path: inputPath }); } catch (error: any) { expect(headObject).toHaveBeenCalledTimes(1); expect(headObject).toHaveBeenCalledWith( @@ -144,7 +293,7 @@ describe('getProperties api', () => { }, { Bucket: 'bucket', - Key: 'public/keyed', + Key: inputPath, }, ); expect(error.$metadata.httpStatusCode).toBe(404); diff --git a/packages/storage/__tests__/providers/s3/apis/getUrl.test.ts 
b/packages/storage/__tests__/providers/s3/apis/getUrl.test.ts index 1cdf1a59b72..8f56299d943 100644 --- a/packages/storage/__tests__/providers/s3/apis/getUrl.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/getUrl.test.ts @@ -3,12 +3,17 @@ import { getUrl } from '../../../../src/providers/s3/apis'; import { AWSCredentials } from '@aws-amplify/core/internals/utils'; -import { Amplify } from '@aws-amplify/core'; +import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; import { getPresignedGetObjectUrl, headObject, } from '../../../../src/providers/s3/utils/client'; -import { GetUrlOptions } from '../../../../src/providers/s3/types'; +import { + GetUrlInput, + GetUrlWithPathInput, + GetUrlOutput, + GetUrlWithPathOutput, +} from '../../../../src/providers/s3/types'; jest.mock('../../../../src/providers/s3/utils/client'); jest.mock('@aws-amplify/core', () => ({ @@ -34,8 +39,11 @@ const credentials: AWSCredentials = { }; const targetIdentityId = 'targetIdentityId'; const defaultIdentityId = 'defaultIdentityId'; +const mockURL = new URL('https://google.com'); -describe('getUrl test', () => { +describe('getUrl test with key', () => { + const getUrlWrapper = (input: GetUrlInput): Promise => + getUrl(input); beforeAll(() => { mockFetchAuthSession.mockResolvedValue({ credentials, @@ -51,7 +59,7 @@ describe('getUrl test', () => { }); }); - describe('getUrl happy path', () => { + describe('Happy cases: With key', () => { const config = { credentials, region, @@ -59,24 +67,28 @@ describe('getUrl test', () => { }; const key = 'key'; beforeEach(() => { - (headObject as jest.Mock).mockImplementation(() => { - return { - Key: 'key', - ContentLength: '100', - ContentType: 'text/plain', - ETag: 'etag', - LastModified: 'last-modified', - Metadata: { key: 'value' }, - }; - }); - (getPresignedGetObjectUrl as jest.Mock).mockReturnValueOnce({ - url: new URL('https://google.com'), + (headObject as jest.MockedFunction).mockResolvedValue({ + ContentLength: 100, + 
ContentType: 'text/plain', + ETag: 'etag', + LastModified: new Date('01-01-1980'), + Metadata: { meta: 'value' }, + $metadata: {} as any, }); + ( + getPresignedGetObjectUrl as jest.MockedFunction< + typeof getPresignedGetObjectUrl + > + ).mockResolvedValue(mockURL); }); afterEach(() => { jest.clearAllMocks(); }); - [ + + const testCases: Array<{ + options?: { accessLevel?: StorageAccessLevel; targetIdentityId?: string }; + expectedKey: string; + }> = [ { expectedKey: `public/${key}`, }, @@ -96,34 +108,135 @@ describe('getUrl test', () => { options: { accessLevel: 'protected', targetIdentityId }, expectedKey: `protected/${targetIdentityId}/${key}`, }, - ].forEach(({ options, expectedKey }) => { - const accessLevelMsg = options?.accessLevel ?? 'default'; - const targetIdentityIdMsg = options?.targetIdentityId - ? `and targetIdentityId` - : ''; - it(`should getUrl with ${accessLevelMsg} accessLevel ${targetIdentityIdMsg}`, async () => { + ]; + + test.each(testCases)( + 'should getUrl with key $expectedKey', + async ({ options, expectedKey }) => { const headObjectOptions = { Bucket: bucket, Key: expectedKey, }; - expect.assertions(4); - const result = await getUrl({ + const { url, expiresAt } = await getUrlWrapper({ key, options: { ...options, validateObjectExistence: true, - } as GetUrlOptions, + }, }); + const expectedResult = { + url: mockURL, + expiresAt: expect.any(Date), + }; expect(getPresignedGetObjectUrl).toHaveBeenCalledTimes(1); expect(headObject).toHaveBeenCalledTimes(1); expect(headObject).toHaveBeenCalledWith(config, headObjectOptions); - expect(result.url).toEqual({ - url: new URL('https://google.com'), + expect({ url, expiresAt }).toEqual(expectedResult); + }, + ); + }); + describe('Error cases : With key', () => { + afterAll(() => { + jest.clearAllMocks(); + }); + it('should return not found error when the object is not found', async () => { + (headObject as jest.Mock).mockImplementation(() => { + throw Object.assign(new Error(), { + $metadata: { 
httpStatusCode: 404 }, + name: 'NotFound', }); }); + expect.assertions(2); + try { + await getUrlWrapper({ + key: 'invalid_key', + options: { validateObjectExistence: true }, + }); + } catch (error: any) { + expect(headObject).toHaveBeenCalledTimes(1); + expect(error.$metadata?.httpStatusCode).toBe(404); + } }); }); - describe('getUrl error path', () => { +}); + +describe('getUrl test with path', () => { + const getUrlWrapper = ( + input: GetUrlWithPathInput, + ): Promise => getUrl(input); + beforeAll(() => { + mockFetchAuthSession.mockResolvedValue({ + credentials, + identityId: defaultIdentityId, + }); + mockGetConfig.mockReturnValue({ + Storage: { + S3: { + bucket, + region, + }, + }, + }); + }); + + describe('Happy cases: With path', () => { + const config = { + credentials, + region, + userAgentValue: expect.any(String), + }; + beforeEach(() => { + (headObject as jest.MockedFunction).mockResolvedValue({ + ContentLength: 100, + ContentType: 'text/plain', + ETag: 'etag', + LastModified: new Date('01-01-1980'), + Metadata: { meta: 'value' }, + $metadata: {} as any, + }); + ( + getPresignedGetObjectUrl as jest.MockedFunction< + typeof getPresignedGetObjectUrl + > + ).mockResolvedValue(mockURL); + }); + afterEach(() => { + jest.clearAllMocks(); + }); + + test.each([ + { + path: 'path', + expectedKey: 'path', + }, + { + path: () => 'path', + expectedKey: 'path', + }, + ])( + 'should getUrl with path $path and expectedKey $expectedKey', + async ({ path, expectedKey }) => { + const headObjectOptions = { + Bucket: bucket, + Key: expectedKey, + }; + const { url, expiresAt } = await getUrlWrapper({ + path, + options: { + validateObjectExistence: true, + }, + }); + expect(getPresignedGetObjectUrl).toHaveBeenCalledTimes(1); + expect(headObject).toHaveBeenCalledTimes(1); + expect(headObject).toHaveBeenCalledWith(config, headObjectOptions); + expect({ url, expiresAt }).toEqual({ + url: mockURL, + expiresAt: expect.any(Date), + }); + }, + ); + }); + describe('Error cases : 
With path', () => { afterAll(() => { jest.clearAllMocks(); }); @@ -136,8 +249,8 @@ describe('getUrl test', () => { }); expect.assertions(2); try { - await getUrl({ - key: 'invalid_key', + await getUrlWrapper({ + path: 'invalid_key', options: { validateObjectExistence: true }, }); } catch (error: any) { diff --git a/packages/storage/__tests__/providers/s3/apis/list.test.ts b/packages/storage/__tests__/providers/s3/apis/list.test.ts index 681b4ff1af9..21ad76cdc33 100644 --- a/packages/storage/__tests__/providers/s3/apis/list.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/list.test.ts @@ -2,12 +2,18 @@ // SPDX-License-Identifier: Apache-2.0 import { AWSCredentials } from '@aws-amplify/core/internals/utils'; -import { Amplify } from '@aws-amplify/core'; +import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; import { listObjectsV2 } from '../../../../src/providers/s3/utils/client'; import { list } from '../../../../src/providers/s3'; import { - ListAllOptions, - ListPaginateOptions, + ListAllInput, + ListAllWithPathInput, + ListAllOutput, + ListAllWithPathOutput, + ListPaginateInput, + ListPaginateWithPathInput, + ListPaginateOutput, + ListPaginateWithPathOutput, } from '../../../../src/providers/s3/types'; jest.mock('../../../../src/providers/s3/utils/client'); @@ -26,7 +32,6 @@ const mockFetchAuthSession = Amplify.Auth.fetchAuthSession as jest.Mock; const mockGetConfig = Amplify.getConfig as jest.Mock; const mockListObject = listObjectsV2 as jest.Mock; const key = 'path/itemsKey'; -const path = key; const bucket = 'bucket'; const region = 'region'; const nextToken = 'nextToken'; @@ -55,7 +60,7 @@ const listResultItem = { lastModified, size, }; -const mockListObjectsV2ApiWithPages = pages => { +const mockListObjectsV2ApiWithPages = (pages: number) => { let methodCalls = 0; mockListObject.mockClear(); mockListObject.mockImplementation(async (_, input) => { @@ -89,43 +94,59 @@ describe('list API', () => { }, }); }); - describe('Happy Cases:', () => 
{ + describe('Prefix: Happy Cases:', () => { + const listAllWrapper = (input: ListAllInput): Promise => + list(input); + const listPaginatedWrapper = ( + input: ListPaginateInput, + ): Promise => list(input); afterEach(() => { jest.clearAllMocks(); }); - const accessLevelTests = [ + const accessLevelTests: Array<{ + prefix?: string; + expectedKey: string; + options?: { + accessLevel?: StorageAccessLevel; + targetIdentityId?: string; + }; + }> = [ { - expectedPath: `public/`, + expectedKey: `public/`, }, { - path, - expectedPath: `public/${path}`, + options: { accessLevel: 'guest' }, + expectedKey: `public/`, + }, + { + prefix: key, + expectedKey: `public/${key}`, }, { - path, + prefix: key, options: { accessLevel: 'guest' }, - expectedPath: `public/${path}`, + expectedKey: `public/${key}`, }, { - path, + prefix: key, options: { accessLevel: 'private' }, - expectedPath: `private/${defaultIdentityId}/${path}`, + expectedKey: `private/${defaultIdentityId}/${key}`, }, { - path, + prefix: key, options: { accessLevel: 'protected' }, - expectedPath: `protected/${defaultIdentityId}/${path}`, + expectedKey: `protected/${defaultIdentityId}/${key}`, }, { - path, + prefix: key, options: { accessLevel: 'protected', targetIdentityId }, - expectedPath: `protected/${targetIdentityId}/${path}`, + expectedKey: `protected/${targetIdentityId}/${key}`, }, ]; - accessLevelTests.forEach(({ path, options, expectedPath }) => { - const pathMsg = path ? 'custom' : 'default'; + accessLevelTests.forEach(({ prefix, options, expectedKey }) => { + const pathMsg = prefix ? 'custom' : 'default'; const accessLevelMsg = options?.accessLevel ?? 'default'; const targetIdentityIdMsg = options?.targetIdentityId ? 
`with targetIdentityId` @@ -133,32 +154,32 @@ describe('list API', () => { it(`should list objects with pagination, default pageSize, ${pathMsg} path, ${accessLevelMsg} accessLevel ${targetIdentityIdMsg}`, async () => { mockListObject.mockImplementationOnce(() => { return { - Contents: [ - { ...listObjectClientBaseResultItem, Key: expectedPath }, - ], + Contents: [{ ...listObjectClientBaseResultItem, Key: expectedKey }], NextContinuationToken: nextToken, }; }); - expect.assertions(4); - let response = await list({ - prefix: path, - options: options as ListPaginateOptions, + const response = await listPaginatedWrapper({ + prefix, + options: options, + }); + const { key, eTag, size, lastModified } = response.items[0]; + expect(response.items).toHaveLength(1); + expect({ key, eTag, size, lastModified }).toEqual({ + key: prefix ?? '', + ...listResultItem, }); - expect(response.items).toEqual([ - { ...listResultItem, key: path ?? '' }, - ]); expect(response.nextToken).toEqual(nextToken); expect(listObjectsV2).toHaveBeenCalledTimes(1); expect(listObjectsV2).toHaveBeenCalledWith(listObjectClientConfig, { Bucket: bucket, MaxKeys: 1000, - Prefix: expectedPath, + Prefix: expectedKey, }); }); }); - accessLevelTests.forEach(({ path, options, expectedPath }) => { - const pathMsg = path ? 'custom' : 'default'; + accessLevelTests.forEach(({ prefix, options, expectedKey }) => { + const pathMsg = prefix ? 'custom' : 'default'; const accessLevelMsg = options?.accessLevel ?? 'default'; const targetIdentityIdMsg = options?.targetIdentityId ? 
`with targetIdentityId` @@ -166,38 +187,38 @@ describe('list API', () => { it(`should list objects with pagination using pageSize, nextToken, ${pathMsg} path, ${accessLevelMsg} accessLevel ${targetIdentityIdMsg}`, async () => { mockListObject.mockImplementationOnce(() => { return { - Contents: [ - { ...listObjectClientBaseResultItem, Key: expectedPath }, - ], + Contents: [{ ...listObjectClientBaseResultItem, Key: expectedKey }], NextContinuationToken: nextToken, }; }); - expect.assertions(4); const customPageSize = 5; - const response = await list({ - prefix: path, + const response = await listPaginatedWrapper({ + prefix, options: { - ...(options as ListPaginateOptions), + ...options, pageSize: customPageSize, nextToken: nextToken, }, }); - expect(response.items).toEqual([ - { ...listResultItem, key: path ?? '' }, - ]); + const { key, eTag, size, lastModified } = response.items[0]; + expect(response.items).toHaveLength(1); + expect({ key, eTag, size, lastModified }).toEqual({ + key: prefix ?? '', + ...listResultItem, + }); expect(response.nextToken).toEqual(nextToken); expect(listObjectsV2).toHaveBeenCalledTimes(1); expect(listObjectsV2).toHaveBeenCalledWith(listObjectClientConfig, { Bucket: bucket, - Prefix: expectedPath, + Prefix: expectedKey, ContinuationToken: nextToken, MaxKeys: customPageSize, }); }); }); - accessLevelTests.forEach(({ path, options, expectedPath }) => { - const pathMsg = path ? 'custom' : 'default'; + accessLevelTests.forEach(({ prefix, options, expectedKey }) => { + const pathMsg = prefix ? 'custom' : 'default'; const accessLevelMsg = options?.accessLevel ?? 'default'; const targetIdentityIdMsg = options?.targetIdentityId ? 
`with targetIdentityId` @@ -206,38 +227,39 @@ describe('list API', () => { mockListObject.mockImplementationOnce(() => { return {}; }); - expect.assertions(3); - let response = await list({ - prefix: path, - options: options as ListPaginateOptions, + let response = await listPaginatedWrapper({ + prefix, + options, }); expect(response.items).toEqual([]); - // + expect(response.nextToken).toEqual(undefined); expect(listObjectsV2).toHaveBeenCalledWith(listObjectClientConfig, { Bucket: bucket, MaxKeys: 1000, - Prefix: expectedPath, + Prefix: expectedKey, }); }); }); - accessLevelTests.forEach(({ path, options, expectedPath }) => { - const pathMsg = path ? 'custom' : 'default'; + accessLevelTests.forEach(({ prefix: inputKey, options, expectedKey }) => { + const pathMsg = inputKey ? 'custom' : 'default'; const accessLevelMsg = options?.accessLevel ?? 'default'; const targetIdentityIdMsg = options?.targetIdentityId ? `with targetIdentityId` : ''; it(`should list all objects having three pages with ${pathMsg} path, ${accessLevelMsg} accessLevel ${targetIdentityIdMsg}`, async () => { - expect.assertions(5); mockListObjectsV2ApiWithPages(3); - const result = await list({ - prefix: path, - options: { ...options, listAll: true } as ListAllOptions, + const result = await listAllWrapper({ + prefix: inputKey, + options: { ...options, listAll: true }, + }); + const { key, eTag, lastModified, size } = result.items[0]; + expect(result.items).toHaveLength(3); + expect({ key, eTag, lastModified, size }).toEqual({ + ...listResultItem, + key: inputKey ?? '', }); - - const listResult = { ...listResultItem, key: path ?? 
'' }; - expect(result.items).toEqual([listResult, listResult, listResult]); expect(result).not.toHaveProperty(nextToken); // listing three times for three pages @@ -249,7 +271,7 @@ describe('list API', () => { listObjectClientConfig, { Bucket: bucket, - Prefix: expectedPath, + Prefix: expectedKey, MaxKeys: 1000, ContinuationToken: undefined, }, @@ -260,7 +282,7 @@ describe('list API', () => { listObjectClientConfig, { Bucket: bucket, - Prefix: expectedPath, + Prefix: expectedKey, MaxKeys: 1000, ContinuationToken: nextToken, }, @@ -269,6 +291,172 @@ describe('list API', () => { }); }); + describe('Path: Happy Cases:', () => { + const listAllWrapper = ( + input: ListAllWithPathInput, + ): Promise => list(input); + const listPaginatedWrapper = ( + input: ListPaginateWithPathInput, + ): Promise => list(input); + const resolvePath = (path: string | Function) => + typeof path === 'string' ? path : path({ identityId: defaultIdentityId }); + afterEach(() => { + jest.clearAllMocks(); + mockListObject.mockClear(); + }); + const pathTestCases = [ + { + path: `public/${key}`, + }, + { + path: ({ identityId }: { identityId: string }) => + `protected/${identityId}/${key}`, + }, + ]; + + it.each(pathTestCases)( + 'should list objects with pagination, default pageSize, custom path', + async ({ path: inputPath }) => { + const resolvedPath = resolvePath(inputPath); + mockListObject.mockImplementationOnce(() => { + return { + Contents: [ + { + ...listObjectClientBaseResultItem, + Key: resolvePath(inputPath), + }, + ], + NextContinuationToken: nextToken, + }; + }); + const response = await listPaginatedWrapper({ + path: resolvedPath, + }); + const { path, eTag, lastModified, size } = response.items[0]; + expect(response.items).toHaveLength(1); + expect({ path, eTag, lastModified, size }).toEqual({ + ...listResultItem, + path: resolvedPath, + }); + expect(response.nextToken).toEqual(nextToken); + expect(listObjectsV2).toHaveBeenCalledTimes(1); + 
expect(listObjectsV2).toHaveBeenCalledWith(listObjectClientConfig, { + Bucket: bucket, + MaxKeys: 1000, + Prefix: resolvePath(inputPath), + }); + }, + ); + + it.each(pathTestCases)( + 'should list objects with pagination using custom pageSize, nextToken and custom path: ${path}', + async ({ path: inputPath }) => { + const resolvedPath = resolvePath(inputPath); + mockListObject.mockImplementationOnce(() => { + return { + Contents: [ + { + ...listObjectClientBaseResultItem, + Key: resolvePath(inputPath), + }, + ], + NextContinuationToken: nextToken, + }; + }); + const customPageSize = 5; + const response = await listPaginatedWrapper({ + path: resolvedPath, + options: { + pageSize: customPageSize, + nextToken: nextToken, + }, + }); + const { path, eTag, lastModified, size } = response.items[0]; + expect(response.items).toHaveLength(1); + expect({ path, eTag, lastModified, size }).toEqual({ + ...listResultItem, + path: resolvedPath, + }); + expect(response.nextToken).toEqual(nextToken); + expect(listObjectsV2).toHaveBeenCalledTimes(1); + expect(listObjectsV2).toHaveBeenCalledWith(listObjectClientConfig, { + Bucket: bucket, + Prefix: resolvePath(inputPath), + ContinuationToken: nextToken, + MaxKeys: customPageSize, + }); + }, + ); + + it.each(pathTestCases)( + 'should list objects with zero results with custom path: ${path}', + async ({ path }) => { + mockListObject.mockImplementationOnce(() => { + return {}; + }); + let response = await listPaginatedWrapper({ + path: resolvePath(path), + }); + expect(response.items).toEqual([]); + + expect(response.nextToken).toEqual(undefined); + expect(listObjectsV2).toHaveBeenCalledWith(listObjectClientConfig, { + Bucket: bucket, + MaxKeys: 1000, + Prefix: resolvePath(path), + }); + }, + ); + + it.each(pathTestCases)( + 'should list all objects having three pages with custom path: ${path}', + async ({ path: inputPath }) => { + const resolvedPath = resolvePath(inputPath); + mockListObjectsV2ApiWithPages(3); + const result = await 
listAllWrapper({ + path: resolvedPath, + options: { listAll: true }, + }); + + const listResult = { + path: resolvedPath, + ...listResultItem, + }; + const { path, lastModified, eTag, size } = result.items[0]; + expect(result.items).toHaveLength(3); + expect({ path, lastModified, eTag, size }).toEqual(listResult); + expect(result.items).toEqual([listResult, listResult, listResult]); + expect(result).not.toHaveProperty(nextToken); + + // listing three times for three pages + expect(listObjectsV2).toHaveBeenCalledTimes(3); + + // first input recieves undefined as the Continuation Token + expect(listObjectsV2).toHaveBeenNthCalledWith( + 1, + listObjectClientConfig, + { + Bucket: bucket, + Prefix: resolvedPath, + MaxKeys: 1000, + ContinuationToken: undefined, + }, + ); + // last input recieves TEST_TOKEN as the Continuation Token + expect(listObjectsV2).toHaveBeenNthCalledWith( + 3, + listObjectClientConfig, + { + Bucket: bucket, + Prefix: resolvedPath, + MaxKeys: 1000, + ContinuationToken: nextToken, + }, + ); + }, + ); + }); + describe('Error Cases:', () => { afterEach(() => { jest.clearAllMocks(); @@ -280,10 +468,10 @@ describe('list API', () => { name: 'NotFound', }), ); - expect.assertions(3); try { await list({}); } catch (error: any) { + expect.assertions(3); expect(listObjectsV2).toHaveBeenCalledTimes(1); expect(listObjectsV2).toHaveBeenCalledWith(listObjectClientConfig, { Bucket: bucket, diff --git a/packages/storage/__tests__/providers/s3/apis/remove.test.ts b/packages/storage/__tests__/providers/s3/apis/remove.test.ts index 0f2e1a65b17..0c8662492ac 100644 --- a/packages/storage/__tests__/providers/s3/apis/remove.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/remove.test.ts @@ -2,10 +2,16 @@ // SPDX-License-Identifier: Apache-2.0 import { AWSCredentials } from '@aws-amplify/core/internals/utils'; -import { Amplify } from '@aws-amplify/core'; +import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; import { deleteObject } from 
'../../../../src/providers/s3/utils/client'; import { remove } from '../../../../src/providers/s3/apis'; -import { StorageOptions } from '../../../../src/types'; +import { StorageValidationErrorCode } from '../../../../src/errors/types/validation'; +import { + RemoveInput, + RemoveWithPathInput, + RemoveOutput, + RemoveWithPathOutput, +} from '../../../../src/providers/s3/types'; jest.mock('../../../../src/providers/s3/utils/client'); jest.mock('@aws-amplify/core', () => ({ @@ -22,11 +28,10 @@ jest.mock('@aws-amplify/core', () => ({ const mockDeleteObject = deleteObject as jest.Mock; const mockFetchAuthSession = Amplify.Auth.fetchAuthSession as jest.Mock; const mockGetConfig = Amplify.getConfig as jest.Mock; -const key = 'key'; +const inputKey = 'key'; const bucket = 'bucket'; const region = 'region'; const defaultIdentityId = 'defaultIdentityId'; -const removeResult = { key }; const credentials: AWSCredentials = { accessKeyId: 'accessKeyId', sessionToken: 'sessionToken', @@ -53,51 +58,101 @@ describe('remove API', () => { }, }); }); - describe('Happy Path Cases:', () => { - beforeEach(() => { - mockDeleteObject.mockImplementation(() => { - return { - Metadata: { key: 'value' }, - }; + describe('Happy Cases', () => { + describe('With Key', () => { + const removeWrapper = (input: RemoveInput): Promise => + remove(input); + + beforeEach(() => { + mockDeleteObject.mockImplementation(() => { + return { + Metadata: { key: 'value' }, + }; + }); + }); + afterEach(() => { + jest.clearAllMocks(); + }); + const testCases: Array<{ + expectedKey: string; + options?: { accessLevel?: StorageAccessLevel }; + }> = [ + { + expectedKey: `public/${inputKey}`, + }, + { + options: { accessLevel: 'guest' }, + expectedKey: `public/${inputKey}`, + }, + { + options: { accessLevel: 'private' }, + expectedKey: `private/${defaultIdentityId}/${inputKey}`, + }, + { + options: { accessLevel: 'protected' }, + expectedKey: `protected/${defaultIdentityId}/${inputKey}`, + }, + ]; + + 
testCases.forEach(({ options, expectedKey }) => { + const accessLevel = options?.accessLevel ?? 'default'; + + it(`should remove object with ${accessLevel} accessLevel`, async () => { + const { key } = await removeWrapper({ + key: inputKey, + options: options, + }); + expect(key).toEqual(inputKey); + expect(deleteObject).toHaveBeenCalledTimes(1); + expect(deleteObject).toHaveBeenCalledWith(deleteObjectClientConfig, { + Bucket: bucket, + Key: expectedKey, + }); + }); }); }); - afterEach(() => { - jest.clearAllMocks(); - }); - [ - { - expectedKey: `public/${key}`, - }, - { - options: { accessLevel: 'guest' }, - expectedKey: `public/${key}`, - }, - { - options: { accessLevel: 'private' }, - expectedKey: `private/${defaultIdentityId}/${key}`, - }, - { - options: { accessLevel: 'protected' }, - expectedKey: `protected/${defaultIdentityId}/${key}`, - }, - ].forEach(({ options, expectedKey }) => { - const accessLevel = options?.accessLevel ?? 'default'; + describe('With Path', () => { + const removeWrapper = ( + input: RemoveWithPathInput, + ): Promise => remove(input); + beforeEach(() => { + mockDeleteObject.mockImplementation(() => { + return { + Metadata: { key: 'value' }, + }; + }); + }); + afterEach(() => { + jest.clearAllMocks(); + }); + [ + { + path: `public/${inputKey}`, + }, + { + path: ({ identityId }: { identityId?: string }) => + `protected/${identityId}/${inputKey}`, + }, + ].forEach(({ path: inputPath }) => { + const resolvedPath = + typeof inputPath === 'string' + ? 
inputPath + : inputPath({ identityId: defaultIdentityId }); - it(`should remove object with ${accessLevel} accessLevel`, async () => { - expect.assertions(3); - expect( - await remove({ key, options: options as StorageOptions }), - ).toEqual(removeResult); - expect(deleteObject).toHaveBeenCalledTimes(1); - expect(deleteObject).toHaveBeenCalledWith(deleteObjectClientConfig, { - Bucket: bucket, - Key: expectedKey, + it(`should remove object for the given path`, async () => { + const { path } = await removeWrapper({ path: inputPath }); + expect(path).toEqual(resolvedPath); + expect(deleteObject).toHaveBeenCalledTimes(1); + expect(deleteObject).toHaveBeenCalledWith(deleteObjectClientConfig, { + Bucket: bucket, + Key: resolvedPath, + }); }); }); }); }); - describe('Error Path Cases:', () => { + describe('Error Cases:', () => { afterEach(() => { jest.clearAllMocks(); }); @@ -121,5 +176,15 @@ describe('remove API', () => { expect(error.$metadata.httpStatusCode).toBe(404); } }); + it('should throw InvalidStorageOperationInput error when the path is empty', async () => { + expect.assertions(1); + try { + await remove({ path: '' }); + } catch (error: any) { + expect(error.name).toBe( + StorageValidationErrorCode.InvalidStorageOperationInput, + ); + } + }); }); }); diff --git a/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts b/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts index 22569bf6671..211d3238a35 100644 --- a/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/uploadData/index.test.ts @@ -10,11 +10,13 @@ import { } from '../../../../../src/errors/types/validation'; import { putObjectJob } from '../../../../../src/providers/s3/apis/uploadData/putObjectJob'; import { getMultipartUploadHandlers } from '../../../../../src/providers/s3/apis/uploadData/multipart'; +import { UploadDataInput, UploadDataWithPathInput } from '../../../../../src'; 
jest.mock('../../../../../src/providers/s3/utils/'); jest.mock('../../../../../src/providers/s3/apis/uploadData/putObjectJob'); jest.mock('../../../../../src/providers/s3/apis/uploadData/multipart'); +const testPath = 'testPath/object'; const mockCreateUploadTask = createUploadTask as jest.Mock; const mockPutObjectJob = putObjectJob as jest.Mock; const mockGetMultipartUploadHandlers = ( @@ -26,19 +28,19 @@ const mockGetMultipartUploadHandlers = ( onCancel: jest.fn(), }); -describe('uploadData', () => { +/* TODO Remove suite when `key` parameter is removed */ +describe('uploadData with key', () => { afterEach(() => { jest.clearAllMocks(); }); describe('validation', () => { it('should throw if data size is too big', async () => { - expect(() => - uploadData({ - key: 'key', - data: { size: MAX_OBJECT_SIZE + 1 } as any, - }), - ).toThrow( + const mockUploadInput: UploadDataInput = { + key: 'key', + data: { size: MAX_OBJECT_SIZE + 1 } as any, + }; + expect(() => uploadData(mockUploadInput)).toThrow( expect.objectContaining( validationErrorMap[StorageValidationErrorCode.ObjectIsTooLarge], ), @@ -54,7 +56,7 @@ describe('uploadData', () => { }); }); - describe('use putObject', () => { + describe('use putObject for small uploads', () => { const smallData = { size: 5 * 1024 * 1024 } as any; it('should use putObject if data size is <= 5MB', async () => { uploadData({ @@ -83,7 +85,7 @@ describe('uploadData', () => { }); }); - describe('use multipartUpload', () => { + describe('use multipartUpload for large uploads', () => { const biggerData = { size: 5 * 1024 * 1024 + 1 } as any; it('should use multipartUpload if data size is > 5MB', async () => { uploadData({ @@ -121,3 +123,116 @@ describe('uploadData', () => { }); }); }); + +describe('uploadData with path', () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + describe('validation', () => { + it('should throw if data size is too big', async () => { + const mockUploadInput: UploadDataWithPathInput = { + path: 
testPath, + data: { size: MAX_OBJECT_SIZE + 1 } as any, + }; + expect(() => uploadData(mockUploadInput)).toThrow( + expect.objectContaining( + validationErrorMap[StorageValidationErrorCode.ObjectIsTooLarge], + ), + ); + }); + + it('should NOT throw if data size is unknown', async () => { + uploadData({ + path: testPath, + data: {} as any, + }); + expect(mockCreateUploadTask).toHaveBeenCalled(); + }); + }); + + describe('use putObject for small uploads', () => { + const smallData = { size: 5 * 1024 * 1024 } as any; + + test.each([ + { + path: testPath, + }, + { + path: () => testPath, + }, + ])( + 'should use putObject if data size is <= 5MB when path is $path', + async ({ path }) => { + const testInput = { + path, + data: smallData, + }; + + uploadData(testInput); + + expect(mockPutObjectJob).toHaveBeenCalledWith( + testInput, + expect.any(AbortSignal), + expect.any(Number), + ); + expect(mockGetMultipartUploadHandlers).not.toHaveBeenCalled(); + }, + ); + + it('should use uploadTask', async () => { + mockPutObjectJob.mockReturnValueOnce('putObjectJob'); + mockCreateUploadTask.mockReturnValueOnce('uploadTask'); + + const task = uploadData({ + path: testPath, + data: smallData, + }); + + expect(task).toBe('uploadTask'); + expect(mockCreateUploadTask).toHaveBeenCalledWith( + expect.objectContaining({ + job: 'putObjectJob', + onCancel: expect.any(Function), + isMultipartUpload: false, + }), + ); + }); + }); + + describe('use multipartUpload for large uploads', () => { + const biggerData = { size: 5 * 1024 * 1024 + 1 } as any; + it('should use multipartUpload if data size is > 5MB', async () => { + const testInput = { + path: testPath, + data: biggerData, + }; + + uploadData(testInput); + + expect(mockPutObjectJob).not.toHaveBeenCalled(); + expect(mockGetMultipartUploadHandlers).toHaveBeenCalledWith( + testInput, + expect.any(Number), + ); + }); + + it('should use uploadTask', async () => { + mockCreateUploadTask.mockReturnValueOnce('uploadTask'); + const task = 
uploadData({ + path: testPath, + data: biggerData, + }); + expect(task).toBe('uploadTask'); + expect(mockCreateUploadTask).toHaveBeenCalledWith( + expect.objectContaining({ + job: expect.any(Function), + onCancel: expect.any(Function), + onResume: expect.any(Function), + onPause: expect.any(Function), + isMultipartUpload: true, + }), + ); + }); + }); +}); diff --git a/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts b/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts index 65b4dd473a6..302d76beaa8 100644 --- a/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/uploadData/multipartHandlers.test.ts @@ -36,6 +36,8 @@ const region = 'region'; const defaultKey = 'key'; const defaultContentType = 'application/octet-stream'; const defaultCacheKey = '8388608_application/octet-stream_bucket_public_key'; +const testPath = 'testPath/object'; +const testPathCacheKey = `8388608_${defaultContentType}_${bucket}_custom_${testPath}`; const mockCreateMultipartUpload = createMultipartUpload as jest.Mock; const mockUploadPart = uploadPart as jest.Mock; @@ -116,7 +118,8 @@ const resetS3Mocks = () => { mockListParts.mockReset(); }; -describe('getMultipartUploadHandlers', () => { +/* TODO Remove suite when `key` parameter is removed */ +describe('getMultipartUploadHandlers with key', () => { beforeAll(() => { mockFetchAuthSession.mockResolvedValue({ credentials, @@ -629,3 +632,510 @@ describe('getMultipartUploadHandlers', () => { }); }); }); + +describe('getMultipartUploadHandlers with path', () => { + beforeAll(() => { + mockFetchAuthSession.mockResolvedValue({ + credentials, + identityId: defaultIdentityId, + }); + (Amplify.getConfig as jest.Mock).mockReturnValue({ + Storage: { + S3: { + bucket, + region, + }, + }, + }); + }); + + afterEach(() => { + jest.clearAllMocks(); + resetS3Mocks(); + }); + + it('should return multipart upload 
handlers', async () => { + const multipartUploadHandlers = getMultipartUploadHandlers( + { + path: testPath, + data: { size: 5 * 1024 * 1024 } as any, + }, + 5 * 1024 * 1024, + ); + expect(multipartUploadHandlers).toEqual({ + multipartUploadJob: expect.any(Function), + onPause: expect.any(Function), + onResume: expect.any(Function), + onCancel: expect.any(Function), + }); + }); + + describe('upload', () => { + const getBlob = (size: number) => new Blob(['1'.repeat(size)]); + [ + { + path: testPath, + expectedKey: testPath, + }, + { + path: ({ identityId }: { identityId?: string }) => + `testPath/${identityId}/object`, + expectedKey: `testPath/${defaultIdentityId}/object`, + }, + ].forEach(({ path: inputPath, expectedKey }) => { + it.each([ + ['file', new File([getBlob(8 * MB)], 'someName')], + ['blob', getBlob(8 * MB)], + ['string', 'Ü'.repeat(4 * MB)], + ['arrayBuffer', new ArrayBuffer(8 * MB)], + ['arrayBufferView', new Uint8Array(8 * MB)], + ])( + `should upload a %s type body that splits into 2 parts to path ${expectedKey}`, + async (_, twoPartsPayload) => { + mockMultipartUploadSuccess(); + const { multipartUploadJob } = getMultipartUploadHandlers({ + path: inputPath, + data: twoPartsPayload, + }); + const result = await multipartUploadJob(); + expect(mockCreateMultipartUpload).toHaveBeenCalledWith( + expect.objectContaining({ + credentials, + region, + abortSignal: expect.any(AbortSignal), + }), + expect.objectContaining({ + Bucket: bucket, + Key: expectedKey, + ContentType: defaultContentType, + }), + ); + expect(result).toEqual( + expect.objectContaining({ path: expectedKey, eTag: 'etag' }), + ); + expect(mockCreateMultipartUpload).toHaveBeenCalledTimes(1); + expect(mockUploadPart).toHaveBeenCalledTimes(2); + expect(mockCompleteMultipartUpload).toHaveBeenCalledTimes(1); + }, + ); + }); + + it('should throw if unsupported payload type is provided', async () => { + mockMultipartUploadSuccess(); + const { multipartUploadJob } = getMultipartUploadHandlers({ + 
path: testPath, + data: 1 as any, + }); + await expect(multipartUploadJob()).rejects.toThrow( + expect.objectContaining( + validationErrorMap[StorageValidationErrorCode.InvalidUploadSource], + ), + ); + }); + + it('should upload a body that exceeds the size of default part size and parts count', async () => { + let buffer: ArrayBuffer; + const file = { + __proto__: File.prototype, + name: 'some file', + lastModified: 0, + size: 100_000 * MB, + type: 'text/plain', + slice: jest.fn().mockImplementation((start, end) => { + if (end - start !== buffer?.byteLength) { + buffer = new ArrayBuffer(end - start); + } + return buffer; + }), + } as any as File; + mockMultipartUploadSuccess(); + const { multipartUploadJob } = getMultipartUploadHandlers( + { + path: testPath, + data: file, + }, + file.size, + ); + await multipartUploadJob(); + expect(file.slice).toHaveBeenCalledTimes(10_000); // S3 limit of parts count + expect(mockCreateMultipartUpload).toHaveBeenCalledTimes(1); + expect(mockUploadPart).toHaveBeenCalledTimes(10_000); + expect(mockCompleteMultipartUpload).toHaveBeenCalledTimes(1); + expect(mockUploadPart.mock.calls[0][1].Body.byteLength).toEqual(10 * MB); // The part size should be adjusted from default 5MB to 10MB. + }); + + it('should throw error when remote and local file sizes do not match upon completed upload', async () => { + expect.assertions(1); + mockMultipartUploadSuccess(disableAssertion); + mockHeadObject.mockReset(); + mockHeadObject.mockResolvedValue({ + ContentLength: 1, + }); + + const { multipartUploadJob } = getMultipartUploadHandlers( + { + path: testPath, + data: new ArrayBuffer(8 * MB), + }, + 8 * MB, + ); + try { + await multipartUploadJob(); + fail('should throw error'); + } catch (e: any) { + expect(e.message).toEqual( + `Upload failed. 
Expected object size ${8 * MB}, but got 1.`, + ); + } + }); + + it('should handle error case: create multipart upload request failed', async () => { + expect.assertions(1); + mockMultipartUploadSuccess(); + mockCreateMultipartUpload.mockReset(); + mockCreateMultipartUpload.mockRejectedValueOnce(new Error('error')); + + const { multipartUploadJob } = getMultipartUploadHandlers({ + path: testPath, + data: new ArrayBuffer(8 * MB), + }); + await expect(multipartUploadJob()).rejects.toThrow('error'); + }); + + it('should handle error case: finish multipart upload failed', async () => { + expect.assertions(1); + mockMultipartUploadSuccess(disableAssertion); + mockCompleteMultipartUpload.mockReset(); + mockCompleteMultipartUpload.mockRejectedValueOnce(new Error('error')); + + const { multipartUploadJob } = getMultipartUploadHandlers({ + path: testPath, + data: new ArrayBuffer(8 * MB), + }); + await expect(multipartUploadJob()).rejects.toThrow('error'); + }); + + it('should handle error case: upload a body that splits in two parts but second part fails', async () => { + expect.assertions(3); + mockMultipartUploadSuccess(disableAssertion); + mockUploadPart.mockReset(); + mockUploadPart.mockResolvedValueOnce({ + ETag: `etag-1`, + PartNumber: 1, + }); + mockUploadPart.mockRejectedValueOnce(new Error('error')); + + const { multipartUploadJob } = getMultipartUploadHandlers({ + path: testPath, + data: new ArrayBuffer(8 * MB), + }); + await expect(multipartUploadJob()).rejects.toThrow('error'); + expect(mockUploadPart).toHaveBeenCalledTimes(2); + expect(mockCompleteMultipartUpload).not.toHaveBeenCalled(); + }); + }); + + describe('upload caching', () => { + const mockDefaultStorage = defaultStorage as jest.Mocked< + typeof defaultStorage + >; + beforeEach(() => { + mockDefaultStorage.getItem.mockReset(); + mockDefaultStorage.setItem.mockReset(); + }); + + it('should send createMultipartUpload request if the upload task is not cached', async () => { + mockMultipartUploadSuccess(); 
+ const size = 8 * MB; + const { multipartUploadJob } = getMultipartUploadHandlers( + { + path: testPath, + data: new ArrayBuffer(size), + }, + size, + ); + await multipartUploadJob(); + // 1 for caching upload task; 1 for remove cache after upload is completed + expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2); + expect(mockCreateMultipartUpload).toHaveBeenCalledTimes(1); + expect(mockListParts).not.toHaveBeenCalled(); + }); + + it('should send createMultipartUpload request if the upload task is cached but outdated', async () => { + mockDefaultStorage.getItem.mockResolvedValue( + JSON.stringify({ + [testPathCacheKey]: { + uploadId: 'uploadId', + bucket, + key: testPath, + lastTouched: Date.now() - 2 * 60 * 60 * 1000, // 2 hours ago + }, + }), + ); + mockMultipartUploadSuccess(); + mockListParts.mockResolvedValueOnce({ Parts: [] }); + const size = 8 * MB; + const { multipartUploadJob } = getMultipartUploadHandlers( + { + path: testPath, + data: new ArrayBuffer(size), + }, + size, + ); + await multipartUploadJob(); + expect(mockCreateMultipartUpload).toHaveBeenCalledTimes(1); + expect(mockListParts).not.toHaveBeenCalled(); + expect(mockUploadPart).toHaveBeenCalledTimes(2); + expect(mockCompleteMultipartUpload).toHaveBeenCalledTimes(1); + }); + + it('should cache the upload with file including file lastModified property', async () => { + mockMultipartUploadSuccess(); + mockListParts.mockResolvedValueOnce({ Parts: [] }); + const size = 8 * MB; + const { multipartUploadJob } = getMultipartUploadHandlers( + { + path: testPath, + data: new File([new ArrayBuffer(size)], 'someName'), + }, + size, + ); + await multipartUploadJob(); + // 1 for caching upload task; 1 for remove cache after upload is completed + expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2); + const cacheValue = JSON.parse( + mockDefaultStorage.setItem.mock.calls[0][1], + ); + + // \d{13} is the file lastModified property of a file + const lastModifiedRegex = /someName_\d{13}_/; + + 
expect(Object.keys(cacheValue)).toEqual([ + expect.stringMatching( + new RegExp(lastModifiedRegex.source + testPathCacheKey), + ), + ]); + }); + + it('should send listParts request if the upload task is cached', async () => { + mockDefaultStorage.getItem.mockResolvedValue( + JSON.stringify({ + [testPathCacheKey]: { + uploadId: 'uploadId', + bucket, + key: testPath, + lastModified: Date.now(), + }, + }), + ); + mockMultipartUploadSuccess(); + mockListParts.mockResolvedValueOnce({ Parts: [] }); + const size = 8 * MB; + const { multipartUploadJob } = getMultipartUploadHandlers( + { + path: testPath, + data: new ArrayBuffer(size), + }, + size, + ); + await multipartUploadJob(); + expect(mockCreateMultipartUpload).not.toHaveBeenCalled(); + expect(mockListParts).toHaveBeenCalledTimes(1); + expect(mockUploadPart).toHaveBeenCalledTimes(2); + expect(mockCompleteMultipartUpload).toHaveBeenCalledTimes(1); + }); + + it('should cache upload task if new upload task is created', async () => { + mockMultipartUploadSuccess(); + mockListParts.mockResolvedValueOnce({ Parts: [] }); + const size = 8 * MB; + const { multipartUploadJob } = getMultipartUploadHandlers( + { + path: testPath, + data: new ArrayBuffer(size), + }, + size, + ); + await multipartUploadJob(); + // 1 for caching upload task; 1 for remove cache after upload is completed + expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2); + expect(mockDefaultStorage.setItem.mock.calls[0][0]).toEqual( + UPLOADS_STORAGE_KEY, + ); + const cacheValue = JSON.parse( + mockDefaultStorage.setItem.mock.calls[0][1], + ); + expect(Object.keys(cacheValue)).toEqual([ + expect.stringMatching(new RegExp(testPathCacheKey)), + ]); + }); + + it('should remove from cache if upload task is completed', async () => { + mockMultipartUploadSuccess(); + mockListParts.mockResolvedValueOnce({ Parts: [] }); + const size = 8 * MB; + const { multipartUploadJob } = getMultipartUploadHandlers( + { + path: testPath, + data: new ArrayBuffer(size), + }, + 
size, + ); + await multipartUploadJob(); + // 1 for caching upload task; 1 for remove cache after upload is completed + expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2); + expect(mockDefaultStorage.setItem).toHaveBeenNthCalledWith( + 2, + UPLOADS_STORAGE_KEY, + JSON.stringify({}), + ); + }); + + it('should remove from cache if upload task is canceled', async () => { + expect.assertions(2); + mockMultipartUploadSuccess(disableAssertion); + mockListParts.mockResolvedValueOnce({ Parts: [] }); + const size = 8 * MB; + const { multipartUploadJob, onCancel } = getMultipartUploadHandlers( + { + path: testPath, + data: new ArrayBuffer(size), + }, + size, + ); + const uploadJobPromise = multipartUploadJob(); + await uploadJobPromise; + // 1 for caching upload task; 1 for remove cache after upload is completed + expect(mockDefaultStorage.setItem).toHaveBeenCalledTimes(2); + expect(mockDefaultStorage.setItem).toHaveBeenNthCalledWith( + 2, + UPLOADS_STORAGE_KEY, + JSON.stringify({}), + ); + }); + }); + + describe('cancel()', () => { + it('should abort in-flight uploadPart requests and throw if upload is canceled', async () => { + const { multipartUploadJob, onCancel } = getMultipartUploadHandlers({ + path: testPath, + data: new ArrayBuffer(8 * MB), + }); + let partCount = 0; + mockMultipartUploadCancellation(() => { + partCount++; + if (partCount === 2) { + onCancel(); // Cancel upload at the the last uploadPart call + } + }); + try { + await multipartUploadJob(); + fail('should throw error'); + } catch (error: any) { + expect(error).toBeInstanceOf(CanceledError); + expect(error.message).toBe('Upload is canceled by user'); + } + expect(mockAbortMultipartUpload).toHaveBeenCalledTimes(1); + expect(mockUploadPart).toHaveBeenCalledTimes(2); + expect(mockUploadPart.mock.calls[0][0].abortSignal?.aborted).toBe(true); + expect(mockUploadPart.mock.calls[1][0].abortSignal?.aborted).toBe(true); + }); + }); + + describe('pause() & resume()', () => { + it('should abort in-flight 
uploadPart requests if upload is paused', async () => { + const { multipartUploadJob, onPause, onResume } = + getMultipartUploadHandlers({ + path: testPath, + data: new ArrayBuffer(8 * MB), + }); + let partCount = 0; + mockMultipartUploadCancellation(() => { + partCount++; + if (partCount === 2) { + onPause(); // Pause upload at the the last uploadPart call + } + }); + const uploadPromise = multipartUploadJob(); + onResume(); + await uploadPromise; + expect(mockUploadPart).toHaveBeenCalledTimes(2); + expect(mockUploadPart.mock.calls[0][0].abortSignal?.aborted).toBe(true); + expect(mockUploadPart.mock.calls[1][0].abortSignal?.aborted).toBe(true); + }); + }); + + describe('upload progress', () => { + it('should send progress for in-flight upload parts', async () => { + const onProgress = jest.fn(); + mockMultipartUploadSuccess(); + const { multipartUploadJob } = getMultipartUploadHandlers( + { + path: testPath, + data: new ArrayBuffer(8 * MB), + options: { + onProgress, + }, + }, + 8 * MB, + ); + await multipartUploadJob(); + expect(onProgress).toHaveBeenCalledTimes(4); // 2 simulated onProgress events per uploadPart call are all tracked + expect(onProgress).toHaveBeenNthCalledWith(1, { + totalBytes: 8388608, + transferredBytes: 2621440, + }); + expect(onProgress).toHaveBeenNthCalledWith(2, { + totalBytes: 8388608, + transferredBytes: 5242880, + }); + expect(onProgress).toHaveBeenNthCalledWith(3, { + totalBytes: 8388608, + transferredBytes: 6815744, + }); + expect(onProgress).toHaveBeenNthCalledWith(4, { + totalBytes: 8388608, + transferredBytes: 8388608, + }); + }); + + it('should send progress for cached upload parts', async () => { + mockMultipartUploadSuccess(); + + const mockDefaultStorage = defaultStorage as jest.Mocked< + typeof defaultStorage + >; + mockDefaultStorage.getItem.mockResolvedValue( + JSON.stringify({ + [testPathCacheKey]: { + uploadId: 'uploadId', + bucket, + key: testPath, + }, + }), + ); + mockListParts.mockResolvedValue({ + Parts: [{ 
PartNumber: 1 }], + }); + + const onProgress = jest.fn(); + const { multipartUploadJob } = getMultipartUploadHandlers( + { + path: testPath, + data: new ArrayBuffer(8 * MB), + options: { + onProgress, + }, + }, + 8 * MB, + ); + await multipartUploadJob(); + expect(onProgress).toHaveBeenCalledTimes(3); + // The first part's 5 MB progress is reported even though no uploadPart call is made. + expect(onProgress).toHaveBeenNthCalledWith(1, { + totalBytes: 8388608, + transferredBytes: 5242880, + }); + }); + }); +}); diff --git a/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts b/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts index cc61a4e2bd6..b03822946da 100644 --- a/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts +++ b/packages/storage/__tests__/providers/s3/apis/uploadData/putObjectJob.test.ts @@ -25,6 +25,8 @@ jest.mock('@aws-amplify/core', () => ({ }, }, })); + +const testPath = 'testPath/object'; const credentials: AWSCredentials = { accessKeyId: 'accessKeyId', sessionToken: 'sessionToken', @@ -51,29 +53,28 @@ mockPutObject.mockResolvedValue({ VersionId: 'versionId', }); -// TODO[AllanZhengYP]: add more unit tests to cover different access level combination. -// TODO[AllanZhengYP]: add more unit tests to cover validations errors and service errors. 
-describe('putObjectJob', () => { +/* TODO Remove suite when `key` parameter is removed */ +describe('putObjectJob with key', () => { it('should supply the correct parameters to putObject API handler', async () => { const abortController = new AbortController(); - const key = 'key'; + const inputKey = 'key'; const data = 'data'; - const contentType = 'contentType'; + const mockContentType = 'contentType'; const contentDisposition = 'contentDisposition'; const contentEncoding = 'contentEncoding'; - const metadata = { key: 'value' }; + const mockMetadata = { key: 'value' }; const onProgress = jest.fn(); const useAccelerateEndpoint = true; const job = putObjectJob( { - key, + key: inputKey, data, options: { contentDisposition, contentEncoding, - contentType, - metadata, + contentType: mockContentType, + metadata: mockMetadata, onProgress, useAccelerateEndpoint, }, @@ -82,7 +83,7 @@ describe('putObjectJob', () => { ); const result = await job(); expect(result).toEqual({ - key, + key: inputKey, eTag: 'eTag', versionId: 'versionId', contentType: 'contentType', @@ -100,12 +101,12 @@ describe('putObjectJob', () => { }, { Bucket: 'bucket', - Key: `public/${key}`, + Key: `public/${inputKey}`, Body: data, - ContentType: contentType, + ContentType: mockContentType, ContentDisposition: contentDisposition, ContentEncoding: contentEncoding, - Metadata: metadata, + Metadata: mockMetadata, ContentMD5: undefined, }, ); @@ -130,3 +131,92 @@ describe('putObjectJob', () => { expect(calculateContentMd5).toHaveBeenCalledWith('data'); }); }); + +describe('putObjectJob with path', () => { + test.each([ + { + path: testPath, + expectedKey: testPath, + }, + { + path: () => testPath, + expectedKey: testPath, + }, + ])( + 'should supply the correct parameters to putObject API handler when path is $path', + async ({ path: inputPath, expectedKey }) => { + const abortController = new AbortController(); + const data = 'data'; + const mockContentType = 'contentType'; + const contentDisposition = 
'contentDisposition'; + const contentEncoding = 'contentEncoding'; + const mockMetadata = { key: 'value' }; + const onProgress = jest.fn(); + const useAccelerateEndpoint = true; + + const job = putObjectJob( + { + path: inputPath, + data, + options: { + contentDisposition, + contentEncoding, + contentType: mockContentType, + metadata: mockMetadata, + onProgress, + useAccelerateEndpoint, + }, + }, + abortController.signal, + ); + const result = await job(); + expect(result).toEqual({ + path: expectedKey, + eTag: 'eTag', + versionId: 'versionId', + contentType: 'contentType', + metadata: { key: 'value' }, + size: undefined, + }); + expect(mockPutObject).toHaveBeenCalledWith( + { + credentials, + region: 'region', + abortSignal: abortController.signal, + onUploadProgress: expect.any(Function), + useAccelerateEndpoint: true, + userAgentValue: expect.any(String), + }, + { + Bucket: 'bucket', + Key: expectedKey, + Body: data, + ContentType: mockContentType, + ContentDisposition: contentDisposition, + ContentEncoding: contentEncoding, + Metadata: mockMetadata, + ContentMD5: undefined, + }, + ); + }, + ); + + it('should set ContentMD5 if object lock is enabled', async () => { + Amplify.libraryOptions = { + Storage: { + S3: { + isObjectLockEnabled: true, + }, + }, + }; + const job = putObjectJob( + { + path: testPath, + data: 'data', + }, + new AbortController().signal, + ); + await job(); + expect(calculateContentMd5).toHaveBeenCalledWith('data'); + }); +}); diff --git a/packages/storage/__tests__/providers/s3/apis/utils/isInputWithPath.test.ts b/packages/storage/__tests__/providers/s3/apis/utils/isInputWithPath.test.ts new file mode 100644 index 00000000000..3a0c96de924 --- /dev/null +++ b/packages/storage/__tests__/providers/s3/apis/utils/isInputWithPath.test.ts @@ -0,0 +1,13 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { isInputWithPath } from '../../../../../src/providers/s3/utils'; + +describe('isInputWithPath', () => { + it('should return true if input contains path', async () => { + expect(isInputWithPath({ path: '' })).toBe(true); + }); + it('should return false if input does not contain path', async () => { + expect(isInputWithPath({ key: '' })).toBe(false); + }); +}); diff --git a/packages/storage/__tests__/providers/s3/apis/utils/validateStorageOperationInput.test.ts b/packages/storage/__tests__/providers/s3/apis/utils/validateStorageOperationInput.test.ts new file mode 100644 index 00000000000..14b1f6204f0 --- /dev/null +++ b/packages/storage/__tests__/providers/s3/apis/utils/validateStorageOperationInput.test.ts @@ -0,0 +1,71 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { + StorageValidationErrorCode, + validationErrorMap, +} from '../../../../../src/errors/types/validation'; +import { validateStorageOperationInput } from '../../../../../src/providers/s3/utils'; +import { + STORAGE_INPUT_KEY, + STORAGE_INPUT_PATH, +} from '../../../../../src/providers/s3/utils/constants'; + +describe('validateStorageOperationInput', () => { + it('should return inputType as STORAGE_INPUT_PATH and objectKey as testPath when input is path as string', () => { + const input = { path: 'testPath' }; + const result = validateStorageOperationInput(input); + expect(result).toEqual({ + inputType: STORAGE_INPUT_PATH, + objectKey: 'testPath', + }); + }); + + it('should return inputType as STORAGE_INPUT_PATH and objectKey as result of path function when input is path as function', () => { + const input = { + path: ({ identityId }: { identityId?: string }) => + `testPath/${identityId}`, + }; + const result = validateStorageOperationInput(input, '123'); + expect(result).toEqual({ + inputType: STORAGE_INPUT_PATH, + objectKey: 'testPath/123', + }); + }); + + it('should return 
inputType as STORAGE_INPUT_KEY and objectKey as testKey when input is key', () => { + const input = { key: 'testKey' }; + const result = validateStorageOperationInput(input); + expect(result).toEqual({ + inputType: STORAGE_INPUT_KEY, + objectKey: 'testKey', + }); + }); + + it('should throw an error when input path starts with a /', () => { + const input = { path: '/leading-slash-path' }; + expect(() => validateStorageOperationInput(input)).toThrow( + validationErrorMap[ + StorageValidationErrorCode.InvalidStoragePathInput + ].message, + ); + }); + + it('should throw an error when key and path are not specified', () => { + const input = { invalid: 'test' } as any; + expect(() => validateStorageOperationInput(input)).toThrow( + validationErrorMap[ + StorageValidationErrorCode.InvalidStorageOperationInput + ].message, + ); + }); + + it('should throw an error when both key & path are specified', () => { + const input = { path: 'testPath/object', key: 'key' } as any; + expect(() => validateStorageOperationInput(input)).toThrow( + validationErrorMap[ + StorageValidationErrorCode.InvalidStorageOperationInput + ].message, + ); + }); +}); diff --git a/packages/storage/__tests__/providers/s3/apis/utils/validateStorageOperationInputWithPrefix.test.ts b/packages/storage/__tests__/providers/s3/apis/utils/validateStorageOperationInputWithPrefix.test.ts new file mode 100644 index 00000000000..3be7aa1b50d --- /dev/null +++ b/packages/storage/__tests__/providers/s3/apis/utils/validateStorageOperationInputWithPrefix.test.ts @@ -0,0 +1,70 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { + StorageValidationErrorCode, + validationErrorMap, +} from '../../../../../src/errors/types/validation'; +import { validateStorageOperationInputWithPrefix } from '../../../../../src/providers/s3/utils'; +import { + STORAGE_INPUT_PATH, + STORAGE_INPUT_PREFIX, +} from '../../../../../src/providers/s3/utils/constants'; + +describe('validateStorageOperationInputWithPrefix', () => { + it('should return inputType as STORAGE_INPUT_PATH and objectKey as testPath when input is path as string', () => { + const input = { path: 'testPath' }; + const result = validateStorageOperationInputWithPrefix(input); + expect(result).toEqual({ + inputType: STORAGE_INPUT_PATH, + objectKey: 'testPath', + }); + }); + + it('should return inputType as STORAGE_INPUT_PATH and objectKey as result of path function when input is path as function', () => { + const input = { + path: ({ identityId }: { identityId?: string }) => + `testPath/${identityId}`, + }; + const result = validateStorageOperationInputWithPrefix(input, '123'); + expect(result).toEqual({ + inputType: STORAGE_INPUT_PATH, + objectKey: 'testPath/123', + }); + }); + + it('should return inputType as STORAGE_INPUT_PREFIX and objectKey as testKey when input is prefix', () => { + const input = { prefix: 'testKey' }; + const result = validateStorageOperationInputWithPrefix(input); + expect(result).toEqual({ + inputType: STORAGE_INPUT_PREFIX, + objectKey: 'testKey', + }); + }); + + it('should take a default prefix when input has invalid objects', () => { + const input = { invalid: 'test' } as any; + const result = validateStorageOperationInputWithPrefix(input); + expect(result).toEqual({ + inputType: STORAGE_INPUT_PREFIX, + objectKey: '', + }); + }); + + it('should throw an error when input path starts with a /', () => { + const input = { path: '/test' } as any; + expect(() => validateStorageOperationInputWithPrefix(input)).toThrow( + 
validationErrorMap[StorageValidationErrorCode.InvalidStoragePathInput] + .message, + ); + }); + + it('should throw an error when input has both path and prefix', () => { + const input = { prefix: 'testPrefix', path: 'test' } as any; + expect(() => validateStorageOperationInputWithPrefix(input)).toThrow( + validationErrorMap[ + StorageValidationErrorCode.InvalidStorageOperationPrefixInput + ].message, + ); + }); +}); diff --git a/packages/storage/src/errors/types/validation.ts b/packages/storage/src/errors/types/validation.ts index f596b8d8e25..d72b9852162 100644 --- a/packages/storage/src/errors/types/validation.ts +++ b/packages/storage/src/errors/types/validation.ts @@ -9,11 +9,16 @@ export enum StorageValidationErrorCode { NoKey = 'NoKey', NoSourceKey = 'NoSourceKey', NoDestinationKey = 'NoDestinationKey', + NoSourcePath = 'NoSourcePath', + NoDestinationPath = 'NoDestinationPath', NoBucket = 'NoBucket', NoRegion = 'NoRegion', - UrlExpirationMaxLimitExceed = 'UrlExpirationMaxLimitExceed', - ObjectIsTooLarge = 'ObjectIsTooLarge', + InvalidStorageOperationPrefixInput = 'InvalidStorageOperationPrefixInput', + InvalidStorageOperationInput = 'InvalidStorageOperationInput', + InvalidStoragePathInput = 'InvalidStoragePathInput', InvalidUploadSource = 'InvalidUploadSource', + ObjectIsTooLarge = 'ObjectIsTooLarge', + UrlExpirationMaxLimitExceed = 'UrlExpirationMaxLimitExceed', } export const validationErrorMap: AmplifyErrorMap = { @@ -33,6 +38,12 @@ export const validationErrorMap: AmplifyErrorMap = { [StorageValidationErrorCode.NoDestinationKey]: { message: 'Missing destination key in copy api call.', }, + [StorageValidationErrorCode.NoSourcePath]: { + message: 'Missing source path in copy api call.', + }, + [StorageValidationErrorCode.NoDestinationPath]: { + message: 'Missing destination path in copy api call.', + }, [StorageValidationErrorCode.NoBucket]: { message: 'Missing bucket name while accessing object.', }, @@ -49,4 +60,14 @@ export const validationErrorMap: 
AmplifyErrorMap = { message: 'Upload source type can only be a `Blob`, `File`, `ArrayBuffer`, or `string`.', }, + [StorageValidationErrorCode.InvalidStorageOperationInput]: { + message: + 'Path or key parameter must be specified in the input. Both can not be specified at the same time.', + }, + [StorageValidationErrorCode.InvalidStorageOperationPrefixInput]: { + message: 'Both path and prefix can not be specified at the same time.', + }, + [StorageValidationErrorCode.InvalidStoragePathInput]: { + message: 'Input `path` does not allow a leading slash (/).', + }, }; diff --git a/packages/storage/src/index.ts b/packages/storage/src/index.ts index 6183ed3a2d5..45bf9734a66 100644 --- a/packages/storage/src/index.ts +++ b/packages/storage/src/index.ts @@ -13,24 +13,40 @@ export { export { UploadDataInput, + UploadDataWithPathInput, DownloadDataInput, + DownloadDataWithPathInput, RemoveInput, + RemoveWithPathInput, ListAllInput, + ListAllWithPathInput, ListPaginateInput, + ListPaginateWithPathInput, GetPropertiesInput, + GetPropertiesWithPathInput, CopyInput, + CopyWithPathInput, GetUrlInput, + GetUrlWithPathInput, } from './providers/s3/types/inputs'; export { UploadDataOutput, + UploadDataWithPathOutput, DownloadDataOutput, + DownloadDataWithPathOutput, RemoveOutput, + RemoveWithPathOutput, ListAllOutput, + ListAllWithPathOutput, ListPaginateOutput, + ListPaginateWithPathOutput, GetPropertiesOutput, + GetPropertiesWithPathOutput, CopyOutput, + CopyWithPathOutput, GetUrlOutput, + GetUrlWithPathOutput, } from './providers/s3/types/outputs'; export { TransferProgressEvent } from './types'; diff --git a/packages/storage/src/providers/s3/apis/copy.ts b/packages/storage/src/providers/s3/apis/copy.ts index ca0ae3e8a39..763ff45829b 100644 --- a/packages/storage/src/providers/s3/apis/copy.ts +++ b/packages/storage/src/providers/s3/apis/copy.ts @@ -3,21 +3,40 @@ import { Amplify } from '@aws-amplify/core'; -import { CopyInput, CopyOutput, S3Exception } from '../types'; -import { 
StorageValidationErrorCode } from '../../../errors/types/validation'; +import { + CopyInput, + CopyOutput, + CopyWithPathInput, + CopyWithPathOutput, +} from '../types'; import { copy as copyInternal } from './internal/copy'; /** - * Copy an object from a source object to a new object within the same bucket. Can optionally copy files across - * different level or identityId (if source object's level is 'protected'). + * Copy an object from a source to a destination object within the same bucket. * - * @param input - The CopyInput object. - * @returns Output containing the destination key. - * @throws service: {@link S3Exception} - Thrown when checking for existence of the object - * @throws validation: {@link StorageValidationErrorCode } - Thrown when - * source or destination key are not defined. + * @param input - The `CopyWithPathInput` object. + * @returns Output containing the destination object path. + * @throws service: `S3Exception` - Thrown when checking for existence of the object + * @throws validation: `StorageValidationErrorCode` - Thrown when + * source or destination path is not defined. */ -export const copy = async (input: CopyInput): Promise => { +export function copy(input: CopyWithPathInput): Promise; +/** + * @deprecated The `key` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/react/build-a-backend/storage/copy | path} instead. + * + * Copy an object from a source to a destination object within the same bucket. Can optionally copy files across + * different accessLevel or identityId (if source object's accessLevel is 'protected'). + * + * @param input - The `CopyInput` object. + * @returns Output containing the destination object key. + * @throws service: `S3Exception` - Thrown when checking for existence of the object + * @throws validation: `StorageValidationErrorCode` - Thrown when + * source or destination key is not defined. 
+ */ +export function copy(input: CopyInput): Promise; + +export function copy(input: CopyInput | CopyWithPathInput) { return copyInternal(Amplify, input); -}; +} diff --git a/packages/storage/src/providers/s3/apis/downloadData.ts b/packages/storage/src/providers/s3/apis/downloadData.ts index b10fc35d8ea..7c98ee2b857 100644 --- a/packages/storage/src/providers/s3/apis/downloadData.ts +++ b/packages/storage/src/providers/s3/apis/downloadData.ts @@ -4,33 +4,43 @@ import { Amplify } from '@aws-amplify/core'; import { StorageAction } from '@aws-amplify/core/internals/utils'; -import { DownloadDataInput, DownloadDataOutput, S3Exception } from '../types'; +import { + DownloadDataInput, + DownloadDataOutput, + DownloadDataWithPathInput, + DownloadDataWithPathOutput, +} from '../types'; import { resolveS3ConfigAndInput } from '../utils/resolveS3ConfigAndInput'; -import { StorageValidationErrorCode } from '../../../errors/types/validation'; -import { createDownloadTask } from '../utils'; +import { createDownloadTask, validateStorageOperationInput } from '../utils'; import { getObject } from '../utils/client'; import { getStorageUserAgentValue } from '../utils/userAgent'; import { logger } from '../../../utils'; +import { + StorageDownloadDataOutput, + StorageItemWithKey, + StorageItemWithPath, +} from '../../../types'; +import { STORAGE_INPUT_KEY } from '../utils/constants'; /** * Download S3 object data to memory * - * @param input - The DownloadDataInput object. + * @param input - The `DownloadDataWithPathInput` object. * @returns A cancelable task exposing result promise from `result` property. 
- * @throws service: {@link S3Exception} - thrown when checking for existence of the object - * @throws validation: {@link StorageValidationErrorCode } - Validation errors + * @throws service: `S3Exception` - thrown when checking for existence of the object + * @throws validation: `StorageValidationErrorCode` - Validation errors * * @example * ```ts * // Download a file from s3 bucket - * const { body, eTag } = await downloadData({ key, data: file, options: { + * const { body, eTag } = await downloadData({ path, options: { * onProgress, // Optional progress callback. * } }).result; * ``` * @example * ```ts * // Cancel a task - * const downloadTask = downloadData({ key, data: file }); + * const downloadTask = downloadData({ path }); * //... * downloadTask.cancel(); * try { @@ -42,7 +52,47 @@ import { logger } from '../../../utils'; * } *``` */ -export const downloadData = (input: DownloadDataInput): DownloadDataOutput => { +export function downloadData( + input: DownloadDataWithPathInput, +): DownloadDataWithPathOutput; +/** + * @deprecated The `key` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/react/build-a-backend/storage/download/#downloaddata | path} instead. + * + * Download S3 object data to memory + * + * @param input - The `DownloadDataInput` object. + * @returns A cancelable task exposing result promise from `result` property. + * @throws service: `S3Exception` - thrown when checking for existence of the object + * @throws validation: `StorageValidationErrorCode` - Validation errors + * + * @example + * ```ts + * // Download a file from s3 bucket + * const { body, eTag } = await downloadData({ key, options: { + * onProgress, // Optional progress callback. + * } }).result; + * ``` + * @example + * ```ts + * // Cancel a task + * const downloadTask = downloadData({ key }); + * //... 
+ * downloadTask.cancel(); + * try { + * await downloadTask.result; + * } catch (error) { + * if(isCancelError(error)) { + * // Handle error thrown by task cancelation. + * } + * } + *``` + */ +export function downloadData(input: DownloadDataInput): DownloadDataOutput; + +export function downloadData( + input: DownloadDataInput | DownloadDataWithPathInput, +) { const abortController = new AbortController(); const downloadTask = createDownloadTask({ @@ -53,21 +103,27 @@ export const downloadData = (input: DownloadDataInput): DownloadDataOutput => { }); return downloadTask; -}; +} const downloadDataJob = ( - { options: downloadDataOptions, key }: DownloadDataInput, + downloadDataInput: DownloadDataInput | DownloadDataWithPathInput, abortSignal: AbortSignal, ) => - async () => { - const { bucket, keyPrefix, s3Config } = await resolveS3ConfigAndInput( - Amplify, - downloadDataOptions, + async (): Promise< + StorageDownloadDataOutput + > => { + const { options: downloadDataOptions } = downloadDataInput; + const { bucket, keyPrefix, s3Config, identityId } = + await resolveS3ConfigAndInput(Amplify, downloadDataOptions); + const { inputType, objectKey } = validateStorageOperationInput( + downloadDataInput, + identityId, ); - const finalKey = keyPrefix + key; + const finalKey = + inputType === STORAGE_INPUT_KEY ? keyPrefix + objectKey : objectKey; - logger.debug(`download ${key} from ${finalKey}.`); + logger.debug(`download ${objectKey} from ${finalKey}.`); const { Body: body, @@ -93,8 +149,7 @@ const downloadDataJob = }, ); - return { - key, + const result = { body, lastModified, size, @@ -103,4 +158,8 @@ const downloadDataJob = metadata, versionId, }; + + return inputType === STORAGE_INPUT_KEY + ? 
{ key: objectKey, ...result } + : { path: objectKey, ...result }; }; diff --git a/packages/storage/src/providers/s3/apis/getProperties.ts b/packages/storage/src/providers/s3/apis/getProperties.ts index 4b98d529f55..630d0b1c467 100644 --- a/packages/storage/src/providers/s3/apis/getProperties.ts +++ b/packages/storage/src/providers/s3/apis/getProperties.ts @@ -3,8 +3,12 @@ import { Amplify } from '@aws-amplify/core'; -import { GetPropertiesInput, GetPropertiesOutput, S3Exception } from '../types'; -import { StorageValidationErrorCode } from '../../../errors/types/validation'; +import { + GetPropertiesInput, + GetPropertiesOutput, + GetPropertiesWithPathInput, + GetPropertiesWithPathOutput, +} from '../types'; import { getProperties as getPropertiesInternal } from './internal/getProperties'; @@ -12,13 +16,32 @@ import { getProperties as getPropertiesInternal } from './internal/getProperties * Gets the properties of a file. The properties include S3 system metadata and * the user metadata that was provided when uploading the file. * - * @param input - The GetPropertiesInput object. + * @param input - The `GetPropertiesWithPathInput` object. * @returns Requested object properties. - * @throws A {@link S3Exception} when the underlying S3 service returned error. - * @throws A {@link StorageValidationErrorCode} when API call parameters are invalid. + * @throws An `S3Exception` when the underlying S3 service returned error. + * @throws A `StorageValidationErrorCode` when API call parameters are invalid. */ -export const getProperties = ( +export function getProperties( + input: GetPropertiesWithPathInput, +): Promise; +/** + * @deprecated The `key` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/javascript/build-a-backend/storage/get-properties/ | path} instead. + * + * Gets the properties of a file. 
The properties include S3 system metadata and + * the user metadata that was provided when uploading the file. + * + * @param input - The `GetPropertiesInput` object. + * @returns Requested object properties. + * @throws An `S3Exception` when the underlying S3 service returned error. + * @throws A `StorageValidationErrorCode` when API call parameters are invalid. + */ +export function getProperties( input: GetPropertiesInput, -): Promise => { +): Promise; + +export function getProperties( + input: GetPropertiesInput | GetPropertiesWithPathInput, +) { return getPropertiesInternal(Amplify, input); -}; +} diff --git a/packages/storage/src/providers/s3/apis/getUrl.ts b/packages/storage/src/providers/s3/apis/getUrl.ts index 708e601cc87..aafe1f282b3 100644 --- a/packages/storage/src/providers/s3/apis/getUrl.ts +++ b/packages/storage/src/providers/s3/apis/getUrl.ts @@ -3,9 +3,12 @@ import { Amplify } from '@aws-amplify/core'; -import { StorageValidationErrorCode } from '../../../errors/types/validation'; -import { GetUrlInput, GetUrlOutput, S3Exception } from '../types'; -import { StorageError } from '../../../errors/StorageError'; +import { + GetUrlInput, + GetUrlOutput, + GetUrlWithPathInput, + GetUrlWithPathOutput, +} from '../types'; import { getUrl as getUrlInternal } from './internal/getUrl'; @@ -16,15 +19,39 @@ import { getUrl as getUrlInternal } from './internal/getUrl'; * * By default, it will not validate the object that exists in S3. If you set the `options.validateObjectExistence` * to true, this method will verify the given object already exists in S3 before returning a presigned - * URL, and will throw {@link StorageError} if the object does not exist. + * URL, and will throw `StorageError` if the object does not exist. * - * @param input - The GetUrlInput object. - * @returns Presigned URL and timestamp when the URL MAY expire. 
- * @throws service: {@link S3Exception} - thrown when checking for existence of the object - * @throws validation: {@link StorageValidationErrorCode } - Validation errors + * @param input - The `GetUrlWithPathInput` object. + * @returns Presigned URL and timestamp when the URL may expire. + * @throws service: `S3Exception` - thrown when checking for existence of the object + * @throws validation: `StorageValidationErrorCode` - Validation errors * thrown either username or key are not defined. * */ -export const getUrl = (input: GetUrlInput): Promise => { +export function getUrl( + input: GetUrlWithPathInput, +): Promise; +/** + * @deprecated The `key` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/javascript/build-a-backend/storage/download/#generate-a-download-url | path} instead. + * + * Get a temporary presigned URL to download the specified S3 object. + * The presigned URL expires when the associated role used to sign the request expires or + * the option `expiresIn` is reached. The `expiresAt` property in the output object indicates when the URL MAY expire. + * + * By default, it will not validate the object that exists in S3. If you set the `options.validateObjectExistence` + * to true, this method will verify the given object already exists in S3 before returning a presigned + * URL, and will throw `StorageError` if the object does not exist. + * + * @param input - The `GetUrlInput` object. + * @returns Presigned URL and timestamp when the URL may expire. + * @throws service: `S3Exception` - thrown when checking for existence of the object + * @throws validation: `StorageValidationErrorCode` - Validation errors + * thrown when either username or key is not defined.
+ * + */ +export function getUrl(input: GetUrlInput): Promise; + +export function getUrl(input: GetUrlInput | GetUrlWithPathInput) { return getUrlInternal(Amplify, input); -}; +} diff --git a/packages/storage/src/providers/s3/apis/internal/copy.ts b/packages/storage/src/providers/s3/apis/internal/copy.ts index feefdd1b5c3..e0c96a1fba4 100644 --- a/packages/storage/src/providers/s3/apis/internal/copy.ts +++ b/packages/storage/src/providers/s3/apis/internal/copy.ts @@ -4,15 +4,76 @@ import { AmplifyClassV6 } from '@aws-amplify/core'; import { StorageAction } from '@aws-amplify/core/internals/utils'; -import { CopyInput, CopyOutput } from '../../types'; -import { resolveS3ConfigAndInput } from '../../utils'; +import { + CopyInput, + CopyOutput, + CopyWithPathInput, + CopyWithPathOutput, +} from '../../types'; +import { ResolvedS3Config } from '../../types/options'; +import { + isInputWithPath, + resolveS3ConfigAndInput, + validateStorageOperationInput, +} from '../../utils'; import { StorageValidationErrorCode } from '../../../../errors/types/validation'; import { assertValidationError } from '../../../../errors/utils/assertValidationError'; import { copyObject } from '../../utils/client'; import { getStorageUserAgentValue } from '../../utils/userAgent'; import { logger } from '../../../../utils'; +const isCopyInputWithPath = ( + input: CopyInput | CopyWithPathInput, +): input is CopyWithPathInput => isInputWithPath(input.source); + export const copy = async ( + amplify: AmplifyClassV6, + input: CopyInput | CopyWithPathInput, +): Promise => { + return isCopyInputWithPath(input) + ? 
copyWithPath(amplify, input) + : copyWithKey(amplify, input); +}; + +const copyWithPath = async ( + amplify: AmplifyClassV6, + input: CopyWithPathInput, +): Promise => { + const { source, destination } = input; + const { s3Config, bucket, identityId } = + await resolveS3ConfigAndInput(amplify); + + assertValidationError(!!source.path, StorageValidationErrorCode.NoSourcePath); + assertValidationError( + !!destination.path, + StorageValidationErrorCode.NoDestinationPath, + ); + + const { objectKey: sourcePath } = validateStorageOperationInput( + source, + identityId, + ); + const { objectKey: destinationPath } = validateStorageOperationInput( + destination, + identityId, + ); + + const finalCopySource = `${bucket}/${sourcePath}`; + const finalCopyDestination = destinationPath; + logger.debug(`copying "${finalCopySource}" to "${finalCopyDestination}".`); + + await serviceCopy({ + source: finalCopySource, + destination: finalCopyDestination, + bucket, + s3Config, + }); + + return { path: finalCopyDestination }; +}; + +/** @deprecated Use {@link copyWithPath} instead. 
*/ +export const copyWithKey = async ( amplify: AmplifyClassV6, input: CopyInput, ): Promise => { @@ -41,6 +102,30 @@ export const copy = async ( const finalCopySource = `${bucket}/${sourceKeyPrefix}${sourceKey}`; const finalCopyDestination = `${destinationKeyPrefix}${destinationKey}`; logger.debug(`copying "${finalCopySource}" to "${finalCopyDestination}".`); + + await serviceCopy({ + source: finalCopySource, + destination: finalCopyDestination, + bucket, + s3Config, + }); + + return { + key: destinationKey, + }; +}; + +const serviceCopy = async ({ + source, + destination, + bucket, + s3Config, +}: { + source: string; + destination: string; + bucket: string; + s3Config: ResolvedS3Config; +}) => { await copyObject( { ...s3Config, @@ -48,13 +133,9 @@ export const copy = async ( }, { Bucket: bucket, - CopySource: finalCopySource, - Key: finalCopyDestination, + CopySource: source, + Key: destination, MetadataDirective: 'COPY', // Copies over metadata like contentType as well }, ); - - return { - key: destinationKey, - }; }; diff --git a/packages/storage/src/providers/s3/apis/internal/getProperties.ts b/packages/storage/src/providers/s3/apis/internal/getProperties.ts index db854f8635b..3b61460d89b 100644 --- a/packages/storage/src/providers/s3/apis/internal/getProperties.ts +++ b/packages/storage/src/providers/s3/apis/internal/getProperties.ts @@ -4,25 +4,37 @@ import { AmplifyClassV6 } from '@aws-amplify/core'; import { StorageAction } from '@aws-amplify/core/internals/utils'; -import { GetPropertiesInput, GetPropertiesOutput } from '../../types'; -import { resolveS3ConfigAndInput } from '../../utils'; +import { + GetPropertiesInput, + GetPropertiesOutput, + GetPropertiesWithPathInput, + GetPropertiesWithPathOutput, +} from '../../types'; +import { + resolveS3ConfigAndInput, + validateStorageOperationInput, +} from '../../utils'; import { headObject } from '../../utils/client'; import { getStorageUserAgentValue } from '../../utils/userAgent'; import { logger } from 
'../../../../utils'; +import { STORAGE_INPUT_KEY } from '../../utils/constants'; export const getProperties = async ( amplify: AmplifyClassV6, - input: GetPropertiesInput, + input: GetPropertiesInput | GetPropertiesWithPathInput, action?: StorageAction, -): Promise => { - const { key, options } = input; - const { s3Config, bucket, keyPrefix } = await resolveS3ConfigAndInput( - amplify, - options, +): Promise => { + const { options: getPropertiesOptions } = input; + const { s3Config, bucket, keyPrefix, identityId } = + await resolveS3ConfigAndInput(amplify, getPropertiesOptions); + const { inputType, objectKey } = validateStorageOperationInput( + input, + identityId, ); - const finalKey = `${keyPrefix}${key}`; + const finalKey = + inputType === STORAGE_INPUT_KEY ? keyPrefix + objectKey : objectKey; - logger.debug(`get properties of ${key} from ${finalKey}`); + logger.debug(`get properties of ${objectKey} from ${finalKey}`); const response = await headObject( { ...s3Config, @@ -36,8 +48,7 @@ export const getProperties = async ( }, ); - return { - key, + const result = { contentType: response.ContentType, size: response.ContentLength, eTag: response.ETag, @@ -45,4 +56,8 @@ export const getProperties = async ( metadata: response.Metadata, versionId: response.VersionId, }; + + return inputType === STORAGE_INPUT_KEY + ? 
{ key: objectKey, ...result } + : { path: objectKey, ...result }; }; diff --git a/packages/storage/src/providers/s3/apis/internal/getUrl.ts b/packages/storage/src/providers/s3/apis/internal/getUrl.ts index 15110d535b7..a2de5d3f770 100644 --- a/packages/storage/src/providers/s3/apis/internal/getUrl.ts +++ b/packages/storage/src/providers/s3/apis/internal/getUrl.ts @@ -4,34 +4,48 @@ import { AmplifyClassV6 } from '@aws-amplify/core'; import { StorageAction } from '@aws-amplify/core/internals/utils'; -import { GetUrlInput, GetUrlOutput } from '../../types'; +import { + GetUrlInput, + GetUrlOutput, + GetUrlWithPathInput, + GetUrlWithPathOutput, +} from '../../types'; import { StorageValidationErrorCode } from '../../../../errors/types/validation'; import { getPresignedGetObjectUrl } from '../../utils/client'; -import { resolveS3ConfigAndInput } from '../../utils'; +import { + resolveS3ConfigAndInput, + validateStorageOperationInput, +} from '../../utils'; import { assertValidationError } from '../../../../errors/utils/assertValidationError'; import { DEFAULT_PRESIGN_EXPIRATION, MAX_URL_EXPIRATION, + STORAGE_INPUT_KEY, } from '../../utils/constants'; import { getProperties } from './getProperties'; export const getUrl = async ( amplify: AmplifyClassV6, - input: GetUrlInput, -): Promise => { - const { key, options } = input; + input: GetUrlInput | GetUrlWithPathInput, +): Promise => { + const { options: getUrlOptions } = input; + const { s3Config, keyPrefix, bucket, identityId } = + await resolveS3ConfigAndInput(amplify, getUrlOptions); + const { inputType, objectKey } = validateStorageOperationInput( + input, + identityId, + ); - if (options?.validateObjectExistence) { - await getProperties(amplify, { key, options }, StorageAction.GetUrl); - } + const finalKey = + inputType === STORAGE_INPUT_KEY ? 
keyPrefix + objectKey : objectKey; - const { s3Config, keyPrefix, bucket } = await resolveS3ConfigAndInput( - amplify, - options, - ); + if (getUrlOptions?.validateObjectExistence) { + await getProperties(amplify, input, StorageAction.GetUrl); + } - let urlExpirationInSec = options?.expiresIn ?? DEFAULT_PRESIGN_EXPIRATION; + let urlExpirationInSec = + getUrlOptions?.expiresIn ?? DEFAULT_PRESIGN_EXPIRATION; const awsCredExpiration = s3Config.credentials?.expiration; if (awsCredExpiration) { const awsCredExpirationInSec = Math.floor( @@ -54,7 +68,7 @@ export const getUrl = async ( }, { Bucket: bucket, - Key: `${keyPrefix}${key}`, + Key: finalKey, }, ), expiresAt: new Date(Date.now() + urlExpirationInSec * 1000), diff --git a/packages/storage/src/providers/s3/apis/internal/list.ts b/packages/storage/src/providers/s3/apis/internal/list.ts index 1d49ed5942b..f180dfe5247 100644 --- a/packages/storage/src/providers/s3/apis/internal/list.ts +++ b/packages/storage/src/providers/s3/apis/internal/list.ts @@ -7,11 +7,19 @@ import { StorageAction } from '@aws-amplify/core/internals/utils'; import { ListAllInput, ListAllOutput, + ListAllWithPathInput, + ListAllWithPathOutput, ListOutputItem, + ListOutputItemWithPath, ListPaginateInput, ListPaginateOutput, + ListPaginateWithPathInput, + ListPaginateWithPathOutput, } from '../../types'; -import { resolveS3ConfigAndInput } from '../../utils'; +import { + resolveS3ConfigAndInput, + validateStorageOperationInputWithPrefix, +} from '../../utils'; import { ResolvedS3Config } from '../../types/options'; import { ListObjectsV2Input, @@ -20,25 +28,43 @@ import { } from '../../utils/client'; import { getStorageUserAgentValue } from '../../utils/userAgent'; import { logger } from '../../../../utils'; +import { STORAGE_INPUT_PREFIX } from '../../utils/constants'; const MAX_PAGE_SIZE = 1000; interface ListInputArgs { s3Config: ResolvedS3Config; listParams: ListObjectsV2Input; - prefix: string; + generatedPrefix?: string; } export const list = 
async ( amplify: AmplifyClassV6, - input?: ListAllInput | ListPaginateInput, -): Promise => { - const { options = {}, prefix: path = '' } = input ?? {}; + input: + | ListAllInput + | ListPaginateInput + | ListAllWithPathInput + | ListPaginateWithPathInput, +): Promise< + | ListAllOutput + | ListPaginateOutput + | ListAllWithPathOutput + | ListPaginateWithPathOutput +> => { + const { options = {} } = input; const { s3Config, bucket, - keyPrefix: prefix, + keyPrefix: generatedPrefix, + identityId, } = await resolveS3ConfigAndInput(amplify, options); + + const { inputType, objectKey } = validateStorageOperationInputWithPrefix( + input, + identityId, + ); + const isInputWithPrefix = inputType === STORAGE_INPUT_PREFIX; + // @ts-expect-error pageSize and nextToken should not coexist with listAll if (options?.listAll && (options?.pageSize || options?.nextToken)) { const anyOptions = options as any; @@ -50,34 +76,53 @@ export const list = async ( } const listParams = { Bucket: bucket, - Prefix: `${prefix}${path}`, + Prefix: isInputWithPrefix ? `${generatedPrefix}${objectKey}` : objectKey, MaxKeys: options?.listAll ? undefined : options?.pageSize, ContinuationToken: options?.listAll ? undefined : options?.nextToken, }; logger.debug(`listing items from "${listParams.Prefix}"`); - return options.listAll - ? _listAll({ s3Config, listParams, prefix }) - : _list({ s3Config, listParams, prefix }); + const listInputArgs: ListInputArgs = { + s3Config, + listParams, + }; + if (options.listAll) { + if (isInputWithPrefix) { + return _listAllWithPrefix({ + ...listInputArgs, + generatedPrefix, + }); + } else { + return _listAllWithPath(listInputArgs); + } + } else { + if (isInputWithPrefix) { + return _listWithPrefix({ ...listInputArgs, generatedPrefix }); + } else { + return _listWithPath(listInputArgs); + } + } }; -const _listAll = async ({ +/** @deprecated Use {@link _listAllWithPath} instead. 
*/ +const _listAllWithPrefix = async ({ s3Config, listParams, - prefix, + generatedPrefix, }: ListInputArgs): Promise => { const listResult: ListOutputItem[] = []; let continuationToken = listParams.ContinuationToken; do { - const { items: pageResults, nextToken: pageNextToken } = await _list({ - prefix, - s3Config, - listParams: { - ...listParams, - ContinuationToken: continuationToken, - MaxKeys: MAX_PAGE_SIZE, - }, - }); + const { items: pageResults, nextToken: pageNextToken } = + await _listWithPrefix({ + generatedPrefix, + s3Config, + listParams: { + ...listParams, + ContinuationToken: continuationToken, + MaxKeys: MAX_PAGE_SIZE, + }, + }); listResult.push(...pageResults); continuationToken = pageNextToken; } while (continuationToken); @@ -87,10 +132,11 @@ const _listAll = async ({ }; }; -const _list = async ({ +/** @deprecated Use {@link _listWithPath} instead. */ +const _listWithPrefix = async ({ s3Config, listParams, - prefix, + generatedPrefix, }: ListInputArgs): Promise => { const listParamsClone = { ...listParams }; if (!listParamsClone.MaxKeys || listParamsClone.MaxKeys > MAX_PAGE_SIZE) { @@ -112,15 +158,75 @@ const _list = async ({ }; } - const listResult = response.Contents.map(item => ({ - key: item.Key!.substring(prefix.length), - eTag: item.ETag, - lastModified: item.LastModified, - size: item.Size, - })); + return { + items: response.Contents.map(item => ({ + key: generatedPrefix + ? 
item.Key!.substring(generatedPrefix.length) + : item.Key!, + eTag: item.ETag, + lastModified: item.LastModified, + size: item.Size, + })), + nextToken: response.NextContinuationToken, + }; +}; + +const _listAllWithPath = async ({ + s3Config, + listParams, +}: ListInputArgs): Promise => { + const listResult: ListOutputItemWithPath[] = []; + let continuationToken = listParams.ContinuationToken; + do { + const { items: pageResults, nextToken: pageNextToken } = + await _listWithPath({ + s3Config, + listParams: { + ...listParams, + ContinuationToken: continuationToken, + MaxKeys: MAX_PAGE_SIZE, + }, + }); + listResult.push(...pageResults); + continuationToken = pageNextToken; + } while (continuationToken); return { items: listResult, + }; +}; + +const _listWithPath = async ({ + s3Config, + listParams, +}: ListInputArgs): Promise => { + const listParamsClone = { ...listParams }; + if (!listParamsClone.MaxKeys || listParamsClone.MaxKeys > MAX_PAGE_SIZE) { + logger.debug(`defaulting pageSize to ${MAX_PAGE_SIZE}.`); + listParamsClone.MaxKeys = MAX_PAGE_SIZE; + } + + const response: ListObjectsV2Output = await listObjectsV2( + { + ...s3Config, + userAgentValue: getStorageUserAgentValue(StorageAction.List), + }, + listParamsClone, + ); + + if (!response?.Contents) { + return { + items: [], + }; + } + + return { + items: response.Contents.map(item => ({ + path: item.Key!, + eTag: item.ETag, + lastModified: item.LastModified, + size: item.Size, + })), nextToken: response.NextContinuationToken, }; }; diff --git a/packages/storage/src/providers/s3/apis/internal/remove.ts b/packages/storage/src/providers/s3/apis/internal/remove.ts index 7eae6bc5854..bc0fa4a2ade 100644 --- a/packages/storage/src/providers/s3/apis/internal/remove.ts +++ b/packages/storage/src/providers/s3/apis/internal/remove.ts @@ -4,24 +4,43 @@ import { AmplifyClassV6 } from '@aws-amplify/core'; import { StorageAction } from '@aws-amplify/core/internals/utils'; -import { RemoveInput, RemoveOutput } from 
'../../types'; -import { resolveS3ConfigAndInput } from '../../utils'; +import { + RemoveInput, + RemoveOutput, + RemoveWithPathInput, + RemoveWithPathOutput, +} from '../../types'; +import { + resolveS3ConfigAndInput, + validateStorageOperationInput, +} from '../../utils'; import { deleteObject } from '../../utils/client'; import { getStorageUserAgentValue } from '../../utils/userAgent'; import { logger } from '../../../../utils'; +import { STORAGE_INPUT_KEY } from '../../utils/constants'; export const remove = async ( amplify: AmplifyClassV6, - input: RemoveInput, -): Promise => { - const { key, options = {} } = input; - const { s3Config, keyPrefix, bucket } = await resolveS3ConfigAndInput( - amplify, - options, + input: RemoveInput | RemoveWithPathInput, +): Promise => { + const { options = {} } = input ?? {}; + const { s3Config, keyPrefix, bucket, identityId } = + await resolveS3ConfigAndInput(amplify, options); + + const { inputType, objectKey } = validateStorageOperationInput( + input, + identityId, ); - const finalKey = `${keyPrefix}${key}`; - logger.debug(`remove "${key}" from "${finalKey}".`); + let finalKey; + if (inputType === STORAGE_INPUT_KEY) { + finalKey = `${keyPrefix}${objectKey}`; + logger.debug(`remove "${objectKey}" from "${finalKey}".`); + } else { + finalKey = objectKey; + logger.debug(`removing object in path "${finalKey}"`); + } + await deleteObject( { ...s3Config, @@ -33,7 +52,11 @@ export const remove = async ( }, ); - return { - key, - }; + return inputType === STORAGE_INPUT_KEY + ? { + key: objectKey, + } + : { + path: objectKey, + }; }; diff --git a/packages/storage/src/providers/s3/apis/list.ts b/packages/storage/src/providers/s3/apis/list.ts index 0778252e34d..cd58dbdaacd 100644 --- a/packages/storage/src/providers/s3/apis/list.ts +++ b/packages/storage/src/providers/s3/apis/list.ts @@ -1,41 +1,69 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 - import { Amplify } from '@aws-amplify/core'; import { ListAllInput, ListAllOutput, + ListAllWithPathInput, + ListAllWithPathOutput, ListPaginateInput, ListPaginateOutput, - S3Exception, + ListPaginateWithPathInput, + ListPaginateWithPathOutput, } from '../types'; -import { StorageValidationErrorCode } from '../../../errors/types/validation'; import { list as listInternal } from './internal/list'; -interface ListApi { - /** - * List files with given prefix in pages - * pageSize defaulted to 1000. Additionally, the result will include a nextToken if there are more items to retrieve. - * @param input - The ListPaginateInput object. - * @returns A list of keys and metadata with - * @throws service: {@link S3Exception} - S3 service errors thrown when checking for existence of bucket - * @throws validation: {@link StorageValidationErrorCode } - thrown when there are issues with credentials - */ - (input?: ListPaginateInput): Promise; - /** - * List all files from S3. You can set `listAll` to true in `options` to get all the files from S3. - * @param input - The ListAllInput object. - * @returns A list of keys and metadata for all objects in path - * @throws service: {@link S3Exception} - S3 service errors thrown when checking for existence of bucket - * @throws validation: {@link StorageValidationErrorCode } - thrown when there are issues with credentials - */ - (input?: ListAllInput): Promise; -} +/** + * List files in pages with the given `path`. + * `pageSize` is defaulted to 1000. Additionally, the result will include a `nextToken` if there are more items to retrieve. + * @param input - The `ListPaginateWithPathInput` object. 
+ * @returns A list of objects with path and metadata + * @throws service: `S3Exception` - S3 service errors thrown when checking for existence of bucket + * @throws validation: `StorageValidationErrorCode` - thrown when there are issues with credentials + */ +export function list( + input: ListPaginateWithPathInput, +): Promise; +/** + * List all files from S3 for a given `path`. You can set `listAll` to true in `options` to get all the files from S3. + * @param input - The `ListAllWithPathInput` object. + * @returns A list of all objects with path and metadata + * @throws service: `S3Exception` - S3 service errors thrown when checking for existence of bucket + * @throws validation: `StorageValidationErrorCode` - thrown when there are issues with credentials + */ +export function list( + input: ListAllWithPathInput, +): Promise; +/** + * @deprecated The `prefix` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/react/build-a-backend/storage/list | path} instead. + * List files in pages with the given `prefix`. + * `pageSize` is defaulted to 1000. Additionally, the result will include a `nextToken` if there are more items to retrieve. + * @param input - The `ListPaginateInput` object. + * @returns A list of objects with key and metadata + * @throws service: `S3Exception` - S3 service errors thrown when checking for existence of bucket + * @throws validation: `StorageValidationErrorCode` - thrown when there are issues with credentials + */ +export function list(input?: ListPaginateInput): Promise; +/** + * @deprecated The `prefix` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/react/build-a-backend/storage/list | path} instead. + * List all files from S3 for a given `prefix`. You can set `listAll` to true in `options` to get all the files from S3. + * @param input - The `ListAllInput` object. 
+ * @returns A list of all objects with key and metadata + * @throws service: `S3Exception` - S3 service errors thrown when checking for existence of bucket + * @throws validation: `StorageValidationErrorCode` - thrown when there are issues with credentials + */ +export function list(input?: ListAllInput): Promise; -export const list: ListApi = ( - input?: ListAllInput | ListPaginateInput, -): Promise => { +export function list( + input?: + | ListAllInput + | ListPaginateInput + | ListAllWithPathInput + | ListPaginateWithPathInput, +) { return listInternal(Amplify, input ?? {}); -}; +} diff --git a/packages/storage/src/providers/s3/apis/remove.ts b/packages/storage/src/providers/s3/apis/remove.ts index bda77060f5f..c0526df854c 100644 --- a/packages/storage/src/providers/s3/apis/remove.ts +++ b/packages/storage/src/providers/s3/apis/remove.ts @@ -3,18 +3,39 @@ import { Amplify } from '@aws-amplify/core'; -import { RemoveInput, RemoveOutput, S3Exception } from '../types'; -import { StorageValidationErrorCode } from '../../../errors/types/validation'; +import { + RemoveInput, + RemoveOutput, + RemoveWithPathInput, + RemoveWithPathOutput, +} from '../types'; import { remove as removeInternal } from './internal/remove'; /** * Remove a file from your S3 bucket. - * @param input - The RemoveInput object. + * @param input - The `RemoveWithPathInput` object. + * @return Output containing the removed object path. + * @throws service: `S3Exception` - S3 service errors thrown while while removing the object. + * @throws validation: `StorageValidationErrorCode` - Validation errors thrown + * when there is no path or path is empty or path has a leading slash. + */ +export function remove( + input: RemoveWithPathInput, +): Promise; +/** + * @deprecated The `key` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/react/build-a-backend/storage/remove | path} instead. 
+ * + * Remove a file from your S3 bucket. + * @param input - The `RemoveInput` object. * @return Output containing the removed object key - * @throws service: {@link S3Exception} - S3 service errors thrown while getting properties - * @throws validation: {@link StorageValidationErrorCode } - Validation errors thrown + * @throws service: `S3Exception` - S3 service errors thrown while while removing the object + * @throws validation: `StorageValidationErrorCode` - Validation errors thrown + * when there is no key or its empty. */ -export const remove = (input: RemoveInput): Promise => { +export function remove(input: RemoveInput): Promise; + +export function remove(input: RemoveInput | RemoveWithPathInput) { return removeInternal(Amplify, input); -}; +} diff --git a/packages/storage/src/providers/s3/apis/server/copy.ts b/packages/storage/src/providers/s3/apis/server/copy.ts index 8a4f64d272d..e9486e10431 100644 --- a/packages/storage/src/providers/s3/apis/server/copy.ts +++ b/packages/storage/src/providers/s3/apis/server/copy.ts @@ -1,17 +1,54 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 - import { AmplifyServer, getAmplifyServerContext, } from '@aws-amplify/core/internals/adapter-core'; -import { CopyInput, CopyOutput } from '../../types'; +import { + CopyInput, + CopyOutput, + CopyWithPathInput, + CopyWithPathOutput, +} from '../../types'; import { copy as copyInternal } from '../internal/copy'; -export const copy = async ( +/** + * Copy an object from a source to a destination object within the same bucket. + * + * @param contextSpec - The isolated server context. + * @param input - The `CopyWithPathInput` object. + * @returns Output containing the destination object path. + * @throws service: `S3Exception` - Thrown when checking for existence of the object + * @throws validation: `StorageValidationErrorCode` - Thrown when + * source or destination path is not defined. 
+ */ +export function copy( + contextSpec: AmplifyServer.ContextSpec, + input: CopyWithPathInput, +): Promise; +/** + * @deprecated The `key` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/react/build-a-backend/storage/copy | path} instead. + * + * Copy an object from a source to a destination object within the same bucket. Can optionally copy files across + * different accessLevel or identityId (if source object's accessLevel is 'protected'). + * + * @param contextSpec - The isolated server context. + * @param input - The `CopyInput` object. + * @returns Output containing the destination object key. + * @throws service: `S3Exception` - Thrown when checking for existence of the object + * @throws validation: `StorageValidationErrorCode` - Thrown when + * source or destination key is not defined. + */ +export function copy( contextSpec: AmplifyServer.ContextSpec, input: CopyInput, -): Promise => { +): Promise; + +export function copy( + contextSpec: AmplifyServer.ContextSpec, + input: CopyInput | CopyWithPathInput, +) { return copyInternal(getAmplifyServerContext(contextSpec).amplify, input); -}; +} diff --git a/packages/storage/src/providers/s3/apis/server/getProperties.ts b/packages/storage/src/providers/s3/apis/server/getProperties.ts index d56ee3d77f4..87a77a297a4 100644 --- a/packages/storage/src/providers/s3/apis/server/getProperties.ts +++ b/packages/storage/src/providers/s3/apis/server/getProperties.ts @@ -6,15 +6,52 @@ import { getAmplifyServerContext, } from '@aws-amplify/core/internals/adapter-core'; -import { GetPropertiesInput, GetPropertiesOutput } from '../../types'; +import { + GetPropertiesInput, + GetPropertiesOutput, + GetPropertiesWithPathInput, + GetPropertiesWithPathOutput, +} from '../../types'; import { getProperties as getPropertiesInternal } from '../internal/getProperties'; -export const getProperties = ( +/** + * Gets the properties of a file. 
The properties include S3 system metadata and + * the user metadata that was provided when uploading the file. + * + * @param contextSpec - The isolated server context. + * @param input - The `GetPropertiesWithPathInput` object. + * @returns Requested object properties. + * @throws An `S3Exception` when the underlying S3 service returned error. + * @throws A `StorageValidationErrorCode` when API call parameters are invalid. + */ +export function getProperties( + contextSpec: AmplifyServer.ContextSpec, + input: GetPropertiesWithPathInput, +): Promise; +/** + * @deprecated The `key` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/javascript/build-a-backend/storage/get-properties/ | path} instead. + * + * Gets the properties of a file. The properties include S3 system metadata and + * the user metadata that was provided when uploading the file. + * + * @param contextSpec - The isolated server context. + * @param input - The `GetPropertiesInput` object. + * @returns Requested object properties. + * @throws An `S3Exception` when the underlying S3 service returned error. + * @throws A `StorageValidationErrorCode` when API call parameters are invalid. 
+ */ +export function getProperties( contextSpec: AmplifyServer.ContextSpec, input: GetPropertiesInput, -): Promise => { +): Promise; + +export function getProperties( + contextSpec: AmplifyServer.ContextSpec, + input: GetPropertiesInput | GetPropertiesWithPathInput, +) { return getPropertiesInternal( getAmplifyServerContext(contextSpec).amplify, input, ); -}; +} diff --git a/packages/storage/src/providers/s3/apis/server/getUrl.ts b/packages/storage/src/providers/s3/apis/server/getUrl.ts index 8cdfe2ffb4e..f9f4e80d07c 100644 --- a/packages/storage/src/providers/s3/apis/server/getUrl.ts +++ b/packages/storage/src/providers/s3/apis/server/getUrl.ts @@ -6,12 +6,63 @@ import { getAmplifyServerContext, } from '@aws-amplify/core/internals/adapter-core'; -import { GetUrlInput, GetUrlOutput } from '../../types'; +import { + GetUrlInput, + GetUrlOutput, + GetUrlWithPathInput, + GetUrlWithPathOutput, +} from '../../types'; import { getUrl as getUrlInternal } from '../internal/getUrl'; -export const getUrl = async ( +/** + * Get a temporary presigned URL to download the specified S3 object. + * The presigned URL expires when the associated role used to sign the request expires or + * the option `expiresIn` is reached. The `expiresAt` property in the output object indicates when the URL MAY expire. + * + * By default, it will not validate the object that exists in S3. If you set the `options.validateObjectExistence` + * to true, this method will verify the given object already exists in S3 before returning a presigned + * URL, and will throw `StorageError` if the object does not exist. + * + * @param contextSpec - The isolated server context. + * @param input - The `GetUrlWithPathInput` object. + * @returns Presigned URL and timestamp when the URL may expire. + * @throws service: `S3Exception` - thrown when checking for existence of the object + * @throws validation: `StorageValidationErrorCode` - Validation errors + * thrown either username or key are not defined. 
+ * + */ +export function getUrl( + contextSpec: AmplifyServer.ContextSpec, + input: GetUrlWithPathInput, +): Promise; +/** + * @deprecated The `key` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/javascript/build-a-backend/storage/download/#generate-a-download-url | path} instead. + * + * Get a temporary presigned URL to download the specified S3 object. + * The presigned URL expires when the associated role used to sign the request expires or + * the option `expiresIn` is reached. The `expiresAt` property in the output object indicates when the URL MAY expire. + * + * By default, it will not validate the object that exists in S3. If you set the `options.validateObjectExistence` + * to true, this method will verify the given object already exists in S3 before returning a presigned + * URL, and will throw `StorageError` if the object does not exist. + * + * @param contextSpec - The isolated server context. + * @param input - The `GetUrlInput` object. + * @returns Presigned URL and timestamp when the URL may expire. + * @throws service: `S3Exception` - thrown when checking for existence of the object + * @throws validation: `StorageValidationErrorCode` - Validation errors + * thrown either username or key are not defined. + * + */ +export function getUrl( contextSpec: AmplifyServer.ContextSpec, input: GetUrlInput, -): Promise => { +): Promise; + +export function getUrl( + contextSpec: AmplifyServer.ContextSpec, + input: GetUrlInput | GetUrlWithPathInput, +) { return getUrlInternal(getAmplifyServerContext(contextSpec).amplify, input); -}; +} diff --git a/packages/storage/src/providers/s3/apis/server/list.ts b/packages/storage/src/providers/s3/apis/server/list.ts index 1c15a98af88..66d0ad4cd22 100644 --- a/packages/storage/src/providers/s3/apis/server/list.ts +++ b/packages/storage/src/providers/s3/apis/server/list.ts @@ -1,6 +1,5 @@ // Copyright Amazon.com, Inc. 
or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 - import { AmplifyServer, getAmplifyServerContext, @@ -9,45 +8,78 @@ import { import { ListAllInput, ListAllOutput, + ListAllWithPathInput, + ListAllWithPathOutput, ListPaginateInput, ListPaginateOutput, - S3Exception, + ListPaginateWithPathInput, + ListPaginateWithPathOutput, } from '../../types'; import { list as listInternal } from '../internal/list'; -import { StorageValidationErrorCode } from '../../../../errors/types/validation'; -interface ListApi { - /** - * Lists bucket objects with pagination. - * @param {ListPaginateInput} input The input object - * @return {Promise} - Promise resolves to list of keys and metadata with - * pageSize defaulting to 1000. Additionally the result will include a nextToken if there are more items to retrieve - * @throws service: {@link S3Exception} - S3 service errors thrown when checking for existence of bucket - * @throws validation: {@link StorageValidationErrorCode } - thrown when there are issues with credentials - */ - ( - contextSpec: AmplifyServer.ContextSpec, - input?: ListPaginateInput, - ): Promise; - /** - * Lists all bucket objects. - * @param {ListAllInput} input The input object - * @return {Promise} - Promise resolves to list of keys and metadata for all objects in path - * @throws service: {@link S3Exception} - S3 service errors thrown when checking for existence of bucket - * @throws validation: {@link StorageValidationErrorCode } - thrown when there are issues with credentials - */ - ( - contextSpec: AmplifyServer.ContextSpec, - input?: ListAllInput, - ): Promise; -} +/** + * List files in pages with the given `path`. + * `pageSize` is defaulted to 1000. Additionally, the result will include a `nextToken` if there are more items to retrieve. + * @param input - The `ListPaginateWithPathInput` object. + * @param contextSpec - The context spec used to get the Amplify server context. 
+ * @returns A list of objects with path and metadata + * @throws service: `S3Exception` - S3 service errors thrown when checking for existence of bucket + * @throws validation: `StorageValidationErrorCode` - thrown when there are issues with credentials + */ +export function list( + contextSpec: AmplifyServer.ContextSpec, + input: ListPaginateWithPathInput, +): Promise; +/** + * List all files from S3 for a given `path`. You can set `listAll` to true in `options` to get all the files from S3. + * @param input - The `ListAllWithPathInput` object. + * @param contextSpec - The context spec used to get the Amplify server context. + * @returns A list of all objects with path and metadata + * @throws service: `S3Exception` - S3 service errors thrown when checking for existence of bucket + * @throws validation: `StorageValidationErrorCode` - thrown when there are issues with credentials + */ +export function list( + contextSpec: AmplifyServer.ContextSpec, + input: ListAllWithPathInput, +): Promise; +/** + * @deprecated The `prefix` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/react/build-a-backend/storage/list | path} instead. + * List files in pages with the given `prefix`. + * `pageSize` is defaulted to 1000. Additionally, the result will include a `nextToken` if there are more items to retrieve. + * @param input - The `ListPaginateInput` object. + * @returns A list of objects with key and metadata + * @throws service: `S3Exception` - S3 service errors thrown when checking for existence of bucket + * @throws validation: `StorageValidationErrorCode` - thrown when there are issues with credentials + */ +export function list( + contextSpec: AmplifyServer.ContextSpec, + input?: ListPaginateInput, +): Promise; +/** + * @deprecated The `prefix` and `accessLevel` parameters are deprecated and may be removed in the next major version. 
+ * Please use {@link https://docs.amplify.aws/react/build-a-backend/storage/list | path} instead. + * List all files from S3 for a given `prefix`. You can set `listAll` to true in `options` to get all the files from S3. + * @param input - The `ListAllInput` object. + * @returns A list of all objects with key and metadata + * @throws service: `S3Exception` - S3 service errors thrown when checking for existence of bucket + * @throws validation: `StorageValidationErrorCode` - thrown when there are issues with credentials + */ +export function list( + contextSpec: AmplifyServer.ContextSpec, + input?: ListAllInput, +): Promise; -export const list: ListApi = ( +export function list( contextSpec: AmplifyServer.ContextSpec, - input?: ListAllInput | ListPaginateInput, -): Promise => { + input?: + | ListAllInput + | ListPaginateInput + | ListAllWithPathInput + | ListPaginateWithPathInput, +) { return listInternal( getAmplifyServerContext(contextSpec).amplify, input ?? {}, ); -}; +} diff --git a/packages/storage/src/providers/s3/apis/server/remove.ts b/packages/storage/src/providers/s3/apis/server/remove.ts index 8dbd45bfd19..5b788447f64 100644 --- a/packages/storage/src/providers/s3/apis/server/remove.ts +++ b/packages/storage/src/providers/s3/apis/server/remove.ts @@ -6,12 +6,47 @@ import { getAmplifyServerContext, } from '@aws-amplify/core/internals/adapter-core'; -import { RemoveInput, RemoveOutput } from '../../types'; +import { + RemoveInput, + RemoveOutput, + RemoveWithPathInput, + RemoveWithPathOutput, +} from '../../types'; import { remove as removeInternal } from '../internal/remove'; -export const remove = ( +/** + * Remove a file from your S3 bucket. + * @param input - The `RemoveWithPathInput` object. + * @param contextSpec - The context spec used to get the Amplify server context. + * @return Output containing the removed object path. + * @throws service: `S3Exception` - S3 service errors thrown while while removing the object. 
+ * @throws validation: `StorageValidationErrorCode` - Validation errors thrown + * when there is no path or path is empty or path has a leading slash. + */ +export function remove( + contextSpec: AmplifyServer.ContextSpec, + input: RemoveWithPathInput, +): Promise; +/** + * @deprecated The `key` and `accessLevel` parameters are deprecated and may be removed in the next major version. + * Please use {@link https://docs.amplify.aws/react/build-a-backend/storage/remove | path} instead. + * + * Remove a file from your S3 bucket. + * @param input - The `RemoveInput` object. + * @param contextSpec - The context spec used to get the Amplify server context. + * @return Output containing the removed object key + * @throws service: `S3Exception` - S3 service errors thrown while while removing the object + * @throws validation: `StorageValidationErrorCode` - Validation errors thrown + * when there is no key or its empty. + */ +export function remove( contextSpec: AmplifyServer.ContextSpec, input: RemoveInput, -): Promise => { +): Promise; + +export function remove( + contextSpec: AmplifyServer.ContextSpec, + input: RemoveInput | RemoveWithPathInput, +) { return removeInternal(getAmplifyServerContext(contextSpec).amplify, input); -}; +} diff --git a/packages/storage/src/providers/s3/apis/uploadData/index.ts b/packages/storage/src/providers/s3/apis/uploadData/index.ts index 2c1926cb1e0..8669309ec53 100644 --- a/packages/storage/src/providers/s3/apis/uploadData/index.ts +++ b/packages/storage/src/providers/s3/apis/uploadData/index.ts @@ -1,7 +1,12 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import { S3Exception, UploadDataInput, UploadDataOutput } from '../../types'; +import { + UploadDataInput, + UploadDataOutput, + UploadDataWithPathInput, + UploadDataWithPathOutput, +} from '../../types'; import { createUploadTask } from '../../utils'; import { assertValidationError } from '../../../../errors/utils/assertValidationError'; import { StorageValidationErrorCode } from '../../../../errors/types/validation'; @@ -12,18 +17,77 @@ import { putObjectJob } from './putObjectJob'; import { getMultipartUploadHandlers } from './multipart'; /** - * Upload data to specified S3 object. By default, it uses single PUT operation to upload if the data is less than 5MB. - * Otherwise, it uses multipart upload to upload the data. If the data length is unknown, it uses multipart upload. + * Upload data to the specified S3 object path. By default uses single PUT operation to upload if the payload is less than 5MB. + * Otherwise, uses multipart upload to upload the payload. If the payload length cannot be determined, uses multipart upload. * * Limitations: * * Maximum object size is 5TB. * * Maximum object size if the size cannot be determined before upload is 50GB. * - * @param input - The UploadDataInput object. + * @throws Service: `S3Exception` thrown when checking for existence of the object. + * @throws Validation: `StorageValidationErrorCode` thrown when a validation error occurs. + * + * @param input - A `UploadDataWithPathInput` object. + * * @returns A cancelable and resumable task exposing result promise from `result` * property. - * @throws service: {@link S3Exception} - thrown when checking for existence of the object - * @throws validation: {@link StorageValidationErrorCode } - Validation errors. + * + * @example + * ```ts + * // Upload a file to s3 bucket + * await uploadData({ path, data: file, options: { + * onProgress, // Optional progress callback. 
+ * } }).result; + * ``` + * + * @example + * ```ts + * // Cancel a task + * const uploadTask = uploadData({ path, data: file }); + * //... + * uploadTask.cancel(); + * try { + * await uploadTask.result; + * } catch (error) { + * if(isCancelError(error)) { + * // Handle error thrown by task cancelation. + * } + * } + *``` + * + * @example + * ```ts + * // Pause and resume a task + * const uploadTask = uploadData({ path, data: file }); + * //... + * uploadTask.pause(); + * //... + * uploadTask.resume(); + * //... + * await uploadTask.result; + * ``` + */ +export function uploadData( + input: UploadDataWithPathInput, +): UploadDataWithPathOutput; + +/** + * Upload data to the specified S3 object key. By default uses single PUT operation to upload if the payload is less than 5MB. + * Otherwise, uses multipart upload to upload the payload. If the payload length cannot be determined, uses multipart upload. + * + * Limitations: + * * Maximum object size is 5TB. + * * Maximum object size if the size cannot be determined before upload is 50GB. + * + * @deprecated The `key` and `accessLevel` parameters are deprecated and will be removed in next major version. + * Please use {@link https://docs.amplify.aws/javascript/build-a-backend/storage/upload/#uploaddata | path} instead. + * + * @throws Service: `S3Exception` thrown when checking for existence of the object. + * @throws Validation: `StorageValidationErrorCode` thrown when a validation error occurs. + * + * @param input - A `UploadDataInput` object. + * + * @returns A cancelable and resumable task exposing result promise from the `result` property. * * @example * ```ts @@ -32,6 +96,7 @@ import { getMultipartUploadHandlers } from './multipart'; * onProgress, // Optional progress callback. 
* } }).result; * ``` + * * @example * ```ts * // Cancel a task @@ -59,7 +124,9 @@ import { getMultipartUploadHandlers } from './multipart'; * await uploadTask.result; * ``` */ -export const uploadData = (input: UploadDataInput): UploadDataOutput => { +export function uploadData(input: UploadDataInput): UploadDataOutput; + +export function uploadData(input: UploadDataInput | UploadDataWithPathInput) { const { data } = input; const dataByteLength = byteLength(data); @@ -69,6 +136,7 @@ export const uploadData = (input: UploadDataInput): UploadDataOutput => { ); if (dataByteLength && dataByteLength <= DEFAULT_PART_SIZE) { + // Single part upload const abortController = new AbortController(); return createUploadTask({ @@ -79,6 +147,7 @@ export const uploadData = (input: UploadDataInput): UploadDataOutput => { }, }); } else { + // Multipart upload const { multipartUploadJob, onPause, onResume, onCancel } = getMultipartUploadHandlers(input, dataByteLength); @@ -92,4 +161,4 @@ export const uploadData = (input: UploadDataInput): UploadDataOutput => { onResume, }); } -}; +} diff --git a/packages/storage/src/providers/s3/apis/uploadData/multipart/initialUpload.ts b/packages/storage/src/providers/s3/apis/uploadData/multipart/initialUpload.ts index f9b402c2e86..1179b89c08b 100644 --- a/packages/storage/src/providers/s3/apis/uploadData/multipart/initialUpload.ts +++ b/packages/storage/src/providers/s3/apis/uploadData/multipart/initialUpload.ts @@ -18,8 +18,8 @@ interface LoadOrCreateMultipartUploadOptions { s3Config: ResolvedS3Config; data: StorageUploadDataPayload; bucket: string; - accessLevel: StorageAccessLevel; - keyPrefix: string; + accessLevel?: StorageAccessLevel; + keyPrefix?: string; key: string; contentType?: string; contentDisposition?: string; @@ -54,7 +54,7 @@ export const loadOrCreateMultipartUpload = async ({ metadata, abortSignal, }: LoadOrCreateMultipartUploadOptions): Promise => { - const finalKey = keyPrefix + key; + const finalKey = keyPrefix !== undefined ? 
keyPrefix + key : key; let cachedUpload: | { @@ -75,6 +75,7 @@ export const loadOrCreateMultipartUpload = async ({ accessLevel, key, }); + const cachedUploadParts = await findCachedUploadParts({ s3Config, cacheKey: uploadCacheKey, diff --git a/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadCache.ts b/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadCache.ts index 1d8148223b1..e5619655f3b 100644 --- a/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadCache.ts +++ b/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadCache.ts @@ -94,7 +94,7 @@ interface UploadsCacheKeyOptions { size: number; contentType?: string; bucket: string; - accessLevel: StorageAccessLevel; + accessLevel?: StorageAccessLevel; key: string; file?: File; } @@ -112,10 +112,19 @@ export const getUploadsCacheKey = ({ accessLevel, key, }: UploadsCacheKeyOptions) => { + let levelStr; const resolvedContentType = contentType ?? file?.type ?? 'application/octet-stream'; - const levelStr = accessLevel === 'guest' ? 'public' : accessLevel; + + // If no access level is defined, we're using custom gen2 access rules + if (accessLevel === undefined) { + levelStr = 'custom'; + } else { + levelStr = accessLevel === 'guest' ? 
'public' : accessLevel; + } + const baseId = `${size}_${resolvedContentType}_${bucket}_${levelStr}_${key}`; + if (file) { return `${file.name}_${file.lastModified}_${baseId}`; } else { diff --git a/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadHandlers.ts b/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadHandlers.ts index 587ee33c434..e216feeede7 100644 --- a/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadHandlers.ts +++ b/packages/storage/src/providers/s3/apis/uploadData/multipart/uploadHandlers.ts @@ -4,14 +4,21 @@ import { Amplify, StorageAccessLevel } from '@aws-amplify/core'; import { StorageAction } from '@aws-amplify/core/internals/utils'; -import { UploadDataInput } from '../../../types'; -import { resolveS3ConfigAndInput } from '../../../utils'; -import { Item as S3Item } from '../../../types/outputs'; +import { UploadDataInput, UploadDataWithPathInput } from '../../../types'; +import { + resolveS3ConfigAndInput, + validateStorageOperationInput, +} from '../../../utils'; +import { ItemWithKey, ItemWithPath } from '../../../types/outputs'; import { DEFAULT_ACCESS_LEVEL, DEFAULT_QUEUE_SIZE, + STORAGE_INPUT_KEY, } from '../../../utils/constants'; -import { ResolvedS3Config } from '../../../types/options'; +import { + ResolvedS3Config, + UploadDataOptionsWithKey, +} from '../../../types/options'; import { StorageError } from '../../../../../errors/StorageError'; import { CanceledError } from '../../../../../errors/CanceledError'; import { @@ -36,10 +43,12 @@ import { getDataChunker } from './getDataChunker'; * @internal */ export const getMultipartUploadHandlers = ( - { options: uploadDataOptions, key, data }: UploadDataInput, + uploadDataInput: UploadDataInput | UploadDataWithPathInput, size?: number, ) => { - let resolveCallback: ((value: S3Item) => void) | undefined; + let resolveCallback: + | ((value: ItemWithKey | ItemWithPath) => void) + | undefined; let rejectCallback: ((reason?: any) => void) | 
undefined; let inProgressUpload: | { @@ -49,43 +58,62 @@ export const getMultipartUploadHandlers = ( | undefined; let resolvedS3Config: ResolvedS3Config | undefined; let abortController: AbortController | undefined; + let resolvedAccessLevel: StorageAccessLevel | undefined; let resolvedBucket: string | undefined; let resolvedKeyPrefix: string | undefined; + let resolvedIdentityId: string | undefined; let uploadCacheKey: string | undefined; + let finalKey: string; // Special flag that differentiates HTTP requests abort error caused by pause() from ones caused by cancel(). // The former one should NOT cause the upload job to throw, but cancels any pending HTTP requests. // This should be replaced by a special abort reason. However,the support of this API is lagged behind. let isAbortSignalFromPause = false; - const startUpload = async (): Promise => { + const startUpload = async (): Promise => { + const { options: uploadDataOptions, data } = uploadDataInput; const resolvedS3Options = await resolveS3ConfigAndInput( Amplify, uploadDataOptions, ); + abortController = new AbortController(); + isAbortSignalFromPause = false; resolvedS3Config = resolvedS3Options.s3Config; resolvedBucket = resolvedS3Options.bucket; - resolvedKeyPrefix = resolvedS3Options.keyPrefix; + resolvedIdentityId = resolvedS3Options.identityId; - abortController = new AbortController(); - isAbortSignalFromPause = false; + const { inputType, objectKey } = validateStorageOperationInput( + uploadDataInput, + resolvedIdentityId, + ); const { contentDisposition, contentEncoding, contentType = 'application/octet-stream', metadata, - accessLevel, onProgress, } = uploadDataOptions ?? 
{}; + finalKey = objectKey; + + // Resolve "key" specific options + if (inputType === STORAGE_INPUT_KEY) { + const accessLevel = (uploadDataOptions as UploadDataOptionsWithKey) + ?.accessLevel; + + resolvedKeyPrefix = resolvedS3Options.keyPrefix; + finalKey = resolvedKeyPrefix + objectKey; + resolvedAccessLevel = resolveAccessLevel(accessLevel); + } + if (!inProgressUpload) { const { uploadId, cachedParts } = await loadOrCreateMultipartUpload({ s3Config: resolvedS3Config, - accessLevel: resolveAccessLevel(accessLevel), + accessLevel: resolvedAccessLevel, bucket: resolvedBucket, keyPrefix: resolvedKeyPrefix, - key, + key: objectKey, contentType, contentDisposition, contentEncoding, @@ -100,15 +128,14 @@ export const getMultipartUploadHandlers = ( }; } - const finalKey = resolvedKeyPrefix + key; uploadCacheKey = size ? getUploadsCacheKey({ file: data instanceof File ? data : undefined, - accessLevel: resolveAccessLevel(uploadDataOptions?.accessLevel), + accessLevel: resolvedAccessLevel, contentType: uploadDataOptions?.contentType, bucket: resolvedBucket!, size, - key, + key: objectKey, }) : undefined; @@ -186,12 +213,15 @@ export const getMultipartUploadHandlers = ( await removeCachedUpload(uploadCacheKey); } - return { - key, + const result = { eTag, contentType, metadata, }; + + return inputType === STORAGE_INPUT_KEY + ? { key: objectKey, ...result } + : { path: objectKey, ...result }; }; const startUploadWithResumability = () => @@ -208,7 +238,7 @@ export const getMultipartUploadHandlers = ( }); const multipartUploadJob = () => - new Promise((resolve, reject) => { + new Promise((resolve, reject) => { resolveCallback = resolve; rejectCallback = reject; startUploadWithResumability(); @@ -232,7 +262,7 @@ export const getMultipartUploadHandlers = ( // 3. clear multipart upload on server side. await abortMultipartUpload(resolvedS3Config!, { Bucket: resolvedBucket, - Key: resolvedKeyPrefix! 
+ key, + Key: finalKey, UploadId: inProgressUpload?.uploadId, }); }; diff --git a/packages/storage/src/providers/s3/apis/uploadData/putObjectJob.ts b/packages/storage/src/providers/s3/apis/uploadData/putObjectJob.ts index 5d1a40786ad..bb9b5ec4519 100644 --- a/packages/storage/src/providers/s3/apis/uploadData/putObjectJob.ts +++ b/packages/storage/src/providers/s3/apis/uploadData/putObjectJob.ts @@ -4,11 +4,16 @@ import { Amplify } from '@aws-amplify/core'; import { StorageAction } from '@aws-amplify/core/internals/utils'; -import { UploadDataInput } from '../../types'; -import { calculateContentMd5, resolveS3ConfigAndInput } from '../../utils'; -import { Item as S3Item } from '../../types/outputs'; +import { UploadDataInput, UploadDataWithPathInput } from '../../types'; +import { + calculateContentMd5, + resolveS3ConfigAndInput, + validateStorageOperationInput, +} from '../../utils'; +import { ItemWithKey, ItemWithPath } from '../../types/outputs'; import { putObject } from '../../utils/client'; import { getStorageUserAgentValue } from '../../utils/userAgent'; +import { STORAGE_INPUT_KEY } from '../../utils/constants'; /** * Get a function the returns a promise to call putObject API to S3. 
@@ -17,15 +22,21 @@ import { getStorageUserAgentValue } from '../../utils/userAgent'; */ export const putObjectJob = ( - { options: uploadDataOptions, key, data }: UploadDataInput, + uploadDataInput: UploadDataInput | UploadDataWithPathInput, abortSignal: AbortSignal, totalLength?: number, ) => - async (): Promise => { - const { bucket, keyPrefix, s3Config, isObjectLockEnabled } = + async (): Promise => { + const { options: uploadDataOptions, data } = uploadDataInput; + const { bucket, keyPrefix, s3Config, isObjectLockEnabled, identityId } = await resolveS3ConfigAndInput(Amplify, uploadDataOptions); + const { inputType, objectKey } = validateStorageOperationInput( + uploadDataInput, + identityId, + ); - const finalKey = keyPrefix + key; + const finalKey = + inputType === STORAGE_INPUT_KEY ? keyPrefix + objectKey : objectKey; const { contentDisposition, contentEncoding, @@ -55,12 +66,15 @@ export const putObjectJob = }, ); - return { - key, + const result = { eTag, versionId, contentType, metadata, size: totalLength, }; + + return inputType === STORAGE_INPUT_KEY + ? 
{ key: objectKey, ...result } + : { path: objectKey, ...result }; }; diff --git a/packages/storage/src/providers/s3/index.ts b/packages/storage/src/providers/s3/index.ts index dd2f2eb015e..2ec8bb61527 100644 --- a/packages/storage/src/providers/s3/index.ts +++ b/packages/storage/src/providers/s3/index.ts @@ -13,22 +13,38 @@ export { export { UploadDataInput, + UploadDataWithPathInput, DownloadDataInput, + DownloadDataWithPathInput, RemoveInput, + RemoveWithPathInput, ListAllInput, + ListAllWithPathInput, ListPaginateInput, + ListPaginateWithPathInput, GetPropertiesInput, + GetPropertiesWithPathInput, CopyInput, + CopyWithPathInput, GetUrlInput, + GetUrlWithPathInput, } from './types/inputs'; export { UploadDataOutput, + UploadDataWithPathOutput, DownloadDataOutput, + DownloadDataWithPathOutput, RemoveOutput, + RemoveWithPathOutput, ListAllOutput, + ListAllWithPathOutput, ListPaginateOutput, + ListPaginateWithPathOutput, GetPropertiesOutput, + GetPropertiesWithPathOutput, CopyOutput, + CopyWithPathOutput, GetUrlOutput, + GetUrlWithPathOutput, } from './types/outputs'; diff --git a/packages/storage/src/providers/s3/types/index.ts b/packages/storage/src/providers/s3/types/index.ts index 4366ee48383..4299687cd8e 100644 --- a/packages/storage/src/providers/s3/types/index.ts +++ b/packages/storage/src/providers/s3/types/index.ts @@ -2,35 +2,58 @@ // SPDX-License-Identifier: Apache-2.0 export { - GetUrlOptions, - UploadDataOptions, - GetPropertiesOptions, - ListAllOptions, - ListPaginateOptions, + GetUrlOptionsWithKey, + GetUrlOptionsWithPath, + UploadDataOptionsWithPath, + UploadDataOptionsWithKey, + GetPropertiesOptionsWithKey, + GetPropertiesOptionsWithPath, + ListAllOptionsWithPrefix, + ListPaginateOptionsWithPrefix, + ListAllOptionsWithPath, + ListPaginateOptionsWithPath, RemoveOptions, - DownloadDataOptions, - CopyDestinationOptions, - CopySourceOptions, + DownloadDataOptionsWithPath, + DownloadDataOptionsWithKey, + CopyDestinationOptionsWithKey, + 
CopySourceOptionsWithKey, } from './options'; export { - DownloadDataOutput, - GetUrlOutput, UploadDataOutput, - ListOutputItem, + UploadDataWithPathOutput, + DownloadDataOutput, + DownloadDataWithPathOutput, + RemoveOutput, + RemoveWithPathOutput, ListAllOutput, + ListAllWithPathOutput, ListPaginateOutput, + ListPaginateWithPathOutput, GetPropertiesOutput, + GetPropertiesWithPathOutput, CopyOutput, - RemoveOutput, + CopyWithPathOutput, + GetUrlOutput, + GetUrlWithPathOutput, + ListOutputItem, + ListOutputItemWithPath, } from './outputs'; export { CopyInput, + CopyWithPathInput, GetPropertiesInput, + GetPropertiesWithPathInput, GetUrlInput, - ListAllInput, - ListPaginateInput, + GetUrlWithPathInput, + RemoveWithPathInput, RemoveInput, DownloadDataInput, + DownloadDataWithPathInput, UploadDataInput, + UploadDataWithPathInput, + ListAllInput, + ListPaginateInput, + ListAllWithPathInput, + ListPaginateWithPathInput, } from './inputs'; export { S3Exception } from './errors'; diff --git a/packages/storage/src/providers/s3/types/inputs.ts b/packages/storage/src/providers/s3/types/inputs.ts index 9a360d0bfe3..f7bd6c5db44 100644 --- a/packages/storage/src/providers/s3/types/inputs.ts +++ b/packages/storage/src/providers/s3/types/inputs.ts @@ -2,67 +2,136 @@ // SPDX-License-Identifier: Apache-2.0 import { - StorageCopyInput, - StorageDownloadDataInput, - StorageGetPropertiesInput, - StorageGetUrlInput, - StorageListInput, - StorageRemoveInput, - StorageUploadDataInput, + StorageCopyInputWithKey, + StorageCopyInputWithPath, + StorageDownloadDataInputWithKey, + StorageDownloadDataInputWithPath, + StorageGetPropertiesInputWithKey, + StorageGetPropertiesInputWithPath, + StorageGetUrlInputWithKey, + StorageGetUrlInputWithPath, + StorageListInputWithPath, + StorageListInputWithPrefix, + StorageRemoveInputWithKey, + StorageRemoveInputWithPath, + StorageUploadDataInputWithKey, + StorageUploadDataInputWithPath, } from '../../../types'; import { - CopyDestinationOptions, - 
CopySourceOptions, - DownloadDataOptions, - GetPropertiesOptions, - GetUrlOptions, - ListAllOptions, - ListPaginateOptions, + CopyDestinationOptionsWithKey, + CopySourceOptionsWithKey, + DownloadDataOptionsWithKey, + DownloadDataOptionsWithPath, + GetPropertiesOptionsWithKey, + GetPropertiesOptionsWithPath, + GetUrlOptionsWithKey, + GetUrlOptionsWithPath, + ListAllOptionsWithPath, + ListAllOptionsWithPrefix, + ListPaginateOptionsWithPath, + ListPaginateOptionsWithPrefix, RemoveOptions, - UploadDataOptions, + UploadDataOptionsWithKey, + UploadDataOptionsWithPath, } from '../types'; // TODO: support use accelerate endpoint option /** + * @deprecated Use {@link CopyWithPathInput} instead. * Input type for S3 copy API. */ -export type CopyInput = StorageCopyInput< - CopySourceOptions, - CopyDestinationOptions +export type CopyInput = StorageCopyInputWithKey< + CopySourceOptionsWithKey, + CopyDestinationOptionsWithKey >; +/** + * Input type with path for S3 copy API. + */ +export type CopyWithPathInput = StorageCopyInputWithPath; /** + * @deprecated Use {@link GetPropertiesWithPathInput} instead. * Input type for S3 getProperties API. */ export type GetPropertiesInput = - StorageGetPropertiesInput; + StorageGetPropertiesInputWithKey; +/** + * Input type with for S3 getProperties API. + */ +export type GetPropertiesWithPathInput = + StorageGetPropertiesInputWithPath; /** + * @deprecated Use {@link GetUrlWithPathInput} instead. * Input type for S3 getUrl API. */ -export type GetUrlInput = StorageGetUrlInput; +export type GetUrlInput = StorageGetUrlInputWithKey; +/** + * Input type with path for S3 getUrl API. + */ +export type GetUrlWithPathInput = + StorageGetUrlInputWithPath; + +/** + * Input type with path for S3 list API. Lists all bucket objects. + */ +export type ListAllWithPathInput = + StorageListInputWithPath; + +/** + * Input type with path for S3 list API. Lists bucket objects with pagination. 
+ */ +export type ListPaginateWithPathInput = + StorageListInputWithPath; /** + * @deprecated Use {@link ListAllWithPathInput} instead. * Input type for S3 list API. Lists all bucket objects. */ -export type ListAllInput = StorageListInput; +export type ListAllInput = StorageListInputWithPrefix; /** + * @deprecated Use {@link ListPaginateWithPathInput} instead. * Input type for S3 list API. Lists bucket objects with pagination. */ -export type ListPaginateInput = StorageListInput; +export type ListPaginateInput = + StorageListInputWithPrefix; + +/** + * @deprecated Use {@link RemoveWithPathInput} instead. + * Input type with key for S3 remove API. + */ +export type RemoveInput = StorageRemoveInputWithKey; /** - * Input type for S3 remove API. + * Input type with path for S3 remove API. */ -export type RemoveInput = StorageRemoveInput; +export type RemoveWithPathInput = StorageRemoveInputWithPath< + Omit +>; /** + * @deprecated Use {@link DownloadDataWithPathInput} instead. * Input type for S3 downloadData API. */ -export type DownloadDataInput = StorageDownloadDataInput; +export type DownloadDataInput = + StorageDownloadDataInputWithKey; /** + * Input type with path for S3 downloadData API. + */ +export type DownloadDataWithPathInput = + StorageDownloadDataInputWithPath; + +/** + * @deprecated Use {@link UploadDataWithPathInput} instead. * Input type for S3 uploadData API. */ -export type UploadDataInput = StorageUploadDataInput; +export type UploadDataInput = + StorageUploadDataInputWithKey; + +/** + * Input type with path for S3 uploadData API. 
+ */ +export type UploadDataWithPathInput = + StorageUploadDataInputWithPath; diff --git a/packages/storage/src/providers/s3/types/options.ts b/packages/storage/src/providers/s3/types/options.ts index dad92d72f1b..4d0af341f52 100644 --- a/packages/storage/src/providers/s3/types/options.ts +++ b/packages/storage/src/providers/s3/types/options.ts @@ -18,11 +18,22 @@ interface CommonOptions { useAccelerateEndpoint?: boolean; } +/** @deprecated This may be removed in the next major version. */ type ReadOptions = - | { accessLevel?: 'guest' | 'private' } - | { accessLevel: 'protected'; targetIdentityId?: string }; - + | { + /** @deprecated This may be removed in the next major version. */ + accessLevel?: 'guest' | 'private'; + } + | { + /** @deprecated This may be removed in the next major version. */ + accessLevel: 'protected'; + /** @deprecated This may be removed in the next major version. */ + targetIdentityId?: string; + }; + +/** @deprecated This may be removed in the next major version. */ interface WriteOptions { + /** @deprecated This may be removed in the next major version. */ accessLevel?: StorageAccessLevel; } @@ -46,7 +57,9 @@ interface TransferOptions { /** * Input options type for S3 getProperties API. */ -export type GetPropertiesOptions = ReadOptions & CommonOptions; +/** @deprecated Use {@link GetPropertiesOptionsWithPath} instead. */ +export type GetPropertiesOptionsWithKey = ReadOptions & CommonOptions; +export type GetPropertiesOptionsWithPath = CommonOptions; /** * Input options type for S3 getProperties API. @@ -54,46 +67,71 @@ export type GetPropertiesOptions = ReadOptions & CommonOptions; export type RemoveOptions = WriteOptions & CommonOptions; /** - * Input options type for S3 list API. + * @deprecated Use {@link ListAllOptionsWithPath} instead. + * Input options type with prefix for S3 list all API. 
*/ -export type ListAllOptions = StorageListAllOptions & +export type ListAllOptionsWithPrefix = StorageListAllOptions & ReadOptions & CommonOptions; /** - * Input options type for S3 list API. + * @deprecated Use {@link ListPaginateOptionsWithPath} instead. + * Input options type with prefix for S3 list API to paginate items. */ -export type ListPaginateOptions = StorageListPaginateOptions & +export type ListPaginateOptionsWithPrefix = StorageListPaginateOptions & ReadOptions & CommonOptions; +/** + * Input options type with path for S3 list all API. + */ +export type ListAllOptionsWithPath = Omit< + StorageListAllOptions, + 'accessLevel' +> & + CommonOptions; + +/** + * Input options type with path for S3 list API to paginate items. + */ +export type ListPaginateOptionsWithPath = Omit< + StorageListPaginateOptions, + 'accessLevel' +> & + CommonOptions; + /** * Input options type for S3 getUrl API. */ -export type GetUrlOptions = ReadOptions & - CommonOptions & { - /** - * Whether to head object to make sure the object existence before downloading. - * @default false - */ - validateObjectExistence?: boolean; - /** - * Number of seconds till the URL expires. - * @default 900 (15 minutes) - */ - expiresIn?: number; - }; +export type GetUrlOptions = CommonOptions & { + /** + * Whether to head object to make sure the object existence before downloading. + * @default false + */ + validateObjectExistence?: boolean; + /** + * Number of seconds till the URL expires. + * @default 900 (15 minutes) + */ + expiresIn?: number; +}; + +/** @deprecated Use {@link GetUrlOptionsWithPath} instead. */ +export type GetUrlOptionsWithKey = ReadOptions & GetUrlOptions; +export type GetUrlOptionsWithPath = GetUrlOptions; /** * Input options type for S3 downloadData API. 
*/ -export type DownloadDataOptions = ReadOptions & - CommonOptions & +export type DownloadDataOptions = CommonOptions & TransferOptions & BytesRangeOptions; -export type UploadDataOptions = WriteOptions & - CommonOptions & +/** @deprecated Use {@link DownloadDataOptionsWithPath} instead. */ +export type DownloadDataOptionsWithKey = ReadOptions & DownloadDataOptions; +export type DownloadDataOptionsWithPath = DownloadDataOptions; + +export type UploadDataOptions = CommonOptions & TransferOptions & { /** * The default content-disposition header value of the file when downloading it. @@ -117,11 +155,19 @@ export type UploadDataOptions = WriteOptions & metadata?: Record; }; -export type CopySourceOptions = ReadOptions & { +/** @deprecated Use {@link UploadDataOptionsWithPath} instead. */ +export type UploadDataOptionsWithKey = WriteOptions & UploadDataOptions; +export type UploadDataOptionsWithPath = UploadDataOptions; + +/** @deprecated This may be removed in the next major version. */ +export type CopySourceOptionsWithKey = ReadOptions & { + /** @deprecated This may be removed in the next major version. */ key: string; }; -export type CopyDestinationOptions = WriteOptions & { +/** @deprecated This may be removed in the next major version. */ +export type CopyDestinationOptionsWithKey = WriteOptions & { + /** @deprecated This may be removed in the next major version. */ key: string; }; diff --git a/packages/storage/src/providers/s3/types/outputs.ts b/packages/storage/src/providers/s3/types/outputs.ts index bbcb9fc75b1..44524536a3b 100644 --- a/packages/storage/src/providers/s3/types/outputs.ts +++ b/packages/storage/src/providers/s3/types/outputs.ts @@ -5,15 +5,16 @@ import { DownloadTask, StorageDownloadDataOutput, StorageGetUrlOutput, - StorageItem, + StorageItemWithKey, + StorageItemWithPath, StorageListOutput, UploadTask, } from '../../../types'; /** - * type for S3 item. + * Base type for an S3 item. 
*/ -export interface Item extends StorageItem { +export interface ItemBase { /** * VersionId used to reference a specific version of the object. */ @@ -25,36 +26,83 @@ export interface Item extends StorageItem { } /** - * type for S3 list item. + * @deprecated Use {@link ListOutputItemWithPath} instead. + * type for S3 list item with key. */ -export type ListOutputItem = Omit; +export type ListOutputItem = Omit; + +/** + * type for S3 list item with path. + */ +export type ListOutputItemWithPath = Omit; + +/** + * @deprecated Use {@link ItemWithPath} instead. + */ +export type ItemWithKey = ItemBase & StorageItemWithKey; + +/** + * type for S3 list item with path. + */ +export type ItemWithPath = ItemBase & StorageItemWithPath; /** * Output type for S3 downloadData API. + * @deprecated Use {@link DownloadDataWithPathOutput} instead. + */ +export type DownloadDataOutput = DownloadTask< + StorageDownloadDataOutput +>; +/** + * Output type with path for S3 downloadData API. */ -export type DownloadDataOutput = DownloadTask>; +export type DownloadDataWithPathOutput = DownloadTask< + StorageDownloadDataOutput +>; /** * Output type for S3 getUrl API. + * @deprecated Use {@link GetUrlWithPathOutput} instead. */ export type GetUrlOutput = StorageGetUrlOutput; +/** + * Output type with path for S3 getUrl API. + * */ +export type GetUrlWithPathOutput = StorageGetUrlOutput; /** * Output type for S3 uploadData API. + * @deprecated Use {@link UploadDataWithPathOutput} instead. */ -export type UploadDataOutput = UploadTask; +export type UploadDataOutput = UploadTask; +/** + * Output type with path for S3 uploadData API. + * */ +export type UploadDataWithPathOutput = UploadTask; /** * Output type for S3 getProperties API. - */ -export type GetPropertiesOutput = Item; + * @deprecated Use {@link GetPropertiesWithPathOutput} instead. + * */ +export type GetPropertiesOutput = ItemBase & StorageItemWithKey; +/** + * Output type with path for S3 getProperties API. 
+ * */ +export type GetPropertiesWithPathOutput = ItemBase & StorageItemWithPath; /** + * @deprecated Use {@link ListAllWithPathOutput} instead. * Output type for S3 list API. Lists all bucket objects. */ export type ListAllOutput = StorageListOutput; /** + * Output type with path for S3 list API. Lists all bucket objects. + */ +export type ListAllWithPathOutput = StorageListOutput; + +/** + * @deprecated Use {@link ListPaginateWithPathOutput} instead. * Output type for S3 list API. Lists bucket objects with pagination. */ export type ListPaginateOutput = StorageListOutput & { @@ -62,11 +110,30 @@ export type ListPaginateOutput = StorageListOutput & { }; /** - * Output type for S3 copy API. + * Output type with path for S3 list API. Lists bucket objects with pagination. + */ +export type ListPaginateWithPathOutput = + StorageListOutput & { + nextToken?: string; + }; + +/** + * Output type with path for S3 copy API. + * @deprecated Use {@link CopyWithPathOutput} instead. + */ +export type CopyOutput = Pick; +/** + * Output type with path for S3 copy API. + */ +export type CopyWithPathOutput = Pick; + +/** + * @deprecated Use {@link RemoveWithPathOutput} instead. + * Output type with key for S3 remove API. */ -export type CopyOutput = Pick; +export type RemoveOutput = Pick; /** - * Output type for S3 remove API. + * Output type with path for S3 remove API. 
*/ -export type RemoveOutput = Pick; +export type RemoveWithPathOutput = Pick; diff --git a/packages/storage/src/providers/s3/utils/client/utils/parsePayload.ts b/packages/storage/src/providers/s3/utils/client/utils/parsePayload.ts index 242fa99aff4..9da44dcbdd0 100644 --- a/packages/storage/src/providers/s3/utils/client/utils/parsePayload.ts +++ b/packages/storage/src/providers/s3/utils/client/utils/parsePayload.ts @@ -39,7 +39,7 @@ export const parseXmlBody = async (response: HttpResponse): Promise => { try { return parser.parse(data); } catch (error) { - throw new Error('Failed to parse XML response.'); + throw new Error(`Failed to parse XML response: ${error}`); } } diff --git a/packages/storage/src/providers/s3/utils/constants.ts b/packages/storage/src/providers/s3/utils/constants.ts index 9e48b80047f..482343e5494 100644 --- a/packages/storage/src/providers/s3/utils/constants.ts +++ b/packages/storage/src/providers/s3/utils/constants.ts @@ -19,3 +19,7 @@ export const MAX_PARTS_COUNT = 10000; export const DEFAULT_QUEUE_SIZE = 4; export const UPLOADS_STORAGE_KEY = '__uploadInProgress'; + +export const STORAGE_INPUT_PREFIX = 'prefix'; +export const STORAGE_INPUT_KEY = 'key'; +export const STORAGE_INPUT_PATH = 'path'; diff --git a/packages/storage/src/providers/s3/utils/index.ts b/packages/storage/src/providers/s3/utils/index.ts index fe8ee9db247..cd6b9753019 100644 --- a/packages/storage/src/providers/s3/utils/index.ts +++ b/packages/storage/src/providers/s3/utils/index.ts @@ -4,3 +4,6 @@ export { calculateContentMd5 } from './md5'; export { resolveS3ConfigAndInput } from './resolveS3ConfigAndInput'; export { createDownloadTask, createUploadTask } from './transferTask'; +export { validateStorageOperationInput } from './validateStorageOperationInput'; +export { validateStorageOperationInputWithPrefix } from './validateStorageOperationInputWithPrefix'; +export { isInputWithPath } from './isInputWithPath'; diff --git 
a/packages/storage/src/providers/s3/utils/isInputWithPath.ts b/packages/storage/src/providers/s3/utils/isInputWithPath.ts new file mode 100644 index 00000000000..86e5351f914 --- /dev/null +++ b/packages/storage/src/providers/s3/utils/isInputWithPath.ts @@ -0,0 +1,13 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { + StorageOperationInput, + StorageOperationInputWithPath, +} from '../../../types/inputs'; + +export const isInputWithPath = ( + input: StorageOperationInput, +): input is StorageOperationInputWithPath => { + return input.path !== undefined; +}; diff --git a/packages/storage/src/providers/s3/utils/resolveS3ConfigAndInput.ts b/packages/storage/src/providers/s3/utils/resolveS3ConfigAndInput.ts index d99149f6c0a..701c046d52f 100644 --- a/packages/storage/src/providers/s3/utils/resolveS3ConfigAndInput.ts +++ b/packages/storage/src/providers/s3/utils/resolveS3ConfigAndInput.ts @@ -5,7 +5,6 @@ import { AmplifyClassV6, StorageAccessLevel } from '@aws-amplify/core'; import { assertValidationError } from '../../../errors/utils/assertValidationError'; import { StorageValidationErrorCode } from '../../../errors/types/validation'; -import { StorageError } from '../../../errors/StorageError'; import { resolvePrefix as defaultPrefixResolver } from '../../../utils/resolvePrefix'; import { ResolvedS3Config } from '../types/options'; @@ -22,6 +21,7 @@ interface ResolvedS3ConfigAndInput { bucket: string; keyPrefix: string; isObjectLockEnabled?: boolean; + identityId?: string; } /** @@ -30,7 +30,7 @@ interface ResolvedS3ConfigAndInput { * @param {AmplifyClassV6} amplify The Amplify instance. * @param {S3ApiOptions} apiOptions The input options for S3 provider. * @returns {Promise} The resolved common input options for S3 API handlers. 
- * @throws A {@link StorageError} with `error.name` from {@link StorageValidationErrorCode} indicating invalid + * @throws A `StorageError` with `error.name` from `StorageValidationErrorCode` indicating invalid * configurations or Amplify library options. * * @internal @@ -84,6 +84,7 @@ export const resolveS3ConfigAndInput = async ( }, bucket, keyPrefix, + identityId, isObjectLockEnabled, }; }; diff --git a/packages/storage/src/providers/s3/utils/validateStorageOperationInput.ts b/packages/storage/src/providers/s3/utils/validateStorageOperationInput.ts new file mode 100644 index 00000000000..585701c81e9 --- /dev/null +++ b/packages/storage/src/providers/s3/utils/validateStorageOperationInput.ts @@ -0,0 +1,39 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { StorageOperationInput as Input } from '../../../types/inputs'; +import { assertValidationError } from '../../../errors/utils/assertValidationError'; +import { StorageValidationErrorCode } from '../../../errors/types/validation'; + +import { isInputWithPath } from './isInputWithPath'; +import { STORAGE_INPUT_KEY, STORAGE_INPUT_PATH } from './constants'; + +export const validateStorageOperationInput = ( + input: Input, + identityId?: string, +) => { + assertValidationError( + // Key present without a path + (!!(input as Input).key && !(input as Input).path) || + // Path present without a key + (!(input as Input).key && !!(input as Input).path), + StorageValidationErrorCode.InvalidStorageOperationInput, + ); + + if (isInputWithPath(input)) { + const { path } = input; + const objectKey = typeof path === 'string' ? 
path : path({ identityId }); + + assertValidationError( + !objectKey.startsWith('/'), + StorageValidationErrorCode.InvalidStoragePathInput, + ); + + return { + inputType: STORAGE_INPUT_PATH, + objectKey, + }; + } else { + return { inputType: STORAGE_INPUT_KEY, objectKey: input.key }; + } +}; diff --git a/packages/storage/src/providers/s3/utils/validateStorageOperationInputWithPrefix.ts b/packages/storage/src/providers/s3/utils/validateStorageOperationInputWithPrefix.ts new file mode 100644 index 00000000000..da1068af010 --- /dev/null +++ b/packages/storage/src/providers/s3/utils/validateStorageOperationInputWithPrefix.ts @@ -0,0 +1,46 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { + StorageOperationInputWithPath, + StorageOperationInputWithPrefixPath, +} from '../../../types/inputs'; +import { assertValidationError } from '../../../errors/utils/assertValidationError'; +import { StorageValidationErrorCode } from '../../../errors/types/validation'; + +import { STORAGE_INPUT_PATH, STORAGE_INPUT_PREFIX } from './constants'; + +// Local assertion function with StorageOperationInputWithPrefixPath as Input +const _isInputWithPath = ( + input: StorageOperationInputWithPrefixPath, +): input is StorageOperationInputWithPath => { + return input.path !== undefined; +}; + +export const validateStorageOperationInputWithPrefix = ( + input: StorageOperationInputWithPrefixPath, + identityId?: string, +) => { + // Validate prefix & path not present at the same time + assertValidationError( + !(input.prefix && input.path), + StorageValidationErrorCode.InvalidStorageOperationPrefixInput, + ); + if (_isInputWithPath(input)) { + const { path } = input; + const objectKey = typeof path === 'string' ? 
path : path({ identityId }); + + // Assert on no leading slash in the path parameter + assertValidationError( + !objectKey.startsWith('/'), + StorageValidationErrorCode.InvalidStoragePathInput, + ); + + return { + inputType: STORAGE_INPUT_PATH, + objectKey, + }; + } else { + return { inputType: STORAGE_INPUT_PREFIX, objectKey: input.prefix ?? '' }; + } +}; diff --git a/packages/storage/src/types/index.ts b/packages/storage/src/types/index.ts index 39bb1c049a3..317fa20104c 100644 --- a/packages/storage/src/types/index.ts +++ b/packages/storage/src/types/index.ts @@ -8,14 +8,20 @@ export { UploadTask, } from './common'; export { - StorageOperationInput, - StorageListInput, - StorageGetPropertiesInput, - StorageRemoveInput, - StorageDownloadDataInput, - StorageUploadDataInput, - StorageCopyInput, - StorageGetUrlInput, + StorageGetPropertiesInputWithKey, + StorageGetPropertiesInputWithPath, + StorageListInputWithPrefix, + StorageListInputWithPath, + StorageRemoveInputWithPath, + StorageRemoveInputWithKey, + StorageDownloadDataInputWithKey, + StorageDownloadDataInputWithPath, + StorageUploadDataInputWithKey, + StorageUploadDataInputWithPath, + StorageCopyInputWithKey, + StorageCopyInputWithPath, + StorageGetUrlInputWithKey, + StorageGetUrlInputWithPath, StorageUploadDataPayload, } from './inputs'; export { @@ -26,6 +32,8 @@ export { } from './options'; export { StorageItem, + StorageItemWithKey, + StorageItemWithPath, StorageListOutput, StorageDownloadDataOutput, StorageGetUrlOutput, diff --git a/packages/storage/src/types/inputs.ts b/packages/storage/src/types/inputs.ts index 861ebf53876..403a2a14332 100644 --- a/packages/storage/src/types/inputs.ts +++ b/packages/storage/src/types/inputs.ts @@ -1,44 +1,88 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 +import { StrictUnion } from '@aws-amplify/core/internals/utils'; + import { StorageListAllOptions, StorageListPaginateOptions, StorageOptions, } from './options'; -export interface StorageOperationInput { +export type StorageOperationInput = StrictUnion< + StorageOperationInputWithKey | StorageOperationInputWithPath +>; +export type StorageOperationInputWithPrefixPath = StrictUnion< + StorageOperationInputWithPath | StorageOperationInputWithPrefix +>; +/** @deprecated Use {@link StorageOperationInputWithPath} instead. */ +export interface StorageOperationInputWithKey { + /** @deprecated Use `path` instead. */ key: string; - options?: Options; +} +export interface StorageOperationInputWithPath { + path: string | (({ identityId }: { identityId?: string }) => string); } -export type StorageGetPropertiesInput = - StorageOperationInput; +/** @deprecated Use {@link StorageOperationInputWithPath} instead. */ +export interface StorageOperationInputWithPrefix { + /** @deprecated Use `path` instead. */ + prefix?: string; +} -export interface StorageRemoveInput { - key: string; +export interface StorageOperationOptionsInput { options?: Options; } -export interface StorageListInput< +/** @deprecated Use {@link StorageDownloadDataInputWithPath} instead. */ +export type StorageDownloadDataInputWithKey = + StorageOperationInputWithKey & StorageOperationOptionsInput; + +export type StorageDownloadDataInputWithPath = + StorageOperationInputWithPath & StorageOperationOptionsInput; + +/** @deprecated Use {@link StorageGetPropertiesInputWithPath} instead. 
*/ +export type StorageGetPropertiesInputWithKey = + StorageOperationInputWithKey & StorageOperationOptionsInput; + +export type StorageGetPropertiesInputWithPath = + StorageOperationInputWithPath & StorageOperationOptionsInput; + +export type StorageRemoveInputWithKey = StorageOperationInputWithKey & + StorageOperationOptionsInput; + +export type StorageRemoveInputWithPath = + StorageOperationInputWithPath & StorageOperationOptionsInput; + +/** @deprecated Use {@link StorageListInputWithPath} instead. */ +export type StorageListInputWithPrefix< Options extends StorageListAllOptions | StorageListPaginateOptions, -> { - prefix?: string; - options?: Options; -} +> = StorageOperationInputWithPrefix & StorageOperationOptionsInput; + +export type StorageListInputWithPath< + Options extends StorageListAllOptions | StorageListPaginateOptions, +> = StorageOperationInputWithPath & StorageOperationOptionsInput; -export type StorageGetUrlInput = - StorageOperationInput; +/** @deprecated Use {@link StorageGetUrlInputWithPath} instead. */ +export type StorageGetUrlInputWithKey = + StorageOperationInputWithKey & StorageOperationOptionsInput; -export type StorageDownloadDataInput = - StorageOperationInput; +export type StorageGetUrlInputWithPath = + StorageOperationInputWithPath & StorageOperationOptionsInput; -export type StorageUploadDataInput = - StorageOperationInput & { - data: StorageUploadDataPayload; - }; +/** @deprecated Use {@link StorageUploadDataInputWithPath} instead. */ +export type StorageUploadDataInputWithKey = + StorageOperationInputWithKey & + StorageOperationOptionsInput & + StorageUploadDataInputPayload; -export interface StorageCopyInput< +export type StorageUploadDataInputWithPath = + StorageOperationInputWithPath & + StorageOperationOptionsInput & + StorageUploadDataInputPayload; + +/** @deprecated Use {@link StorageCopyInputWithPath} instead. 
*/ +export interface StorageCopyInputWithKey< SourceOptions extends StorageOptions, DestinationOptions extends StorageOptions, > { @@ -46,6 +90,11 @@ export interface StorageCopyInput< destination: DestinationOptions; } +export interface StorageCopyInputWithPath { + source: StorageOperationInputWithPath; + destination: StorageOperationInputWithPath; +} + /** * The data payload type for upload operation. */ @@ -54,3 +103,7 @@ export type StorageUploadDataPayload = | ArrayBufferView | ArrayBuffer | string; + +export interface StorageUploadDataInputPayload { + data: StorageUploadDataPayload; +} diff --git a/packages/storage/src/types/options.ts b/packages/storage/src/types/options.ts index df992df5838..b9c74590ba6 100644 --- a/packages/storage/src/types/options.ts +++ b/packages/storage/src/types/options.ts @@ -4,6 +4,7 @@ import { StorageAccessLevel } from '@aws-amplify/core'; export interface StorageOptions { + /** @deprecated This may be removed in the next major version. */ accessLevel?: StorageAccessLevel; } diff --git a/packages/storage/src/types/outputs.ts b/packages/storage/src/types/outputs.ts index b5b3a9236c0..e38482729b8 100644 --- a/packages/storage/src/types/outputs.ts +++ b/packages/storage/src/types/outputs.ts @@ -3,11 +3,10 @@ import { ResponseBodyMixin } from '@aws-amplify/core/internals/aws-client-utils'; -export interface StorageItem { - /** - * Key of the object - */ - key: string; +/** + * Base type for a storage item. + */ +export interface StorageItemBase { /** * Creation date of the object. */ @@ -28,7 +27,28 @@ export interface StorageItem { metadata?: Record; } -export type StorageDownloadDataOutput = T & { +/** @deprecated Use {@link StorageItemWithPath} instead. */ +export type StorageItemWithKey = StorageItemBase & { + /** + * @deprecated This may be removed in next major version. + * Key of the object. + */ + key: string; +}; + +export type StorageItemWithPath = StorageItemBase & { + /** + * Path of the object. 
+ */ + path: string; +}; + +/** + * A storage item can be identified either by a key or a path. + */ +export type StorageItem = StorageItemWithKey | StorageItemWithPath; + +export type StorageDownloadDataOutput = Item & { body: ResponseBodyMixin; }; @@ -46,5 +66,8 @@ export interface StorageGetUrlOutput { export type StorageUploadOutput = Item; export interface StorageListOutput { + /** + * List of items returned by the list API. + */ items: Item[]; } diff --git a/yarn.lock b/yarn.lock index 182e8479aee..83c82754b5f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -24,10 +24,10 @@ graphql "15.8.0" rxjs "^7.8.1" -"@aws-amplify/data-schema@^0.17.0": - version "0.17.0" - resolved "https://registry.yarnpkg.com/@aws-amplify/data-schema/-/data-schema-0.17.0.tgz#f6950c3e66fcc17acfdceb67f562e87c15e47698" - integrity sha512-UTKz2Jpd7aLPlLql/eY1hINXRsIIW7bUxNU0uVzxUaj8Jk31delAT2qvOeSkCE8He66VSPrpYKXLp3w8tlMUAA== +"@aws-amplify/data-schema@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@aws-amplify/data-schema/-/data-schema-1.0.0.tgz#435f7320bdde034e130a12d0c3613e18d5089b08" + integrity sha512-pvXko9gDnBqyNshXwrkAer3CbFGkwtlMI35wJk48+N1rxLXKLYElADZj6kWwZXmBbI67uD4ZJ3s62Qzsb6lZZA== dependencies: "@aws-amplify/data-schema-types" "*" "@types/aws-lambda" "^8.10.134"