Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/thick-flies-fail.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'@aws-amplify/ui-react-storage': minor
---

deduplicate Storage Browser locations that share the same bucket and prefix, keeping the location whose permissions are a superset of the others
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,10 @@ jest.mock('../../../storage-internal');
const mockListCallerAccessGrants = jest.mocked(listCallerAccessGrants);

/**
 * Builds `size` mock locations from a template, giving each entry a unique
 * scope (`s3://bucket/prefix1/*`, `s3://bucket/prefix2/*`, …) so that the
 * deduplication logic under test does not collapse them as identical grants.
 */
const generateMockLocations = (size: number, mockLocations: LocationAccess) =>
  Array.from({ length: size }, (_, i) => ({
    ...mockLocations,
    scope: `s3://bucket/prefix${i + 1}/*`,
  }));

const accountId = 'account-id';
const credentials: LocationCredentialsProvider = jest.fn();
Expand Down Expand Up @@ -80,18 +83,20 @@ describe('listLocationsHandler', () => {
permission: 'READWRITE',
type: 'PREFIX',
};
const allLocations = generateMockLocations(5, mockLocation);

const mockOutputPage1: ListLocationsOutput = {
locations: [mockLocation],
locations: [allLocations[0]],
nextToken: 'token1',
};

const mockOutputPage2: ListLocationsOutput = {
locations: [...generateMockLocations(2, mockLocation)],
locations: [allLocations[1], allLocations[2]],
nextToken: 'token2',
};

const mockOutputPage3: ListLocationsOutput = {
locations: [...generateMockLocations(2, mockLocation)],
locations: [allLocations[3], allLocations[4]],
nextToken: undefined,
};

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ jest.mock('aws-amplify', () => ({

import {
createFileDataItem,
deduplicateLocations,
getBucketRegion,
getFileKey,
getFilteredLocations,
Expand Down Expand Up @@ -246,6 +247,207 @@ describe('utils', () => {
});
});

// Unit tests for deduplicateLocations: locations sharing the same bucket and
// prefix are collapsed when one grant's permission set is a strict superset
// (or an exact duplicate) of another's; incompatible grants such as READ vs
// WRITE are intentionally kept as separate locations.
describe('deduplicateLocations', () => {
it('returns all locations when there are no duplicates', () => {
const locations: LocationData[] = [
{
bucket: 'bucket1',
id: 'id1',
permissions: ['get', 'list'],
prefix: 'prefix1/',
type: 'PREFIX',
},
{
bucket: 'bucket2',
id: 'id2',
permissions: ['get', 'list'],
prefix: 'prefix2/',
type: 'PREFIX',
},
];

const result = deduplicateLocations(locations);
expect(result).toHaveLength(2);
expect(result).toEqual(locations);
});

it('deduplicates READ + READWRITE, keeping READWRITE (superset)', () => {
const locations: LocationData[] = [
{
bucket: 'idfc-sboms',
id: 'id1',
permissions: ['get', 'list'],
prefix: '',
type: 'BUCKET',
},
{
bucket: 'idfc-sboms',
id: 'id2',
permissions: ['delete', 'get', 'list', 'write'],
prefix: '',
type: 'BUCKET',
},
];

const result = deduplicateLocations(locations);
// Only the broader (READWRITE) grant survives.
expect(result).toHaveLength(1);
expect(result[0].permissions).toEqual(['delete', 'get', 'list', 'write']);
expect(result[0].id).toBe('id2');
});

it('does NOT deduplicate READ + WRITE (not superset, keeps both)', () => {
const locations: LocationData[] = [
{
bucket: 'bucket1',
id: 'id1',
permissions: ['get', 'list'],
prefix: 'prefix/',
type: 'PREFIX',
},
{
bucket: 'bucket1',
id: 'id2',
permissions: ['delete', 'write'],
prefix: 'prefix/',
type: 'PREFIX',
},
];

// Neither set contains the other, so both grants must remain visible.
const result = deduplicateLocations(locations);
expect(result).toHaveLength(2);
expect(result.find((l) => l.id === 'id1')).toBeDefined();
expect(result.find((l) => l.id === 'id2')).toBeDefined();
});

it('keeps first location when permissions are identical', () => {
const locations: LocationData[] = [
{
bucket: 'bucket1',
id: 'id1',
permissions: ['get', 'list'],
prefix: 'prefix/',
type: 'PREFIX',
},
{
bucket: 'bucket1',
id: 'id2',
permissions: ['get', 'list'],
prefix: 'prefix/',
type: 'PREFIX',
},
];

// Exact duplicates collapse to the earliest occurrence (stable dedupe).
const result = deduplicateLocations(locations);
expect(result).toHaveLength(1);
expect(result[0].id).toBe('id1');
});

it('deduplicates when READWRITE is superset of READ and WRITE', () => {
const locations: LocationData[] = [
{
bucket: 'bucket1',
id: 'id1',
permissions: ['get', 'list'],
prefix: 'prefix/',
type: 'PREFIX',
},
{
bucket: 'bucket1',
id: 'id2',
permissions: ['delete', 'write'],
prefix: 'prefix/',
type: 'PREFIX',
},
{
bucket: 'bucket1',
id: 'id3',
permissions: ['delete', 'get', 'list', 'write'],
prefix: 'prefix/',
type: 'PREFIX',
},
];

const result = deduplicateLocations(locations);
// READWRITE is superset of both READ and WRITE, so only READWRITE should remain
expect(result).toHaveLength(1);
expect(result[0].permissions).toEqual(['delete', 'get', 'list', 'write']);
expect(result[0].id).toBe('id3');
});

it('treats different prefixes as separate locations', () => {
const locations: LocationData[] = [
{
bucket: 'bucket1',
id: 'id1',
permissions: ['get', 'list'],
prefix: 'prefix1/',
type: 'PREFIX',
},
{
bucket: 'bucket1',
id: 'id2',
permissions: ['delete', 'get', 'list', 'write'],
prefix: 'prefix2/',
type: 'PREFIX',
},
];

const result = deduplicateLocations(locations);
expect(result).toHaveLength(2);
});

it('treats different buckets as separate locations', () => {
const locations: LocationData[] = [
{
bucket: 'bucket1',
id: 'id1',
permissions: ['get', 'list'],
prefix: 'prefix/',
type: 'PREFIX',
},
{
bucket: 'bucket2',
id: 'id2',
permissions: ['get', 'list'],
prefix: 'prefix/',
type: 'PREFIX',
},
];

const result = deduplicateLocations(locations);
expect(result).toHaveLength(2);
});

it('handles empty location array', () => {
const result = deduplicateLocations([]);
expect(result).toEqual([]);
});

it('deduplicates WRITE + READWRITE, keeping READWRITE (superset)', () => {
const locations: LocationData[] = [
{
bucket: 'bucket1',
id: 'id1',
permissions: ['delete', 'write'],
prefix: 'prefix/',
type: 'PREFIX',
},
{
bucket: 'bucket1',
id: 'id2',
permissions: ['delete', 'get', 'list', 'write'],
prefix: 'prefix/',
type: 'PREFIX',
},
];

const result = deduplicateLocations(locations);
expect(result).toHaveLength(1);
expect(result[0].permissions).toEqual(['delete', 'get', 'list', 'write']);
expect(result[0].id).toBe('id2');
});
});

describe('getBucketRegion', () => {
beforeEach(() => {
jest.clearAllMocks();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import type {
LocationData,
ActionInputConfig,
} from './types';
import { getFilteredLocations } from './utils';
import { deduplicateLocations, getFilteredLocations } from './utils';

const DEFAULT_PAGE_SIZE = 1000;

Expand Down Expand Up @@ -76,5 +76,11 @@ export const listLocationsHandler: ListLocationsHandler = async (input) => {
return { items, nextToken: output.nextToken };
};

return fetchLocations([], nextToken);
const result = await fetchLocations([], nextToken);

// Deduplicate locations with the same bucket and prefix, keeping broader permissions
return {
items: deduplicateLocations(result.items),
nextToken: result.nextToken,
};
};
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,86 @@ export const shouldExcludeLocation = (
return excludedByPermssions || excludedByType;
};

/**
 * Determines if permissions1 is a strict superset (broader) of permissions2.
 * Returns true only if permissions1 contains all permissions from permissions2
 * AND has more permissions.
 */
const hasBroaderPermissions = (
  permissions1: LocationPermissions,
  permissions2: LocationPermissions
): boolean => {
  // Strict superset requires strictly more entries; equal sets are handled
  // separately by the identical-permissions check in deduplicateLocations.
  if (permissions1.length <= permissions2.length) {
    return false;
  }

  // permissions1 must contain all permissions from permissions2
  return permissions2.every((perm) => permissions1.includes(perm));
};

/**
 * Builds an order-insensitive signature for a permission set so that
 * identical sets compare equal regardless of element order. `sort` is
 * applied to a copy to avoid mutating the input.
 */
const toPermissionKey = (permissions: LocationPermissions): string =>
  [...permissions].sort().join(',');

/**
 * Deduplicates locations with the same bucket and prefix.
 * Only deduplicates when one location's permissions are a superset of another's.
 * This prevents deduplication of incompatible grants like READ + WRITE.
 *
 * Examples:
 * - READ + READWRITE → Keep READWRITE (superset)
 * - READ + READ → Keep first (identical)
 * - READ + WRITE → Keep both (not superset, need separate locations)
 *
 * Output order is stable: groups appear in first-seen order, and surviving
 * locations keep their input order within each group.
 */
export const deduplicateLocations = (
  locations: LocationData[]
): LocationData[] => {
  // Group locations by bucket:prefix.
  // NOTE(review): `type` is not part of the key, so a BUCKET grant and a
  // PREFIX grant over the same bucket + empty prefix would be compared
  // against each other — confirm that is intended.
  const locationGroups = new Map<string, LocationData[]>();

  for (const location of locations) {
    const key = `${location.bucket}:${location.prefix}`;
    const group = locationGroups.get(key) ?? [];
    group.push(location);
    locationGroups.set(key, group);
  }

  // For each group, keep only non-redundant locations
  const result: LocationData[] = [];

  for (const group of locationGroups.values()) {
    // Fast path: a lone location has nothing to be compared against.
    if (group.length === 1) {
      result.push(group[0]);
      continue;
    }

    // Permission signatures already emitted for this group; used to keep
    // only the first occurrence of identical grants.
    const seenPermissionKeys = new Set<string>();

    for (const location of group) {
      // Drop this location if any other grant in the group is strictly broader.
      const isSubsetOfAnother = group.some(
        (other) =>
          other !== location &&
          hasBroaderPermissions(other.permissions, location.permissions)
      );
      if (isSubsetOfAnother) {
        continue;
      }

      // Compute the signature once per location (the previous implementation
      // re-sorted the permission arrays for every pairwise comparison).
      const permissionKey = toPermissionKey(location.permissions);
      if (!seenPermissionKeys.has(permissionKey)) {
        seenPermissionKeys.add(permissionKey);
        result.push(location);
      }
    }
  }

  return result;
};

export const getFilteredLocations = (
locations: AccessGrantLocation[],
exclude?: ListLocationsExcludeOptions
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import type { LocationData } from '../../actions';
import type { ListPathsOutput } from '../../storage-internal';
import { listPaths } from '../../storage-internal';
import type { ListLocations, ListLocationsInput } from '../../actions';
import { deduplicateLocations } from '../../actions/handlers/utils';

import { parseAmplifyAuthPermission } from '../permissionParsers';
import { getPaginatedLocations } from './getPaginatedLocations';
Expand Down Expand Up @@ -36,7 +37,8 @@ export const createAmplifyListLocationsHandler = (): ListLocations => {
}
);

cachedItems = sanitizedItems;
// Deduplicate locations with the same bucket and prefix, keeping broader permissions
cachedItems = deduplicateLocations(sanitizedItems);

return getPaginatedLocations({
items: cachedItems,
Expand Down
Loading