refactor: upload data unification, less permissions and less endpoints (#31959)
dpgaspar authored Jan 28, 2025
1 parent 09c1987 commit 1b375b7
Showing 14 changed files with 311 additions and 488 deletions.
1 change: 1 addition & 0 deletions UPDATING.md
@@ -24,6 +24,7 @@ assists people when migrating to a new version.

## Next

- [31959](https://github.com/apache/superset/pull/31959) Removes the following data upload endpoints: `/api/v1/database/<id>/<file type>_upload` and `/api/v1/database/<file type>_metadata`, in favor of a single unified endpoint (see the PR for details), and simplifies the related permissions.
- [31844](https://github.com/apache/superset/pull/31844) The `ALERT_REPORTS_EXECUTE_AS` and `THUMBNAILS_EXECUTE_AS` config parameters have been renamed to `ALERT_REPORTS_EXECUTORS` and `THUMBNAILS_EXECUTORS` respectively. A new config flag `CACHE_WARMUP_EXECUTORS` has also been introduced to be able to control which user is used to execute cache warmup tasks. Finally, the config flag `THUMBNAILS_SELENIUM_USER` has been removed. To use a fixed executor for async tasks, use the new `FixedExecutor` class. See the config and docs for more info on setting up different executor profiles.
- [31894](https://github.com/apache/superset/pull/31894) Domain sharding is deprecated in favor of HTTP2. The `SUPERSET_WEBSERVER_DOMAINS` configuration will be removed in the next major version (6.0)
- [31774](https://github.com/apache/superset/pull/31774): Fixes the spelling of the `USE-ANALAGOUS-COLORS` feature flag. Please update any scripts/configuration item to use the new/corrected `USE-ANALOGOUS-COLORS` flag spelling.
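For context, here is a minimal sketch of how a client might call the unified upload endpoint that replaces the per-type routes, assuming a local Superset instance and a valid JWT access token; the endpoint path and form fields (`type`, `table_name`, `schema`, `delimiter`, `file`) are taken from the frontend changes further down in this commit, while the URL, token, and database id are placeholders.

```python
# Illustrative only: upload a CSV through the unified endpoint added in this
# commit. SUPERSET_URL, ACCESS_TOKEN and DATABASE_ID are placeholders.
import requests

SUPERSET_URL = "http://localhost:8088"
ACCESS_TOKEN = "<jwt-access-token>"
DATABASE_ID = 1

with open("test.csv", "rb") as fh:
    response = requests.post(
        f"{SUPERSET_URL}/api/v1/database/{DATABASE_ID}/upload/",
        headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
        # The file type now travels as a form field instead of being part
        # of the URL (csv_upload / excel_upload / columnar_upload).
        data={
            "type": "csv",
            "table_name": "table1",
            "schema": "public",
            "delimiter": ",",
        },
        files={"file": ("test.csv", fh, "text/csv")},
    )
response.raise_for_status()
```

Depending on your deployment, additional headers such as a CSRF token may be required; that is outside the scope of this sketch.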
@@ -25,9 +25,7 @@ import userEvent from '@testing-library/user-event';
import { waitFor } from '@testing-library/react';
import { UploadFile } from 'antd/lib/upload/interface';

fetchMock.post('glob:*api/v1/database/1/csv_upload/', {});
fetchMock.post('glob:*api/v1/database/1/excel_upload/', {});
fetchMock.post('glob:*api/v1/database/1/columnar_upload/', {});
fetchMock.post('glob:*api/v1/database/1/upload/', {});

fetchMock.get(
'glob:*api/v1/database/?q=(filters:!((col:allow_file_upload,opr:eq,value:!t)),page:0,page_size:100)',
@@ -643,18 +641,21 @@ test('CSV, form post', async () => {
});

userEvent.click(uploadButton);
await waitFor(() => fetchMock.called('glob:*api/v1/database/1/csv_upload/'));
await waitFor(() => fetchMock.called('glob:*api/v1/database/1/upload/'));

// Get the matching fetch calls made
const matchingCalls = fetchMock.calls('glob:*api/v1/database/1/csv_upload/');
const matchingCalls = fetchMock.calls('glob:*api/v1/database/1/upload/');
expect(matchingCalls).toHaveLength(1);
const [_, options] = matchingCalls[0];
const formData = options?.body as FormData;
expect(formData.get('type')).toBe('csv');
expect(formData.get('table_name')).toBe('table1');
expect(formData.get('schema')).toBe('public');
expect(formData.get('table_name')).toBe('table1');
const fileData = formData.get('file') as File;
expect(fileData.name).toBe('test.csv');
// Avoid leaking fetchMock calls
fetchMock.resetHistory();
});

test('Excel, form post', async () => {
@@ -700,22 +701,21 @@ test('Excel, form post', async () => {
});

userEvent.click(uploadButton);
await waitFor(() =>
fetchMock.called('glob:*api/v1/database/1/excel_upload/'),
);
await waitFor(() => fetchMock.called('glob:*api/v1/database/1/upload/'));

// Get the matching fetch calls made
const matchingCalls = fetchMock.calls(
'glob:*api/v1/database/1/excel_upload/',
);
const matchingCalls = fetchMock.calls('glob:*api/v1/database/1/upload/');
expect(matchingCalls).toHaveLength(1);
const [_, options] = matchingCalls[0];
const formData = options?.body as FormData;
expect(formData.get('type')).toBe('excel');
expect(formData.get('table_name')).toBe('table1');
expect(formData.get('schema')).toBe('public');
expect(formData.get('table_name')).toBe('table1');
const fileData = formData.get('file') as File;
expect(fileData.name).toBe('test.xls');
// Avoid leaking fetchMock calls
fetchMock.resetHistory();
});

test('Columnar, form post', async () => {
@@ -761,22 +761,21 @@ test('Columnar, form post', async () => {
});

userEvent.click(uploadButton);
await waitFor(() =>
fetchMock.called('glob:*api/v1/database/1/columnar_upload/'),
);
await waitFor(() => fetchMock.called('glob:*api/v1/database/1/upload/'));

// Get the matching fetch calls made
const matchingCalls = fetchMock.calls(
'glob:*api/v1/database/1/columnar_upload/',
);
const matchingCalls = fetchMock.calls('glob:*api/v1/database/1/upload/');
expect(matchingCalls).toHaveLength(1);
const [_, options] = matchingCalls[0];
const formData = options?.body as FormData;
expect(formData.get('type')).toBe('columnar');
expect(formData.get('table_name')).toBe('table1');
expect(formData.get('schema')).toBe('public');
expect(formData.get('table_name')).toBe('table1');
const fileData = formData.get('file') as File;
expect(fileData.name).toBe('test.parquet');
// Avoid leaking fetchMock calls
fetchMock.resetHistory();
});

test('CSV, validate file extension returns false', () => {
21 changes: 6 additions & 15 deletions superset-frontend/src/features/databases/UploadDataModel/index.tsx
@@ -230,19 +230,8 @@ const UploadDataModal: FunctionComponent<UploadDataModalProps> = ({
const [previewUploadedFile, setPreviewUploadedFile] = useState<boolean>(true);
const [fileLoading, setFileLoading] = useState<boolean>(false);

const createTypeToEndpointMap = (
databaseId: number,
): { [key: string]: string } => ({
csv: `/api/v1/database/${databaseId}/csv_upload/`,
excel: `/api/v1/database/${databaseId}/excel_upload/`,
columnar: `/api/v1/database/${databaseId}/columnar_upload/`,
});

const typeToFileMetadataEndpoint = {
csv: '/api/v1/database/csv_metadata/',
excel: '/api/v1/database/excel_metadata/',
columnar: '/api/v1/database/columnar_metadata/',
};
const createTypeToEndpointMap = (databaseId: number) =>
`/api/v1/database/${databaseId}/upload/`;

const nullValuesOptions = [
{
@@ -389,9 +378,10 @@ const UploadDataModal: FunctionComponent<UploadDataModalProps> = ({
if (type === 'csv') {
formData.append('delimiter', mergedValues.delimiter);
}
formData.append('type', type);
setFileLoading(true);
return SupersetClient.post({
endpoint: typeToFileMetadataEndpoint[type],
endpoint: '/api/v1/database/upload_metadata/',
body: formData,
headers: { Accept: 'application/json' },
})
@@ -472,7 +462,8 @@ const UploadDataModal: FunctionComponent<UploadDataModalProps> = ({
}
appendFormData(formData, mergedValues);
setIsLoading(true);
const endpoint = createTypeToEndpointMap(currentDatabaseId)[type];
const endpoint = createTypeToEndpointMap(currentDatabaseId);
formData.append('type', type);
return SupersetClient.post({
endpoint,
body: formData,
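The frontend change above also points all file previews at a single `/api/v1/database/upload_metadata/` route, with the file type passed as a form field. Below is a hedged sketch of calling it directly; the response structure is an assumption, since this diff only shows the request side.

```python
# Illustrative only: preview file metadata through the single
# /api/v1/database/upload_metadata/ route before uploading.
# The printed response shape is an assumption, not shown in this diff.
import requests

SUPERSET_URL = "http://localhost:8088"
ACCESS_TOKEN = "<jwt-access-token>"

with open("test.csv", "rb") as fh:
    response = requests.post(
        f"{SUPERSET_URL}/api/v1/database/upload_metadata/",
        headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
        data={"type": "csv", "delimiter": ","},
        files={"file": ("test.csv", fh, "text/csv")},
    )
response.raise_for_status()
print(response.json())  # e.g. column information used to pre-fill the modal
```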
3 changes: 1 addition & 2 deletions superset-frontend/src/features/home/RightMenu.test.tsx
@@ -168,8 +168,7 @@ const resetUseSelectorMock = () => {
permissions: {},
roles: {
Admin: [
['can_csv_upload', 'Database'], // So we can upload CSV
['can_excel_upload', 'Database'], // So we can upload CSV
['can_upload', 'Database'], // So we can upload data (CSV, Excel, Columnar)
['can_write', 'Database'], // So we can write DBs
['can_write', 'Dataset'], // So we can write Datasets
['can_write', 'Chart'], // So we can write Datasets
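With the per-type permissions gone, custom roles only need the single `can_upload` permission on `Database` after upgrading. A hedged sketch of granting it programmatically through the Flask-AppBuilder security manager follows; the role name is hypothetical, and this would typically run inside a Superset app context (for example via `superset shell`).

```python
# Illustrative only: grant the unified upload permission to a custom role.
# "Data Uploader" is a hypothetical role name; adapt to your own deployment.
from superset import security_manager

role = security_manager.find_role("Data Uploader")
pvm = security_manager.find_permission_view_menu("can_upload", "Database")
if role and pvm:
    security_manager.add_permission_role(role, pvm)
```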
6 changes: 3 additions & 3 deletions superset-frontend/src/views/CRUD/utils.tsx
@@ -507,14 +507,14 @@ export const uploadUserPerms = (
allowedExt: Array<string>,
) => {
const canUploadCSV =
findPermission('can_csv_upload', 'Database', roles) &&
findPermission('can_upload', 'Database', roles) &&
checkUploadExtensions(csvExt, allowedExt);
const canUploadColumnar =
checkUploadExtensions(colExt, allowedExt) &&
findPermission('can_columnar_upload', 'Database', roles);
findPermission('can_upload', 'Database', roles);
const canUploadExcel =
checkUploadExtensions(excelExt, allowedExt) &&
findPermission('can_excel_upload', 'Database', roles);
findPermission('can_upload', 'Database', roles);
return {
canUploadCSV,
canUploadColumnar,
7 changes: 7 additions & 0 deletions superset/commands/database/uploaders/base.py
@@ -36,6 +36,7 @@
from superset.daos.database import DatabaseDAO
from superset.models.core import Database
from superset.sql_parse import Table
from superset.utils.backports import StrEnum
from superset.utils.core import get_user
from superset.utils.decorators import on_error, transaction
from superset.views.database.validators import schema_allows_file_upload
@@ -45,6 +46,12 @@
READ_CHUNK_SIZE = 1000


class UploadFileType(StrEnum):
CSV = "csv"
EXCEL = "excel"
COLUMNAR = "columnar"


class ReaderOptions(TypedDict, total=False):
already_exists: str
index_label: str
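The new `UploadFileType` enum gives the unified upload command one place to switch on the requested file type. A minimal sketch of that dispatch pattern follows; the reader names are stand-ins, since the actual reader classes live elsewhere in this module and are not part of this hunk.

```python
# Illustrative only: dispatch on UploadFileType to pick a file reader.
# The reader values are placeholders for the real CSV/Excel/columnar readers.
from enum import Enum


class StrEnum(str, Enum):
    """Simplified stand-in for superset.utils.backports.StrEnum."""


class UploadFileType(StrEnum):
    CSV = "csv"
    EXCEL = "excel"
    COLUMNAR = "columnar"


READERS = {
    UploadFileType.CSV: "CSVReader",
    UploadFileType.EXCEL: "ExcelReader",
    UploadFileType.COLUMNAR: "ColumnarReader",
}


def get_reader(file_type: str) -> str:
    # StrEnum members are strings, so the raw "type" form field value
    # ("csv", "excel", "columnar") converts directly to an enum member.
    return READERS[UploadFileType(file_type)]


print(get_reader("csv"))  # -> CSVReader
```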
4 changes: 1 addition & 3 deletions superset/constants.py
@@ -169,9 +169,7 @@ class RouteMethod: # pylint: disable=too-few-public-methods
"delete_object": "write",
"copy_dash": "write",
"get_connection": "write",
"excel_metadata": "excel_upload",
"columnar_metadata": "columnar_upload",
"csv_metadata": "csv_upload",
"upload_metadata": "upload",
"slack_channels": "write",
"put_filters": "write",
"put_colors": "write",
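This constants.py entry feeds the method-to-permission mapping that Flask-AppBuilder REST APIs use, so both the `upload` and `upload_metadata` API methods resolve to the single `upload` permission. A hedged sketch of that mechanism on an illustrative API class (not Superset's actual one):

```python
# Illustrative only: a Flask-AppBuilder API where two exposed methods share
# one permission name. A role holding the single "can_upload on Database"
# style permission can then call either route.
from flask_appbuilder.api import BaseApi, expose, protect


class UploadApi(BaseApi):  # illustrative class, not Superset's DatabaseRestApi
    method_permission_name = {
        "upload": "upload",
        "upload_metadata": "upload",
    }

    @expose("/<int:pk>/upload/", methods=("POST",))
    @protect()
    def upload(self, pk: int):
        return self.response(200, message=f"uploaded to database {pk}")

    @expose("/upload_metadata/", methods=("POST",))
    @protect()
    def upload_metadata(self):
        return self.response(200, message="metadata")
```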
(The remaining changed files in this commit are not shown here.)
