diff --git a/acceptance/bundle/apps/app_yaml/out.app.yml.txt b/acceptance/bundle/apps/app_yaml/out.app.yml.txt index 29cb7da70f..e79c7a4f57 100644 --- a/acceptance/bundle/apps/app_yaml/out.app.yml.txt +++ b/acceptance/bundle/apps/app_yaml/out.app.yml.txt @@ -1,8 +1,12 @@ { "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/app/app.yml", - "q": { - "overwrite": "true" - }, - "raw_body": "command:\n - python\n - app.py\n" + "path": "/api/2.0/workspace/import", + "body": { + "multipart_form": { + "content": "command:\n - python\n - app.py\n", + "format": "AUTO", + "overwrite": "true", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/app/app.yml" + } + } } diff --git a/acceptance/bundle/apps/app_yaml/output.txt b/acceptance/bundle/apps/app_yaml/output.txt index addb91d683..7733b0f8ed 100644 --- a/acceptance/bundle/apps/app_yaml/output.txt +++ b/acceptance/bundle/apps/app_yaml/output.txt @@ -19,7 +19,7 @@ Deploying resources... Updating deployment state... Deployment complete! 
->>> jq select(.path | test("app.yml")) out.requests.txt +>>> jq select(.body.multipart_form.path | strings | test("app.yml")) out.requests.txt >>> [CLI] bundle destroy --auto-approve The following resources will be deleted: diff --git a/acceptance/bundle/apps/app_yaml/script b/acceptance/bundle/apps/app_yaml/script index d0dd8d5f80..adec87dc21 100644 --- a/acceptance/bundle/apps/app_yaml/script +++ b/acceptance/bundle/apps/app_yaml/script @@ -1,7 +1,7 @@ trace $CLI bundle validate trace $CLI bundle plan trace $CLI bundle deploy -trace jq 'select(.path | test("app.yml"))' out.requests.txt | sed 's/\\r//g' > out.app.yml.txt +trace jq 'select(.body.multipart_form.path | strings | test("app.yml"))' out.requests.txt | sed 's/\\r//g' > out.app.yml.txt #trace print_requests.py //apps # currently fails due to TF inserting description="" rm out.requests.txt diff --git a/acceptance/bundle/artifacts/artifact_upload_for_workspace/output.txt b/acceptance/bundle/artifacts/artifact_upload_for_workspace/output.txt index dd81ba7de2..b9d8cfeb72 100644 --- a/acceptance/bundle/artifacts/artifact_upload_for_workspace/output.txt +++ b/acceptance/bundle/artifacts/artifact_upload_for_workspace/output.txt @@ -50,9 +50,9 @@ Deployment complete! 
] === Expecting wheel to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source.whl" -"/api/2.0/workspace-files/import-file/Workspace/foo/bar/artifacts/.internal/source.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source.whl +/Workspace/foo/bar/artifacts/.internal/source.whl === Expecting environment dependencies to be updated >>> jq -s .[] | select(.path=="/api/2.2/jobs/create") | .body.environments out.requests.txt diff --git a/acceptance/bundle/artifacts/artifact_upload_for_workspace/script b/acceptance/bundle/artifacts/artifact_upload_for_workspace/script index e6b1437627..cd28cc070f 100644 --- a/acceptance/bundle/artifacts/artifact_upload_for_workspace/script +++ b/acceptance/bundle/artifacts/artifact_upload_for_workspace/script @@ -8,7 +8,7 @@ title "Expecting 2 wheels in libraries section in /jobs/create" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requests.txt title "Expecting wheel to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort title "Expecting environment dependencies to be updated" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.environments' out.requests.txt diff --git a/acceptance/bundle/artifacts/artifact_upload_with_no_library_reference/output.txt b/acceptance/bundle/artifacts/artifact_upload_with_no_library_reference/output.txt index 6d24880e6c..d309b10cad 100644 --- a/acceptance/bundle/artifacts/artifact_upload_with_no_library_reference/output.txt +++ b/acceptance/bundle/artifacts/artifact_upload_with_no_library_reference/output.txt @@ -6,9 +6,9 @@ Deploying resources... Deployment complete! 
=== Expecting wheel to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source.whl" -"/api/2.0/workspace-files/import-file/Workspace/foo/bar/artifacts/.internal/source.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source.whl +/Workspace/foo/bar/artifacts/.internal/source.whl === Expecting delete request to artifact_path/.internal folder >>> jq -s .[] | select(.path=="/api/2.0/workspace/delete") | select(.body.path | test(".*/artifacts/.internal")) out.requests.txt diff --git a/acceptance/bundle/artifacts/artifact_upload_with_no_library_reference/script b/acceptance/bundle/artifacts/artifact_upload_with_no_library_reference/script index 883601185c..b5ee485cbd 100644 --- a/acceptance/bundle/artifacts/artifact_upload_with_no_library_reference/script +++ b/acceptance/bundle/artifacts/artifact_upload_with_no_library_reference/script @@ -5,7 +5,7 @@ echo "test wheel content" > whl/source.whl trace $CLI bundle deploy title "Expecting wheel to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort title "Expecting delete request to artifact_path/.internal folder" trace jq -s '.[] | select(.path=="/api/2.0/workspace/delete") | select(.body.path | test(".*/artifacts/.internal"))' out.requests.txt diff --git a/acceptance/bundle/artifacts/upload_multiple_libraries/output.txt b/acceptance/bundle/artifacts/upload_multiple_libraries/output.txt index fa725a29d8..7e5436900e 100644 --- a/acceptance/bundle/artifacts/upload_multiple_libraries/output.txt +++ b/acceptance/bundle/artifacts/upload_multiple_libraries/output.txt @@ -40,15 +40,15 @@ Deployment complete! 
] === Expecting 4 wheels to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source1.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source2.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source3.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source4.whl" -"/api/2.0/workspace-files/import-file/Workspace/foo/bar/artifacts/.internal/source1.whl" -"/api/2.0/workspace-files/import-file/Workspace/foo/bar/artifacts/.internal/source2.whl" -"/api/2.0/workspace-files/import-file/Workspace/foo/bar/artifacts/.internal/source3.whl" -"/api/2.0/workspace-files/import-file/Workspace/foo/bar/artifacts/.internal/source4.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source1.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source2.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source3.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/whl/source4.whl +/Workspace/foo/bar/artifacts/.internal/source1.whl +/Workspace/foo/bar/artifacts/.internal/source2.whl +/Workspace/foo/bar/artifacts/.internal/source3.whl +/Workspace/foo/bar/artifacts/.internal/source4.whl === Expecting environment dependencies to be updated >>> jq -s .[] | select(.path=="/api/2.2/jobs/create") | .body.environments out.requests.txt diff --git a/acceptance/bundle/artifacts/upload_multiple_libraries/script b/acceptance/bundle/artifacts/upload_multiple_libraries/script index f615147f8c..7625d8947f 100644 --- a/acceptance/bundle/artifacts/upload_multiple_libraries/script +++ b/acceptance/bundle/artifacts/upload_multiple_libraries/script @@ -11,7 +11,7 @@ title "Expecting 5 wheels in libraries section in /jobs/create" trace jq -s 
'.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requests.txt title "Expecting 4 wheels to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort title "Expecting environment dependencies to be updated" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.environments' out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_change_version/output.txt b/acceptance/bundle/artifacts/whl_change_version/output.txt index 4fc7ff6674..4451e8811e 100644 --- a/acceptance/bundle/artifacts/whl_change_version/output.txt +++ b/acceptance/bundle/artifacts/whl_change_version/output.txt @@ -29,23 +29,23 @@ dist/my_test_code-0.1.0-py3-none-any.whl ] === Expecting 1 wheel to be uploaded ->>> jq .path out.requests.txt -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.1.0-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/.gitignore" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/databricks.yml" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.1.0-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/__init__.py" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/__main__.py" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/out.requests.txt" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/output.txt" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/repls.json" 
-"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/script" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/setup.py" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/test.toml" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/STATE_FILENAME" +>>> jq -r .body.multipart_form.path | strings out.requests.txt +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.1.0-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/.gitignore +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/databricks.yml +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.1.0-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/__init__.py +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/__main__.py +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/out.requests.txt +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/output.txt +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/repls.json +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/script +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/setup.py +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/test.toml +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json 
+/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/STATE_FILENAME >>> update_file.py my_test_code/__init__.py 0.1.0 0.2.0 @@ -66,16 +66,16 @@ dist/my_test_code-0.2.0-py3-none-any.whl json[0].libraries[0].whl = "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.2.0-py3-none-any.whl"; === Expecting 1 wheel to be uploaded ->>> jq .path out.requests.txt -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.2.0-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.2.0-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/__init__.py" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/out.requests.txt" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/output.txt" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/STATE_FILENAME" +>>> jq -r .body.multipart_form.path | strings out.requests.txt +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.2.0-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.2.0-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/__init__.py 
+/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/out.requests.txt +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/output.txt +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/STATE_FILENAME === Restore config to target old wheel >>> update_file.py databricks.yml ./dist/*.whl ./dist/my*0.1.0*.whl @@ -98,14 +98,14 @@ dist/my_test_code-0.2.0-py3-none-any.whl json[0].libraries[0].whl = "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.1.0-py3-none-any.whl"; === Expecting 1 wheel to be uploaded ->>> jq .path out.requests.txt -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.1.0-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.2.0-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/databricks.yml" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.2.0-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/out.requests.txt" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/output.txt" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" 
-"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/STATE_FILENAME" +>>> jq -r .body.multipart_form.path | strings out.requests.txt +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.1.0-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.2.0-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/databricks.yml +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.2.0-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/out.requests.txt +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/output.txt +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/STATE_FILENAME diff --git a/acceptance/bundle/artifacts/whl_change_version/script b/acceptance/bundle/artifacts/whl_change_version/script index 9f6f480f0d..68f33c9563 100644 --- a/acceptance/bundle/artifacts/whl_change_version/script +++ b/acceptance/bundle/artifacts/whl_change_version/script @@ -6,7 +6,7 @@ title "Expecting 1 wheel in libraries section in /jobs/create" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requests.txt title "Expecting 1 wheel to be uploaded" -trace jq .path out.requests.txt | grep import | sort +trace jq -r '.body.multipart_form.path | strings' out.requests.txt | sort rm out.requests.txt @@ -20,7 +20,7 @@ title "Expecting 1 wheel in libraries section in /jobs/reset" trace jq -s '.[] | select(.path=="/api/2.2/jobs/reset") | .body.new_settings.tasks' out.requests.txt | gron.py | grep -w libraries title "Expecting 1 wheel to be uploaded" -trace jq .path out.requests.txt | grep import | sort 
+trace jq -r '.body.multipart_form.path | strings' out.requests.txt | sort rm out.requests.txt @@ -34,6 +34,6 @@ title "Expecting 1 wheel in libraries section in /jobs/reset" trace jq -s '.[] | select(.path=="/api/2.2/jobs/reset") | .body.new_settings.tasks' out.requests.txt | gron.py | grep -w libraries title "Expecting 1 wheel to be uploaded" -trace jq .path out.requests.txt | grep import | sort +trace jq -r '.body.multipart_form.path | strings' out.requests.txt | sort rm out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_dbfs/output.txt b/acceptance/bundle/artifacts/whl_dbfs/output.txt index 306de22de1..2d1d4ecbda 100644 --- a/acceptance/bundle/artifacts/whl_dbfs/output.txt +++ b/acceptance/bundle/artifacts/whl_dbfs/output.txt @@ -27,6 +27,6 @@ Deployment complete! ] === Expecting no wheels to be uploaded ->>> errcode sh -c jq .path < out.requests.txt | grep import | grep whl +>>> errcode sh -c jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl Exit code: 1 diff --git a/acceptance/bundle/artifacts/whl_dbfs/script b/acceptance/bundle/artifacts/whl_dbfs/script index d7c93d8f38..3f15b85912 100644 --- a/acceptance/bundle/artifacts/whl_dbfs/script +++ b/acceptance/bundle/artifacts/whl_dbfs/script @@ -7,6 +7,6 @@ title "Expecting 1 wheel in libraries section in /jobs/create" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requests.txt title "Expecting no wheels to be uploaded" -trace errcode sh -c 'jq .path < out.requests.txt | grep import | grep whl' +trace errcode sh -c "jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl" rm out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_dynamic/output.txt b/acceptance/bundle/artifacts/whl_dynamic/output.txt index c7409f3740..c66cee0294 100644 --- a/acceptance/bundle/artifacts/whl_dynamic/output.txt +++ b/acceptance/bundle/artifacts/whl_dynamic/output.txt @@ -58,11 +58,11 @@ json[1].libraries[0].whl = 
"/Workspace/Users/[USERNAME]/.bundle/test-bundle/defa json[1].libraries[1].whl = "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/other_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl"; === Expecting 2 patched wheels to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][1]-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/other_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/prebuilt/other_test_code-0.0.1-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][1]-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/other_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/prebuilt/other_test_code-0.0.1-py3-none-any.whl === Updating the local wheel and deploying again Building my_test_code... 
@@ -94,7 +94,7 @@ json[1].libraries[0].whl = "/Workspace/Users/[USERNAME]/.bundle/test-bundle/defa json[1].libraries[1].whl = "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/other_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl"; === Expecting 2 pached wheels to be uploaded (Bad: it is currently uploaded twice) ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][2]-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/other_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][2]-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/other_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl diff --git a/acceptance/bundle/artifacts/whl_dynamic/script b/acceptance/bundle/artifacts/whl_dynamic/script index 068db16caf..1e02902b5e 100644 --- a/acceptance/bundle/artifacts/whl_dynamic/script +++ b/acceptance/bundle/artifacts/whl_dynamic/script @@ -19,7 +19,7 @@ title "Expecting 2 patched wheels in libraries section in /jobs/create" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requests.txt | gron.py | grep -w libraries title "Expecting 2 patched wheels to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort rm out.requests.txt @@ -35,4 +35,4 
@@ title "Expecting 2 patched wheels in libraries section in /jobs/reset" trace jq -s '.[] | select(.path=="/api/2.2/jobs/reset") | .body.new_settings.tasks' out.requests.txt | gron.py | grep -w libraries title "Expecting 2 pached wheels to be uploaded (Bad: it is currently uploaded twice)" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort diff --git a/acceptance/bundle/artifacts/whl_explicit/output.txt b/acceptance/bundle/artifacts/whl_explicit/output.txt index 6c1c06c46e..bbac28dc11 100644 --- a/acceptance/bundle/artifacts/whl_explicit/output.txt +++ b/acceptance/bundle/artifacts/whl_explicit/output.txt @@ -29,9 +29,9 @@ my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl ] === Expecting 1 wheel to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl === Expecting delete request to artifact_path/.internal folder >>> jq -s .[] | select(.path=="/api/2.0/workspace/delete") | select(.body.path | test(".*/artifacts/.internal")) out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_explicit/script b/acceptance/bundle/artifacts/whl_explicit/script index 4988edd815..db664434f0 100644 --- a/acceptance/bundle/artifacts/whl_explicit/script +++ b/acceptance/bundle/artifacts/whl_explicit/script @@ -6,7 +6,7 @@ title "Expecting 1 wheel in libraries section in /jobs/create" trace jq -s '.[] | 
select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requests.txt title "Expecting 1 wheel to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort title "Expecting delete request to artifact_path/.internal folder" trace jq -s '.[] | select(.path=="/api/2.0/workspace/delete") | select(.body.path | test(".*/artifacts/.internal"))' out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_implicit/output.txt b/acceptance/bundle/artifacts/whl_implicit/output.txt index 698cc20c51..a3dc47e712 100644 --- a/acceptance/bundle/artifacts/whl_implicit/output.txt +++ b/acceptance/bundle/artifacts/whl_implicit/output.txt @@ -29,6 +29,6 @@ dist/my_test_code-0.0.1-py3-none-any.whl ] === Expecting 1 wheels to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.0.1-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.0.1-py3-none-any.whl diff --git a/acceptance/bundle/artifacts/whl_implicit/script b/acceptance/bundle/artifacts/whl_implicit/script index da61fb91f8..948e73d11d 100644 --- a/acceptance/bundle/artifacts/whl_implicit/script +++ b/acceptance/bundle/artifacts/whl_implicit/script @@ -6,6 +6,6 @@ title "Expecting 1 wheels in libraries section in /jobs/create" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requests.txt title "Expecting 1 wheels to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < 
out.requests.txt | grep whl | sort rm out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_implicit_custom_path/output.txt b/acceptance/bundle/artifacts/whl_implicit_custom_path/output.txt index 71f1bffc1d..dd3d94f7be 100644 --- a/acceptance/bundle/artifacts/whl_implicit_custom_path/output.txt +++ b/acceptance/bundle/artifacts/whl_implicit_custom_path/output.txt @@ -41,6 +41,6 @@ package/my_test_code-0.0.1-py3-none-any.whl } === Expecting 1 wheel to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/package/my_test_code-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/package/my_test_code-0.0.1-py3-none-any.whl +/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl diff --git a/acceptance/bundle/artifacts/whl_implicit_custom_path/script b/acceptance/bundle/artifacts/whl_implicit_custom_path/script index fdc0723f59..13f1ba1b6e 100644 --- a/acceptance/bundle/artifacts/whl_implicit_custom_path/script +++ b/acceptance/bundle/artifacts/whl_implicit_custom_path/script @@ -6,6 +6,6 @@ title "Expecting 1 wheel in libraries section in /jobs/create" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body' out.requests.txt title "Expecting 1 wheel to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort rm out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_implicit_notebook/output.txt b/acceptance/bundle/artifacts/whl_implicit_notebook/output.txt index 7e393eabc4..1c5bbc1251 100644 --- a/acceptance/bundle/artifacts/whl_implicit_notebook/output.txt +++ b/acceptance/bundle/artifacts/whl_implicit_notebook/output.txt @@ -19,6 +19,6 @@ 
dist/my_test_code-0.0.1-py3-none-any.whl ] === Expecting 1 wheel to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.0.1-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.0.1-py3-none-any.whl diff --git a/acceptance/bundle/artifacts/whl_implicit_notebook/script b/acceptance/bundle/artifacts/whl_implicit_notebook/script index 30196556f1..8d68a61e6d 100644 --- a/acceptance/bundle/artifacts/whl_implicit_notebook/script +++ b/acceptance/bundle/artifacts/whl_implicit_notebook/script @@ -6,6 +6,6 @@ title "Expecting 1 wheel in libraries section in /jobs/create" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks[0].libraries' out.requests.txt title "Expecting 1 wheel to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort rm out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_multiple/output.txt b/acceptance/bundle/artifacts/whl_multiple/output.txt index a03025d910..565f8524b9 100644 --- a/acceptance/bundle/artifacts/whl_multiple/output.txt +++ b/acceptance/bundle/artifacts/whl_multiple/output.txt @@ -35,8 +35,8 @@ my_test_code/dist/my_test_code_2-0.0.1-py3-none-any.whl ] === Expecting 2 wheels to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" 
-"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code_2-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code_2-0.0.1-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code_2-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code_2-0.0.1-py3-none-any.whl diff --git a/acceptance/bundle/artifacts/whl_multiple/script b/acceptance/bundle/artifacts/whl_multiple/script index 380c6c60f8..dfaa1bbb94 100644 --- a/acceptance/bundle/artifacts/whl_multiple/script +++ b/acceptance/bundle/artifacts/whl_multiple/script @@ -6,6 +6,6 @@ title "Expecting 2 wheels in libraries section in /jobs/create" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requests.txt title "Expecting 2 wheels to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort rm -fr out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_no_cleanup/output.txt b/acceptance/bundle/artifacts/whl_no_cleanup/output.txt index a78810dbca..b98f17dab6 100644 --- a/acceptance/bundle/artifacts/whl_no_cleanup/output.txt +++ b/acceptance/bundle/artifacts/whl_no_cleanup/output.txt @@ -11,9 +11,9 @@ Deployment complete! 
dist/my_test_code-0.0.1-py3-none-any.whl === Expecting 1 wheels to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.0.1-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.0.1-py3-none-any.whl >>> [CLI] bundle deploy Building python_artifact... diff --git a/acceptance/bundle/artifacts/whl_no_cleanup/script b/acceptance/bundle/artifacts/whl_no_cleanup/script index 4536f92946..21e0d84ace 100644 --- a/acceptance/bundle/artifacts/whl_no_cleanup/script +++ b/acceptance/bundle/artifacts/whl_no_cleanup/script @@ -3,7 +3,7 @@ trace $CLI bundle deploy trace find.py --expect 1 whl title "Expecting 1 wheels to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort trace $CLI bundle deploy title "No calls to delete internal folder expected" diff --git a/acceptance/bundle/artifacts/whl_prebuilt_multiple/output.txt b/acceptance/bundle/artifacts/whl_prebuilt_multiple/output.txt index 4fe390e635..02bf0311f7 100644 --- a/acceptance/bundle/artifacts/whl_prebuilt_multiple/output.txt +++ b/acceptance/bundle/artifacts/whl_prebuilt_multiple/output.txt @@ -38,8 +38,8 @@ dist/my_test_code-0.0.1-py3-none-any.whl ] === Expecting 2 wheels to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" 
-"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/lib/other_test_code-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.0.1-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/lib/other_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/dist/my_test_code-0.0.1-py3-none-any.whl diff --git a/acceptance/bundle/artifacts/whl_prebuilt_multiple/script b/acceptance/bundle/artifacts/whl_prebuilt_multiple/script index 9df7ea3d98..977c9431ea 100644 --- a/acceptance/bundle/artifacts/whl_prebuilt_multiple/script +++ b/acceptance/bundle/artifacts/whl_prebuilt_multiple/script @@ -9,6 +9,6 @@ title "Expecting 2 wheels in libraries section in /jobs/create" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requests.txt title "Expecting 2 wheels to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort rm out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_prebuilt_outside/output.txt b/acceptance/bundle/artifacts/whl_prebuilt_outside/output.txt index 369db2e47a..39495b82b1 100644 --- a/acceptance/bundle/artifacts/whl_prebuilt_outside/output.txt +++ b/acceptance/bundle/artifacts/whl_prebuilt_outside/output.txt @@ -32,6 +32,6 @@ Deployment complete! 
] === Expecting 2 wheels to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl diff --git a/acceptance/bundle/artifacts/whl_prebuilt_outside/script b/acceptance/bundle/artifacts/whl_prebuilt_outside/script index 939847f6c1..7459288248 100644 --- a/acceptance/bundle/artifacts/whl_prebuilt_outside/script +++ b/acceptance/bundle/artifacts/whl_prebuilt_outside/script @@ -11,6 +11,6 @@ title "Expecting 2 wheels in libraries section in /jobs/create" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requests.txt title "Expecting 2 wheels to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort rm out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_prebuilt_outside_dynamic/output.txt b/acceptance/bundle/artifacts/whl_prebuilt_outside_dynamic/output.txt index 570b124a5b..2615973a40 100644 --- a/acceptance/bundle/artifacts/whl_prebuilt_outside_dynamic/output.txt +++ b/acceptance/bundle/artifacts/whl_prebuilt_outside_dynamic/output.txt @@ -56,6 +56,6 @@ Deployment complete! 
] === Expecting 2 wheels to be uploaded ->>> jq .path -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1+[UNIX_TIME_NANOS][1]-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1+[UNIX_TIME_NANOS][1]-py3-none-any.whl diff --git a/acceptance/bundle/artifacts/whl_prebuilt_outside_dynamic/script b/acceptance/bundle/artifacts/whl_prebuilt_outside_dynamic/script index af4e84b8b3..7306df94e9 100644 --- a/acceptance/bundle/artifacts/whl_prebuilt_outside_dynamic/script +++ b/acceptance/bundle/artifacts/whl_prebuilt_outside_dynamic/script @@ -14,6 +14,6 @@ trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.tasks' out.requ trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body.environments' out.requests.txt title "Expecting 2 wheels to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort rm out.requests.txt diff --git a/acceptance/bundle/artifacts/whl_via_environment_key/output.txt b/acceptance/bundle/artifacts/whl_via_environment_key/output.txt index 8afa59e7d8..a5468f4dae 100644 --- a/acceptance/bundle/artifacts/whl_via_environment_key/output.txt +++ b/acceptance/bundle/artifacts/whl_via_environment_key/output.txt @@ -49,6 +49,6 @@ my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl } === Expecting 1 wheel to be uploaded ->>> jq .path 
-"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" -"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl" +>>> jq -r .body.multipart_form.path | strings +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl +/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl diff --git a/acceptance/bundle/artifacts/whl_via_environment_key/script b/acceptance/bundle/artifacts/whl_via_environment_key/script index 3a0dd929c0..0a4fbfcff2 100644 --- a/acceptance/bundle/artifacts/whl_via_environment_key/script +++ b/acceptance/bundle/artifacts/whl_via_environment_key/script @@ -6,6 +6,6 @@ title "Expecting 1 wheel in environments section in /jobs/create" trace jq -s '.[] | select(.path=="/api/2.2/jobs/create") | .body' out.requests.txt title "Expecting 1 wheel to be uploaded" -trace jq .path < out.requests.txt | grep import | grep whl | sort +trace jq -r '.body.multipart_form.path | strings' < out.requests.txt | grep whl | sort rm out.requests.txt diff --git a/acceptance/bundle/deployment/bind/pipelines/recreate/output.txt b/acceptance/bundle/deployment/bind/pipelines/recreate/output.txt index af42322dee..04f3b651bf 100644 --- a/acceptance/bundle/deployment/bind/pipelines/recreate/output.txt +++ b/acceptance/bundle/deployment/bind/pipelines/recreate/output.txt @@ -1,5 +1,5 @@ ->>> print_requests.py ^//import-file/ ^//workspace/ +>>> print_requests.py ^//workspace/ >>> [CLI] bundle summary -o json @@ -31,4 +31,4 @@ Deploying resources... Updating deployment state... Deployment complete! 
->>> print_requests.py ^//import-file/ ^//workspace/ ^//telemetry-ext +>>> print_requests.py ^//workspace/ ^//telemetry-ext diff --git a/acceptance/bundle/deployment/bind/pipelines/recreate/script b/acceptance/bundle/deployment/bind/pipelines/recreate/script index aba4cfb97a..a6dcfbf583 100644 --- a/acceptance/bundle/deployment/bind/pipelines/recreate/script +++ b/acceptance/bundle/deployment/bind/pipelines/recreate/script @@ -6,11 +6,11 @@ add_repl.py $NEW_PIPELINE_ID NEW_PIPELINE_ID rm -f out.requests.txt trace musterr $CLI bundle deployment bind foo $NEW_PIPELINE_ID &> out.bind-fail.$DATABRICKS_BUNDLE_ENGINE.txt -print_requests.py '^//import-file/' '^//workspace/' +print_requests.py '^//workspace/' rm -f out.requests.txt trace $CLI bundle deployment bind foo $NEW_PIPELINE_ID --auto-approve &> out.bind-success.$DATABRICKS_BUNDLE_ENGINE.txt -trace print_requests.py '^//import-file/' '^//workspace/' +trace print_requests.py '^//workspace/' trace $CLI bundle summary -o json | jq .resources > out.summary.json trace $CLI bundle plan @@ -19,4 +19,4 @@ trace musterr $CLI bundle deploy rm -f out.requests.txt trace $CLI bundle deploy --auto-approve -trace print_requests.py '^//import-file/' '^//workspace/' '^//telemetry-ext' > out.deploy.requests.json +trace print_requests.py '^//workspace/' '^//telemetry-ext' > out.deploy.requests.json diff --git a/acceptance/bundle/deployment/bind/pipelines/update/out.deploy.requests.direct.json b/acceptance/bundle/deployment/bind/pipelines/update/out.deploy.requests.direct.json index 78f256ba44..0ff6df9c9c 100644 --- a/acceptance/bundle/deployment/bind/pipelines/update/out.deploy.requests.direct.json +++ b/acceptance/bundle/deployment/bind/pipelines/update/out.deploy.requests.direct.json @@ -1,17 +1,3 @@ -{ - "method": "POST", - "path": "/api/2.0/workspace/mkdirs", - "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-pipeline-recreate/default/artifacts/.internal" - } -} -{ - "method": "POST", - "path": 
"/api/2.0/workspace/mkdirs", - "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-pipeline-recreate/default/files" - } -} { "method": "PUT", "path": "/api/2.0/pipelines/[NEW_PIPELINE_ID]", diff --git a/acceptance/bundle/deployment/bind/pipelines/update/out.deploy.requests.terraform.json b/acceptance/bundle/deployment/bind/pipelines/update/out.deploy.requests.terraform.json index 3c8d1946b8..ab034f0d55 100644 --- a/acceptance/bundle/deployment/bind/pipelines/update/out.deploy.requests.terraform.json +++ b/acceptance/bundle/deployment/bind/pipelines/update/out.deploy.requests.terraform.json @@ -1,17 +1,3 @@ -{ - "method": "POST", - "path": "/api/2.0/workspace/mkdirs", - "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-pipeline-recreate/default/artifacts/.internal" - } -} -{ - "method": "POST", - "path": "/api/2.0/workspace/mkdirs", - "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-pipeline-recreate/default/files" - } -} { "method": "PUT", "path": "/api/2.0/pipelines/[NEW_PIPELINE_ID]", diff --git a/acceptance/bundle/deployment/bind/pipelines/update/output.txt b/acceptance/bundle/deployment/bind/pipelines/update/output.txt index f88ed70d69..da118c9981 100644 --- a/acceptance/bundle/deployment/bind/pipelines/update/output.txt +++ b/acceptance/bundle/deployment/bind/pipelines/update/output.txt @@ -12,4 +12,4 @@ Deploying resources... Updating deployment state... Deployment complete! 
->>> print_requests.py ^//import-file/ ^//workspace/delete ^//telemetry-ext +>>> print_requests.py ^//workspace/ ^//telemetry-ext diff --git a/acceptance/bundle/deployment/bind/pipelines/update/script b/acceptance/bundle/deployment/bind/pipelines/update/script index 5d2e487f10..8b1441a746 100644 --- a/acceptance/bundle/deployment/bind/pipelines/update/script +++ b/acceptance/bundle/deployment/bind/pipelines/update/script @@ -3,15 +3,15 @@ add_repl.py $NEW_PIPELINE_ID NEW_PIPELINE_ID rm -f out.requests.txt trace musterr $CLI bundle deployment bind foo $NEW_PIPELINE_ID &> out.bind-fail.$DATABRICKS_BUNDLE_ENGINE.txt -print_requests.py '^//import-file/' '^//workspace/delete' +print_requests.py '^//workspace/' rm -f out.requests.txt trace $CLI bundle deployment bind foo $NEW_PIPELINE_ID --auto-approve &> out.bind-success.$DATABRICKS_BUNDLE_ENGINE.txt -print_requests.py '^//import-file/' '^//workspace/delete' +print_requests.py '^//workspace/' trace $CLI bundle summary -o json > out.summary.json trace $CLI bundle plan rm -f out.requests.txt trace $CLI bundle deploy --auto-approve -trace print_requests.py '^//import-file/' '^//workspace/delete' '^//telemetry-ext' > out.deploy.requests.$DATABRICKS_BUNDLE_ENGINE.json +trace print_requests.py '^//workspace/' '^//telemetry-ext' > out.deploy.requests.$DATABRICKS_BUNDLE_ENGINE.json diff --git a/acceptance/bundle/libraries/outside_of_bundle_root/output.txt b/acceptance/bundle/libraries/outside_of_bundle_root/output.txt index e7d6530536..85c2d65d20 100644 --- a/acceptance/bundle/libraries/outside_of_bundle_root/output.txt +++ b/acceptance/bundle/libraries/outside_of_bundle_root/output.txt @@ -53,8 +53,12 @@ Deployment complete! 
>>> cat out.requests.txt { "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/outside_of_bundle_root/default/artifacts/.internal/test.whl", - "q": { - "overwrite": "true" + "path": "/api/2.0/workspace/import", + "body": { + "multipart_form": { + "format": "AUTO", + "overwrite": "true", + "path": "/Workspace/Users/[USERNAME]/.bundle/outside_of_bundle_root/default/artifacts/.internal/test.whl" + } } } diff --git a/acceptance/bundle/libraries/outside_of_bundle_root/script b/acceptance/bundle/libraries/outside_of_bundle_root/script index ff72be1b69..74f4f15cf8 100644 --- a/acceptance/bundle/libraries/outside_of_bundle_root/script +++ b/acceptance/bundle/libraries/outside_of_bundle_root/script @@ -11,6 +11,6 @@ title "Check that the job libraries are uploaded and the path is correct in the trace cat out.requests.txt | jq 'select(.path == "/api/2.2/jobs/create")' | jq '.body.tasks[0].libraries' trace cat out.requests.txt | jq 'select(.path == "/api/2.2/jobs/create")' | jq '.body.environments[0].spec.dependencies' trace cat out.requests.txt | jq 'select(.path == "/api/2.0/pipelines" and .method == "POST")' | jq '.body.environment.dependencies' -trace cat out.requests.txt | jq 'select(.path | test("/api/2.0/workspace-files/import-file/Workspace/Users/.*/.bundle/outside_of_bundle_root/default/artifacts/.internal/test.whl"))' +trace cat out.requests.txt | jq 'select(.path == "/api/2.0/workspace/import" and (.body.multipart_form.path // "" | test("/Workspace/Users/.*/.bundle/outside_of_bundle_root/default/artifacts/.internal/test.whl")))' | jq 'del(.body.multipart_form.content)' rm out.requests.txt diff --git a/acceptance/bundle/resource_deps/remote_app_url/output.txt b/acceptance/bundle/resource_deps/remote_app_url/output.txt index 81b300ab11..279b795afb 100644 --- a/acceptance/bundle/resource_deps/remote_app_url/output.txt +++ b/acceptance/bundle/resource_deps/remote_app_url/output.txt @@ -14,7 +14,7 @@ create 
pipelines.mypipeline Plan: 2 to add, 0 to change, 0 to delete, 0 unchanged ->>> print_requests.py ^//import-file/ +>>> print_requests.py ^//workspace/import { "method": "POST", "path": "/api/2.0/workspace/mkdirs", @@ -29,7 +29,7 @@ Deploying resources... Updating deployment state... Deployment complete! ->>> print_requests.py ^//import-file/ +>>> print_requests.py ^//workspace/import { "method": "POST", "path": "/api/2.0/workspace/delete", @@ -91,7 +91,7 @@ All files and directories at the following location will be deleted: /Workspace/ Deleting files... Destroy complete! ->>> print_requests.py --sort ^//import-file/ +>>> print_requests.py --sort ^//workspace/import { "method": "DELETE", "path": "/api/2.0/apps/myapp" diff --git a/acceptance/bundle/resource_deps/remote_app_url/script b/acceptance/bundle/resource_deps/remote_app_url/script index d38692366b..51ce6274ed 100644 --- a/acceptance/bundle/resource_deps/remote_app_url/script +++ b/acceptance/bundle/resource_deps/remote_app_url/script @@ -1,9 +1,9 @@ trace $CLI bundle validate trace $CLI bundle plan -trace print_requests.py '^//import-file/' +trace print_requests.py '^//workspace/import' trace $CLI bundle deploy -trace print_requests.py '^//import-file/' +trace print_requests.py '^//workspace/import' trace $CLI bundle destroy --auto-approve -trace print_requests.py --sort '^//import-file/' +trace print_requests.py --sort '^//workspace/import' diff --git a/acceptance/bundle/resources/quality_monitors/change_assets_dir/output.txt b/acceptance/bundle/resources/quality_monitors/change_assets_dir/output.txt index 08f6c53ae1..b569000be3 100644 --- a/acceptance/bundle/resources/quality_monitors/change_assets_dir/output.txt +++ b/acceptance/bundle/resources/quality_monitors/change_assets_dir/output.txt @@ -23,7 +23,7 @@ Deployment complete! 
>>> [CLI] bundle plan Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged ->>> print_requests.py ^//import-file/ ^//workspace/ ^//telemetry-ext +>>> print_requests.py ^//workspace/ ^//telemetry-ext >>> [CLI] bundle destroy --auto-approve The following resources will be deleted: diff --git a/acceptance/bundle/resources/quality_monitors/change_assets_dir/script b/acceptance/bundle/resources/quality_monitors/change_assets_dir/script index 6caf49a7f4..2bdc84ee12 100644 --- a/acceptance/bundle/resources/quality_monitors/change_assets_dir/script +++ b/acceptance/bundle/resources/quality_monitors/change_assets_dir/script @@ -26,5 +26,5 @@ trace errcode $CLI bundle plan -o json &> out.plan.$DATABRICKS_BUNDLE_ENGINE.jso rm out.requests.txt trace errcode $CLI bundle deploy &> out.deploy.$DATABRICKS_BUNDLE_ENGINE.txt -trace print_requests.py '^//import-file/' '^//workspace/' '^//telemetry-ext' > out.deploy.requests.$DATABRICKS_BUNDLE_ENGINE.json +trace print_requests.py '^//workspace/' '^//telemetry-ext' > out.deploy.requests.$DATABRICKS_BUNDLE_ENGINE.json trace errcode $CLI bundle plan &> out.plan_after_deploy.$DATABRICKS_BUNDLE_ENGINE.txt diff --git a/acceptance/bundle/resources/quality_monitors/change_output_schema_name/output.txt b/acceptance/bundle/resources/quality_monitors/change_output_schema_name/output.txt index d67ee41975..0f021b4f87 100644 --- a/acceptance/bundle/resources/quality_monitors/change_output_schema_name/output.txt +++ b/acceptance/bundle/resources/quality_monitors/change_output_schema_name/output.txt @@ -36,7 +36,7 @@ Deploying resources... Updating deployment state... Deployment complete! 
->>> print_requests.py ^//import-file/ ^//workspace/ ^//telemetry-ext +>>> print_requests.py ^//workspace/ ^//telemetry-ext >>> [CLI] bundle plan Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged diff --git a/acceptance/bundle/resources/quality_monitors/change_output_schema_name/script b/acceptance/bundle/resources/quality_monitors/change_output_schema_name/script index 72c57e0840..e7e800e526 100644 --- a/acceptance/bundle/resources/quality_monitors/change_output_schema_name/script +++ b/acceptance/bundle/resources/quality_monitors/change_output_schema_name/script @@ -27,6 +27,6 @@ trace $CLI bundle plan -o json > out.plan.$DATABRICKS_BUNDLE_ENGINE.json rm out.requests.txt trace $CLI bundle deploy # dashboard_id is output only field that terraform adds -trace print_requests.py '^//import-file/' '^//workspace/' '^//telemetry-ext' | grep -v '"dashboard_id":' > out.deploy.requests.json +trace print_requests.py '^//workspace/' '^//telemetry-ext' | grep -v '"dashboard_id":' > out.deploy.requests.json trace $CLI bundle plan | contains.py "1 unchanged" diff --git a/acceptance/bundle/resources/quality_monitors/change_table_name/output.txt b/acceptance/bundle/resources/quality_monitors/change_table_name/output.txt index 879222aa8a..48520444bb 100644 --- a/acceptance/bundle/resources/quality_monitors/change_table_name/output.txt +++ b/acceptance/bundle/resources/quality_monitors/change_table_name/output.txt @@ -23,7 +23,7 @@ Deployment complete! 
>>> [CLI] bundle plan Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged ->>> print_requests.py ^//import-file/ ^//workspace/ ^//telemetry-ext +>>> print_requests.py ^//workspace/ ^//telemetry-ext >>> [CLI] bundle destroy --auto-approve The following resources will be deleted: diff --git a/acceptance/bundle/resources/quality_monitors/change_table_name/script b/acceptance/bundle/resources/quality_monitors/change_table_name/script index 891aece1c1..0bec176f6a 100644 --- a/acceptance/bundle/resources/quality_monitors/change_table_name/script +++ b/acceptance/bundle/resources/quality_monitors/change_table_name/script @@ -26,7 +26,7 @@ trace errcode $CLI bundle plan -o json &> out.plan.$DATABRICKS_BUNDLE_ENGINE.jso rm out.requests.txt trace errcode $CLI bundle deploy &> out.deploy.$DATABRICKS_BUNDLE_ENGINE.txt -trace print_requests.py '^//import-file/' '^//workspace/' '^//telemetry-ext' > out.deploy.requests.$DATABRICKS_BUNDLE_ENGINE.json +trace print_requests.py '^//workspace/' '^//telemetry-ext' > out.deploy.requests.$DATABRICKS_BUNDLE_ENGINE.json trace errcode $CLI bundle plan &> out.plan_after_deploy.$DATABRICKS_BUNDLE_ENGINE.txt trace errcode $CLI quality-monitors get ${TABLE_NAME}_2 2> /dev/null > out.get.$DATABRICKS_BUNDLE_ENGINE.json diff --git a/acceptance/bundle/resources/quality_monitors/create/output.txt b/acceptance/bundle/resources/quality_monitors/create/output.txt index 8037d5ec9c..7afde18578 100644 --- a/acceptance/bundle/resources/quality_monitors/create/output.txt +++ b/acceptance/bundle/resources/quality_monitors/create/output.txt @@ -16,7 +16,7 @@ Table main.qm_test_[UNIQUE_NAME].test_table is now visible (catalog_name=main) >>> [CLI] bundle plan -o json ->>> print_requests.py ^//import-file/ ^//workspace/ ^//telemetry-ext +>>> print_requests.py ^//workspace/ ^//telemetry-ext >>> [CLI] bundle plan -o json diff --git a/acceptance/bundle/resources/quality_monitors/create/script b/acceptance/bundle/resources/quality_monitors/create/script index 
78c7853b26..d14dee8fbe 100644 --- a/acceptance/bundle/resources/quality_monitors/create/script +++ b/acceptance/bundle/resources/quality_monitors/create/script @@ -21,7 +21,7 @@ trace $CLI bundle plan -o json > out.plan_create.$DATABRICKS_BUNDLE_ENGINE.json rm out.requests.txt trace $CLI bundle deploy &> out.deploy.$DATABRICKS_BUNDLE_ENGINE.txt -trace print_requests.py '^//import-file/' '^//workspace/' '^//telemetry-ext' > out.deploy.requests.json +trace print_requests.py '^//workspace/' '^//telemetry-ext' > out.deploy.requests.json # store state to ensure we have table_name there print_state.py | grep name > out.state.$DATABRICKS_BUNDLE_ENGINE.txt diff --git a/acceptance/bundle/resources/secret_scopes/delete_scope/out.deploy.requests.txt b/acceptance/bundle/resources/secret_scopes/delete_scope/out.deploy.requests.txt index 2d469e4abc..b67ab28777 100644 --- a/acceptance/bundle/resources/secret_scopes/delete_scope/out.deploy.requests.txt +++ b/acceptance/bundle/resources/secret_scopes/delete_scope/out.deploy.requests.txt @@ -1,5 +1,5 @@ ->>> print_requests.py ^//import-file/ ^//workspace/ ^//telemetry-ext +>>> print_requests.py ^//workspace/ ^//telemetry-ext { "method": "POST", "path": "/api/2.0/secrets/scopes/delete", diff --git a/acceptance/bundle/resources/secret_scopes/delete_scope/script b/acceptance/bundle/resources/secret_scopes/delete_scope/script index b12e98775a..02a7ca6d1e 100755 --- a/acceptance/bundle/resources/secret_scopes/delete_scope/script +++ b/acceptance/bundle/resources/secret_scopes/delete_scope/script @@ -15,4 +15,4 @@ trace $CLI bundle plan &> out.plan.$DATABRICKS_BUNDLE_ENGINE.txt rm out.requests.txt trace $CLI bundle deploy -trace print_requests.py '^//import-file/' '^//workspace/' '^//telemetry-ext' &> out.deploy.requests.txt +trace print_requests.py '^//workspace/' '^//telemetry-ext' &> out.deploy.requests.txt diff --git a/acceptance/bundle/sync-upload-edge-cases/databricks.yml b/acceptance/bundle/sync-upload-edge-cases/databricks.yml new 
file mode 100644 index 0000000000..2261ff177c --- /dev/null +++ b/acceptance/bundle/sync-upload-edge-cases/databricks.yml @@ -0,0 +1,2 @@ +bundle: + name: upload-edge-cases diff --git a/acceptance/bundle/sync-upload-edge-cases/out.test.toml b/acceptance/bundle/sync-upload-edge-cases/out.test.toml new file mode 100644 index 0000000000..e90b6d5d1b --- /dev/null +++ b/acceptance/bundle/sync-upload-edge-cases/out.test.toml @@ -0,0 +1,3 @@ +Local = true +Cloud = false +EnvMatrix.DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/sync-upload-edge-cases/output.txt b/acceptance/bundle/sync-upload-edge-cases/output.txt new file mode 100644 index 0000000000..14a80b3044 --- /dev/null +++ b/acceptance/bundle/sync-upload-edge-cases/output.txt @@ -0,0 +1,72 @@ + +>>> [CLI] bundle sync --output text +Action: PUT: .gitignore, dashboard.lvdash.json, databricks.yml, empty.txt, héllo.txt, large.bin, notebook.py, with spaces.txt +Initial Sync Complete +Uploaded .gitignore +Uploaded dashboard.lvdash.json +Uploaded databricks.yml +Uploaded empty.txt +Uploaded héllo.txt +Uploaded large.bin +Uploaded notebook.py +Uploaded with spaces.txt + +=== uploaded paths from each multipart POST /workspace/import (sorted) +>>> jq -rs + map(select(.path == "/api/2.0/workspace/import")) | + map(.body.multipart_form.path) | + sort | .[] + out.requests.txt +/Workspace/Users/[USERNAME]/.bundle/upload-edge-cases/default/files/.gitignore +/Workspace/Users/[USERNAME]/.bundle/upload-edge-cases/default/files/dashboard.lvdash.json +/Workspace/Users/[USERNAME]/.bundle/upload-edge-cases/default/files/databricks.yml +/Workspace/Users/[USERNAME]/.bundle/upload-edge-cases/default/files/empty.txt +/Workspace/Users/[USERNAME]/.bundle/upload-edge-cases/default/files/héllo.txt +/Workspace/Users/[USERNAME]/.bundle/upload-edge-cases/default/files/large.bin +/Workspace/Users/[USERNAME]/.bundle/upload-edge-cases/default/files/notebook.py 
+/Workspace/Users/[USERNAME]/.bundle/upload-edge-cases/default/files/with spaces.txt + +=== every upload set format=AUTO +>>> jq -rs + map(select(.path == "/api/2.0/workspace/import")) | + map(.body.multipart_form.format) | unique + out.requests.txt +[ + "AUTO" +] + +=== recorded content for the small text inputs +>>> jq -rs + map(select(.path == "/api/2.0/workspace/import")) | + map(select(.body.multipart_form.path | test("/(notebook\\.py|héllo\\.txt|with spaces\\.txt|empty\\.txt|dashboard\\.lvdash\\.json)$"))) | + map({path: (.body.multipart_form.path | sub(".*/"; "")), content: .body.multipart_form.content}) | + sort_by(.path) | .[] + out.requests.txt +{ + "path": "dashboard.lvdash.json", + "content": "{\"datasets\":[],\"pages\":[]}\n" +} +{ + "path": "empty.txt", + "content": "" +} +{ + "path": "héllo.txt", + "content": "hello, naïve world\n" +} +{ + "path": "notebook.py", + "content": "# Databricks notebook source\nprint(\"hello\")\n" +} +{ + "path": "with spaces.txt", + "content": "hello, world\n" +} + +=== 12 MiB binary upload is summarized, not stored byte-for-byte +>>> jq -rs + map(select(.path == "/api/2.0/workspace/import")) | + map(select(.body.multipart_form.path | endswith("/large.bin"))) | + .[].body.multipart_form.content + out.requests.txt +[binary content [NUMID] bytes] diff --git a/acceptance/bundle/sync-upload-edge-cases/script b/acceptance/bundle/sync-upload-edge-cases/script new file mode 100644 index 0000000000..cc5233f045 --- /dev/null +++ b/acceptance/bundle/sync-upload-edge-cases/script @@ -0,0 +1,75 @@ +# Confidence checks that /workspace/import is a drop-in for the legacy import-file: +# verify the CLI's multipart upload pipeline handles the cases that differ between the +# two endpoints — large content (>10 MiB), empty content, varied extensions/object types, +# and non-ASCII / spaced filenames in the multipart `path` field. + +# Exclude framework-generated and script-internal files from the sync. 
We only want the +# inputs we explicitly generate below to show up in the recorded uploads. +cat > .gitignore <<'EOF' +script +output.txt +out.requests.txt +repls.json +test.toml +EOF + +# 12 MiB binary file. The legacy /workspace/import JSON-body cap is 10 MiB; multipart has +# no such cap. Generated dynamically to keep the repo small. +python3 -c "open('large.bin', 'wb').write(b'\\0' * (12 * 1024 * 1024))" + +# Empty file (multipart encodes an empty `content` part — distinct from JSON's empty string). +touch empty.txt + +# Python notebook: auto-detected as NOTEBOOK by /workspace/import; testserver mirrors +# this and stores the object at the path with .py stripped. +cat > notebook.py <<'EOF' +# Databricks notebook source +print("hello") +EOF + +# Lakeview dashboard descriptor: the real /workspace/import assigns object_type=DASHBOARD +# and preserves the .lvdash.json extension. The testserver doesn't emulate that and stores +# it as a generic FILE — but the upload-side request shape (which is what this test asserts) +# is identical. +echo '{"datasets":[],"pages":[]}' > dashboard.lvdash.json + +# Non-ASCII filename. Multipart encodes filenames with RFC 5987 / quoted-string rules, +# distinct from URL-encoding in the legacy import-file endpoint. +echo "hello, naïve world" > "héllo.txt" + +# Filename with a space. 
+echo "hello, world" > "with spaces.txt" + +trace $CLI bundle sync --output text 2>&1 | sort + +title "uploaded paths from each multipart POST /workspace/import (sorted)" +trace jq -rs ' + map(select(.path == "/api/2.0/workspace/import")) | + map(.body.multipart_form.path) | + sort | .[] +' out.requests.txt + +title "every upload set format=AUTO" +trace jq -rs ' + map(select(.path == "/api/2.0/workspace/import")) | + map(.body.multipart_form.format) | unique +' out.requests.txt + +title "recorded content for the small text inputs" +trace jq -rs ' + map(select(.path == "/api/2.0/workspace/import")) | + map(select(.body.multipart_form.path | test("/(notebook\\.py|héllo\\.txt|with spaces\\.txt|empty\\.txt|dashboard\\.lvdash\\.json)$"))) | + map({path: (.body.multipart_form.path | sub(".*/"; "")), content: .body.multipart_form.content}) | + sort_by(.path) | .[] +' out.requests.txt + +title "12 MiB binary upload is summarized, not stored byte-for-byte" +trace jq -rs ' + map(select(.path == "/api/2.0/workspace/import")) | + map(select(.body.multipart_form.path | endswith("/large.bin"))) | + .[].body.multipart_form.content +' out.requests.txt + +# Drop the recorded requests; their workspace-prefixed paths are noisy and the +# assertions above already pin the relevant fields. +rm out.requests.txt .gitignore diff --git a/acceptance/bundle/sync-upload-edge-cases/test.toml b/acceptance/bundle/sync-upload-edge-cases/test.toml new file mode 100644 index 0000000000..877dd9b2a9 --- /dev/null +++ b/acceptance/bundle/sync-upload-edge-cases/test.toml @@ -0,0 +1,14 @@ +RecordRequests = true + +# All sync uploads go through the same /workspace/import multipart code path; running +# both engine variants is unnecessary here. 
+EnvMatrix.DATABRICKS_BUNDLE_ENGINE = ["direct"] + +Ignore = [ + "large.bin", + "empty.txt", + "notebook.py", + "dashboard.lvdash.json", + "héllo.txt", + "with spaces.txt", +] diff --git a/acceptance/bundle/templates/default-python/classic/script b/acceptance/bundle/templates/default-python/classic/script index 6b79c6f225..1cf42e037e 100644 --- a/acceptance/bundle/templates/default-python/classic/script +++ b/acceptance/bundle/templates/default-python/classic/script @@ -13,12 +13,12 @@ $CLI bundle plan -o json -t prod > ../../out.plan_prod.$DATABRICKS_BUNDLE_ENGINE rm ../../out.requests.txt # With --plan variant we don't build artifacts, so we can filter out relevant log lines $CLI bundle deploy -t dev $(readplanarg ../../out.plan_dev.direct.json) 2>&1 | grep -vE '^Building python_artifact|^Uploading .databricks' -print_requests.py --sort '^//import-file/' '^//telemetry-ext' > ../../out.requests.dev.$DATABRICKS_BUNDLE_ENGINE.txt +print_requests.py --sort '^//workspace/import' '^//telemetry-ext' > ../../out.requests.dev.$DATABRICKS_BUNDLE_ENGINE.txt trace $CLI bundle plan -t dev # check if if there is drift trace $CLI bundle plan -t dev -o json > ../../out.plan_after_deploy_dev.$DATABRICKS_BUNDLE_ENGINE.json $CLI bundle deploy -t prod $(readplanarg ../../out.plan_prod.direct.json) 2>&1 | grep -vE '^Building python_artifact|^Uploading dist' -print_requests.py --sort '^//import-file/' '^//telemetry-ext' > ../../out.requests.prod.$DATABRICKS_BUNDLE_ENGINE.txt +print_requests.py --sort '^//workspace/import' '^//telemetry-ext' > ../../out.requests.prod.$DATABRICKS_BUNDLE_ENGINE.txt trace $CLI bundle plan -t prod # check if there is drift trace $CLI bundle plan -t prod -o json > ../../out.plan_after_deploy_prod.$DATABRICKS_BUNDLE_ENGINE.json diff --git a/acceptance/bundle/templates/default-python/serverless/script b/acceptance/bundle/templates/default-python/serverless/script index 86272b4a26..12d51337bd 100644 --- 
a/acceptance/bundle/templates/default-python/serverless/script +++ b/acceptance/bundle/templates/default-python/serverless/script @@ -12,12 +12,12 @@ $CLI bundle plan -o json -t prod > ../../out.plan_prod.$DATABRICKS_BUNDLE_ENGINE rm ../../out.requests.txt trace $CLI bundle deploy -t dev -print_requests.py --sort '^//import-file/' '^//telemetry-ext' > ../../out.requests.dev.$DATABRICKS_BUNDLE_ENGINE.txt +print_requests.py --sort '^//workspace/import' '^//telemetry-ext' > ../../out.requests.dev.$DATABRICKS_BUNDLE_ENGINE.txt trace $CLI bundle plan -t dev # check if if there is drift trace $CLI bundle plan -t dev -o json > ../../out.plan_after_deploy_dev.$DATABRICKS_BUNDLE_ENGINE.json trace $CLI bundle deploy -t prod -print_requests.py --sort '^//import-file/' '^//telemetry-ext' > ../../out.requests.prod.$DATABRICKS_BUNDLE_ENGINE.txt +print_requests.py --sort '^//workspace/import' '^//telemetry-ext' > ../../out.requests.prod.$DATABRICKS_BUNDLE_ENGINE.txt trace $CLI bundle plan -t prod # check if there is drift trace $CLI bundle plan -t prod -o json > ../../out.plan_after_deploy_prod.$DATABRICKS_BUNDLE_ENGINE.json diff --git a/acceptance/bundle/upload/internal_server_error/output.txt b/acceptance/bundle/upload/internal_server_error/output.txt index 5cde6aad65..24d8b2cc4a 100644 --- a/acceptance/bundle/upload/internal_server_error/output.txt +++ b/acceptance/bundle/upload/internal_server_error/output.txt @@ -1,7 +1,7 @@ -Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... 
+Error: Failed to acquire deployment lock: Internal Server Error Error: Internal Server Error (500) -Endpoint: POST [DATABRICKS_URL]/api/2.0/workspace-files/import-file/Workspace%2FUsers%2F[USERNAME]%2F.bundle%2Ftest-bundle%2Fdefault%2Ffiles%2Ffile_to_upload.txt?overwrite=true +Endpoint: POST [DATABRICKS_URL]/api/2.0/workspace/import HTTP Status: 500 Internal Server Error API error_code: API message: Internal Server Error diff --git a/acceptance/bundle/upload/internal_server_error/test.toml b/acceptance/bundle/upload/internal_server_error/test.toml index 8ff8501b6d..b1d2b7dc49 100644 --- a/acceptance/bundle/upload/internal_server_error/test.toml +++ b/acceptance/bundle/upload/internal_server_error/test.toml @@ -1,3 +1,3 @@ [[Server]] -Pattern = "POST /api/2.0/workspace-files/import-file/Workspace/Users/tester@databricks.com/.bundle/test-bundle/default/files/file_to_upload.txt" +Pattern = "POST /api/2.0/workspace/import" Response.StatusCode = 500 diff --git a/acceptance/bundle/upload/timeout/output.txt b/acceptance/bundle/upload/timeout/output.txt index c314a24770..3b6eb72f28 100644 --- a/acceptance/bundle/upload/timeout/output.txt +++ b/acceptance/bundle/upload/timeout/output.txt @@ -1,3 +1,3 @@ -Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files... 
-Error: Post "[DATABRICKS_URL]/api/2.0/workspace-files/import-file/Workspace%2FUsers%2F[USERNAME]%2F.bundle%2Ftest-bundle%2Fdefault%2Ffiles%2Ffile_to_upload.txt?overwrite=true": request timed out after 5s of inactivity +Error: Failed to acquire deployment lock: Post "[DATABRICKS_URL]/api/2.0/workspace/import": request timed out after 5s of inactivity +Error: Post "[DATABRICKS_URL]/api/2.0/workspace/import": request timed out after 5s of inactivity diff --git a/acceptance/bundle/upload/timeout/test.toml b/acceptance/bundle/upload/timeout/test.toml index de2d8ce015..ed84a5d1db 100644 --- a/acceptance/bundle/upload/timeout/test.toml +++ b/acceptance/bundle/upload/timeout/test.toml @@ -4,5 +4,5 @@ DATABRICKS_BUNDLE_HTTP_TIMEOUT_SECONDS = "5" [[Server]] # CLI aborts after a single attempt when the HTTP timeout fires. Delay = "30s" -Pattern = "POST /api/2.0/workspace-files/import-file/Workspace/Users/tester@databricks.com/.bundle/test-bundle/default/files/file_to_upload.txt" +Pattern = "POST /api/2.0/workspace/import" Response.StatusCode = 200 diff --git a/acceptance/bundle/user_agent/output.txt b/acceptance/bundle/user_agent/output.txt index bf12862427..877fbff9c0 100644 --- a/acceptance/bundle/user_agent/output.txt +++ b/acceptance/bundle/user_agent/output.txt @@ -8,13 +8,13 @@ OK deploy.direct /api/2.0/workspace/get-status engine/direct OK deploy.direct /api/2.0/workspace/get-status engine/direct OK deploy.direct /api/2.0/workspace/get-status engine/direct OK deploy.direct /api/2.0/workspace/get-status engine/direct -OK deploy.direct /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/empty.py engine/direct -OK deploy.direct /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock engine/direct -OK deploy.direct /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json engine/direct -OK deploy.direct 
/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json engine/direct -OK deploy.direct /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/resources.json engine/direct OK deploy.direct /api/2.0/workspace/delete engine/direct OK deploy.direct /api/2.0/workspace/delete engine/direct +OK deploy.direct /api/2.0/workspace/import engine/direct +OK deploy.direct /api/2.0/workspace/import engine/direct +OK deploy.direct /api/2.0/workspace/import engine/direct +OK deploy.direct /api/2.0/workspace/import engine/direct +OK deploy.direct /api/2.0/workspace/import engine/direct OK deploy.direct /api/2.0/workspace/mkdirs engine/direct OK deploy.direct /api/2.1/unity-catalog/schemas engine/direct MISS deploy.direct /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' @@ -28,13 +28,13 @@ OK deploy.terraform /api/2.0/workspace/get-status engine/terraform OK deploy.terraform /api/2.0/workspace/get-status engine/terraform OK deploy.terraform /api/2.0/workspace/get-status engine/terraform OK deploy.terraform /api/2.0/workspace/get-status engine/terraform -OK deploy.terraform /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/empty.py engine/terraform -OK deploy.terraform /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock engine/terraform -OK deploy.terraform /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json engine/terraform -OK deploy.terraform /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json engine/terraform -OK deploy.terraform /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate engine/terraform OK deploy.terraform 
/api/2.0/workspace/delete engine/terraform OK deploy.terraform /api/2.0/workspace/delete engine/terraform +OK deploy.terraform /api/2.0/workspace/import engine/terraform +OK deploy.terraform /api/2.0/workspace/import engine/terraform +OK deploy.terraform /api/2.0/workspace/import engine/terraform +OK deploy.terraform /api/2.0/workspace/import engine/terraform +OK deploy.terraform /api/2.0/workspace/import engine/terraform OK deploy.terraform /api/2.0/workspace/mkdirs engine/terraform MISS deploy.terraform /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS deploy.terraform /api/2.1/unity-catalog/schemas/mycatalog.myschema 'databricks-tf-provider/1.113.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/schema auth/pat' @@ -53,8 +53,8 @@ OK destroy.direct /api/2.0/workspace/get-status engine/direct OK destroy.direct /api/2.0/workspace/get-status engine/direct OK destroy.direct /api/2.0/workspace/get-status engine/direct OK destroy.direct /api/2.1/unity-catalog/schemas/mycatalog.myschema engine/direct -OK destroy.direct /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock engine/direct OK destroy.direct /api/2.0/workspace/delete engine/direct +OK destroy.direct /api/2.0/workspace/import engine/direct MISS destroy.direct /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS destroy.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_destroy cmd-exec-id/[UUID] interactive/none auth/pat' MISS destroy.terraform /api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_destroy cmd-exec-id/[UUID] interactive/none auth/pat' @@ -64,8 +64,8 @@ OK 
destroy.terraform /api/2.0/workspace/export engine/terraform OK destroy.terraform /api/2.0/workspace/get-status engine/terraform OK destroy.terraform /api/2.0/workspace/get-status engine/terraform OK destroy.terraform /api/2.0/workspace/get-status engine/terraform -OK destroy.terraform /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock engine/terraform OK destroy.terraform /api/2.0/workspace/delete engine/terraform +OK destroy.terraform /api/2.0/workspace/import engine/terraform MISS destroy.terraform /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS destroy.terraform /api/2.1/unity-catalog/schemas/mycatalog.myschema 'databricks-tf-provider/1.113.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/schema auth/pat' MISS destroy.terraform /api/2.1/unity-catalog/current-metastore-assignment 'databricks-tf-provider/1.113.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/schema auth/pat' diff --git a/acceptance/bundle/user_agent/simple/out.requests.deploy.direct.json b/acceptance/bundle/user_agent/simple/out.requests.deploy.direct.json index cc39aad6e9..af26270725 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.deploy.direct.json +++ b/acceptance/bundle/user_agent/simple/out.requests.deploy.direct.json @@ -130,9 +130,10 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/empty.py", - "q": { - "overwrite": "true" + "path": "/api/2.0/workspace/delete", + "body": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal", + "recursive": true } } { @@ -142,15 +143,9 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", - "q": { - 
"overwrite": "false" - }, + "path": "/api/2.0/workspace/delete", "body": { - "ID": "[UUID]", - "AcquisitionTime": "[TIMESTAMP]", - "IsForced": false, - "User": "[USERNAME]" + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" } } { @@ -160,22 +155,14 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json", - "q": { - "overwrite": "true" - }, + "path": "/api/2.0/workspace/import", "body": { - "version": 1, - "seq": 1, - "cli_version": "[DEV_VERSION]", - "timestamp": "[TIMESTAMP]", - "files": [ - { - "local_path": "empty.py", - "is_notebook": false - } - ], - "id": "[UUID]" + "multipart_form": { + "content": "", + "format": "AUTO", + "overwrite": "true", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/empty.py" + } } } { @@ -185,29 +172,13 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json", - "q": { - "overwrite": "true" - }, + "path": "/api/2.0/workspace/import", "body": { - "version": 1, - "config": { - "bundle": { - "name": "test-bundle", - "target": "default", - "git": { - "bundle_root_path": "." 
- } - }, - "workspace": { - "file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" - }, - "resources": {}, - "presets": { - "source_linked_deployment": false - } - }, - "extra": {} + "multipart_form": { + "content": "{\"ID\":\"[UUID]\",\"AcquisitionTime\":\"[TIMESTAMP]\",\"IsForced\":false,\"User\":\"[USERNAME]\"}", + "format": "AUTO", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" + } } } { @@ -217,23 +188,13 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/resources.json", - "q": { - "overwrite": "true" - }, + "path": "/api/2.0/workspace/import", "body": { - "state_version": 2, - "cli_version": "[DEV_VERSION]", - "lineage": "[UUID]", - "serial": 1, - "state": { - "resources.schemas.foo": { - "__id__": "mycatalog.myschema", - "state": { - "catalog_name": "mycatalog", - "name": "myschema" - } - } + "multipart_form": { + "content": "{\"version\":1,\"seq\":1,\"cli_version\":\"[DEV_VERSION]\",\"timestamp\":\"[TIMESTAMP]\",\"files\":[{\"local_path\":\"empty.py\",\"is_notebook\":false}],\"id\":\"[UUID]\"}", + "format": "AUTO", + "overwrite": "true", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json" } } } @@ -244,10 +205,14 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace/delete", + "path": "/api/2.0/workspace/import", "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal", - "recursive": true + "multipart_form": { + "content": "{\n \"version\": 1,\n \"config\": {\n \"bundle\": {\n \"name\": \"test-bundle\",\n \"target\": \"default\",\n \"git\": {\n \"bundle_root_path\": \".\"\n }\n },\n \"workspace\": {\n \"file_path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files\"\n },\n \"resources\": {},\n \"presets\": {\n \"source_linked_deployment\": false\n }\n },\n \"extra\": {}\n}", + "format": "AUTO", + "overwrite": "true", + 
"path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + } } } { @@ -257,9 +222,14 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace/delete", + "path": "/api/2.0/workspace/import", "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" + "multipart_form": { + "content": "{\n \"state_version\": 2,\n \"cli_version\": \"[DEV_VERSION]\",\n \"lineage\": \"[UUID]\",\n \"serial\": 1,\n \"state\": {\n \"resources.schemas.foo\": {\n \"__id__\": \"mycatalog.myschema\",\n \"state\": {\n \"catalog_name\": \"mycatalog\",\n \"name\": \"myschema\"\n }\n }\n }\n}", + "format": "AUTO", + "overwrite": "true", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/resources.json" + } } } { diff --git a/acceptance/bundle/user_agent/simple/out.requests.deploy.terraform.json b/acceptance/bundle/user_agent/simple/out.requests.deploy.terraform.json index 435b188af3..406e02851c 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.deploy.terraform.json +++ b/acceptance/bundle/user_agent/simple/out.requests.deploy.terraform.json @@ -130,9 +130,10 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/empty.py", - "q": { - "overwrite": "true" + "path": "/api/2.0/workspace/delete", + "body": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal", + "recursive": true } } { @@ -142,15 +143,9 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", - "q": { - "overwrite": "false" - }, + "path": "/api/2.0/workspace/delete", "body": { - "ID": "[UUID]", - "AcquisitionTime": "[TIMESTAMP]", - "IsForced": false, - "User": "[USERNAME]" + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" } } { @@ -160,22 +155,14 @@ ] }, "method": "POST", - "path": 
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json", - "q": { - "overwrite": "true" - }, + "path": "/api/2.0/workspace/import", "body": { - "version": 1, - "seq": 1, - "cli_version": "[DEV_VERSION]", - "timestamp": "[TIMESTAMP]", - "files": [ - { - "local_path": "empty.py", - "is_notebook": false - } - ], - "id": "[UUID]" + "multipart_form": { + "content": "", + "format": "AUTO", + "overwrite": "true", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/empty.py" + } } } { @@ -185,29 +172,13 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json", - "q": { - "overwrite": "true" - }, + "path": "/api/2.0/workspace/import", "body": { - "version": 1, - "config": { - "bundle": { - "name": "test-bundle", - "target": "default", - "git": { - "bundle_root_path": "." - } - }, - "workspace": { - "file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" - }, - "resources": {}, - "presets": { - "source_linked_deployment": false - } - }, - "extra": {} + "multipart_form": { + "content": "{\"ID\":\"[UUID]\",\"AcquisitionTime\":\"[TIMESTAMP]\",\"IsForced\":false,\"User\":\"[USERNAME]\"}", + "format": "AUTO", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" + } } } { @@ -217,46 +188,14 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate", - "q": { - "overwrite": "true" - }, + "path": "/api/2.0/workspace/import", "body": { - "version": 4, - "terraform_version": "1.5.5", - "serial": 1, - "lineage": "[UUID]", - "outputs": {}, - "resources": [ - { - "mode": "managed", - "type": "databricks_schema", - "name": "foo", - "provider": "provider[\"registry.terraform.io/databricks/databricks\"]", - "instances": [ - { - "schema_version": 0, - "attributes": { - 
"catalog_name": "mycatalog", - "comment": null, - "enable_predictive_optimization": "INHERIT", - "force_destroy": true, - "id": "mycatalog.myschema", - "metastore_id": "[UUID]", - "name": "myschema", - "owner": "[USERNAME]", - "properties": null, - "provider_config": [], - "schema_id": "[UUID]", - "storage_root": null - }, - "sensitive_attributes": [], - "private": "bnVsbA==" - } - ] - } - ], - "check_results": null + "multipart_form": { + "content": "{\"version\":1,\"seq\":1,\"cli_version\":\"[DEV_VERSION]\",\"timestamp\":\"[TIMESTAMP]\",\"files\":[{\"local_path\":\"empty.py\",\"is_notebook\":false}],\"id\":\"[UUID]\"}", + "format": "AUTO", + "overwrite": "true", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json" + } } } { @@ -266,10 +205,14 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace/delete", + "path": "/api/2.0/workspace/import", "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal", - "recursive": true + "multipart_form": { + "content": "{\n \"version\": 1,\n \"config\": {\n \"bundle\": {\n \"name\": \"test-bundle\",\n \"target\": \"default\",\n \"git\": {\n \"bundle_root_path\": \".\"\n }\n },\n \"workspace\": {\n \"file_path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files\"\n },\n \"resources\": {},\n \"presets\": {\n \"source_linked_deployment\": false\n }\n },\n \"extra\": {}\n}", + "format": "AUTO", + "overwrite": "true", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + } } } { @@ -279,9 +222,14 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace/delete", + "path": "/api/2.0/workspace/import", "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" + "multipart_form": { + "content": "{\n \"version\": 4,\n \"terraform_version\": \"1.5.5\",\n \"serial\": 1,\n \"lineage\": \"[UUID]\",\n \"outputs\": {},\n \"resources\": [\n {\n \"mode\": \"managed\",\n \"type\": 
\"databricks_schema\",\n \"name\": \"foo\",\n \"provider\": \"provider[\\\"registry.terraform.io/databricks/databricks\\\"]\",\n \"instances\": [\n {\n \"schema_version\": 0,\n \"attributes\": {\n \"catalog_name\": \"mycatalog\",\n \"comment\": null,\n \"enable_predictive_optimization\": \"INHERIT\",\n \"force_destroy\": true,\n \"id\": \"mycatalog.myschema\",\n \"metastore_id\": \"[UUID]\",\n \"name\": \"myschema\",\n \"owner\": \"[USERNAME]\",\n \"properties\": null,\n \"provider_config\": [],\n \"schema_id\": \"[UUID]\",\n \"storage_root\": null\n },\n \"sensitive_attributes\": [],\n \"private\": \"bnVsbA==\"\n }\n ]\n }\n ],\n \"check_results\": null\n}\n", + "format": "AUTO", + "overwrite": "true", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate" + } } } { diff --git a/acceptance/bundle/user_agent/simple/out.requests.destroy.direct.json b/acceptance/bundle/user_agent/simple/out.requests.destroy.direct.json index 24f3c5bf13..c04d397f90 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.destroy.direct.json +++ b/acceptance/bundle/user_agent/simple/out.requests.destroy.direct.json @@ -121,15 +121,10 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", - "q": { - "overwrite": "false" - }, + "path": "/api/2.0/workspace/delete", "body": { - "ID": "[UUID]", - "AcquisitionTime": "[TIMESTAMP]", - "IsForced": false, - "User": "[USERNAME]" + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default", + "recursive": true } } { @@ -139,10 +134,13 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace/delete", + "path": "/api/2.0/workspace/import", "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default", - "recursive": true + "multipart_form": { + "content": "{\"ID\":\"[UUID]\",\"AcquisitionTime\":\"[TIMESTAMP]\",\"IsForced\":false,\"User\":\"[USERNAME]\"}", + "format": "AUTO", + "path": 
"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" + } } } { diff --git a/acceptance/bundle/user_agent/simple/out.requests.destroy.terraform.json b/acceptance/bundle/user_agent/simple/out.requests.destroy.terraform.json index f8ab210ec7..8c86fbe4ac 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.destroy.terraform.json +++ b/acceptance/bundle/user_agent/simple/out.requests.destroy.terraform.json @@ -100,15 +100,10 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", - "q": { - "overwrite": "false" - }, + "path": "/api/2.0/workspace/delete", "body": { - "ID": "[UUID]", - "AcquisitionTime": "[TIMESTAMP]", - "IsForced": false, - "User": "[USERNAME]" + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default", + "recursive": true } } { @@ -118,10 +113,13 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace/delete", + "path": "/api/2.0/workspace/import", "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default", - "recursive": true + "multipart_form": { + "content": "{\"ID\":\"[UUID]\",\"AcquisitionTime\":\"[TIMESTAMP]\",\"IsForced\":false,\"User\":\"[USERNAME]\"}", + "format": "AUTO", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" + } } } { diff --git a/acceptance/bundle/validate/sync_patterns/out.sync.txt b/acceptance/bundle/validate/sync_patterns/out.sync.txt index ac5aba9ab2..fd939c9721 100644 --- a/acceptance/bundle/validate/sync_patterns/out.sync.txt +++ b/acceptance/bundle/validate/sync_patterns/out.sync.txt @@ -1,7 +1,12 @@ { "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/conf/dir/test.yml", - "q": { - "overwrite": "true" + "path": "/api/2.0/workspace/import", + "body": { + "multipart_form": { + "content": "", + "format": "AUTO", + "overwrite": "true", + "path": 
"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/conf/dir/test.yml" + } } } diff --git a/acceptance/bundle/validate/sync_patterns/script b/acceptance/bundle/validate/sync_patterns/script index d2aae85444..ce8e5cbb1b 100644 --- a/acceptance/bundle/validate/sync_patterns/script +++ b/acceptance/bundle/validate/sync_patterns/script @@ -1,5 +1,5 @@ trace $CLI bundle validate trace $CLI bundle validate -o json | jq '.sync' trace $CLI bundle deploy -jq 'select(.path | test("dir/test.yml"))' out.requests.txt > out.sync.txt +jq 'select(.body.multipart_form.path | strings | test("dir/test.yml"))' out.requests.txt > out.sync.txt rm out.requests.txt diff --git a/acceptance/internal/prepare_server.go b/acceptance/internal/prepare_server.go index 702b4e145e..f81b07f7f4 100644 --- a/acceptance/internal/prepare_server.go +++ b/acceptance/internal/prepare_server.go @@ -1,8 +1,12 @@ package internal import ( + "bytes" "encoding/json" "fmt" + "io" + "mime" + "mime/multipart" "net/http" "os" "path/filepath" @@ -323,6 +327,11 @@ func getLoggedRequest(req *testserver.Request, includedHeaders []string) LoggedR if json.Valid(req.Body) { result.Body = json.RawMessage(req.Body) + } else if normalized, ok := normalizeMultipartBody(req); ok { + // Multipart bodies contain a randomly generated boundary string and binary + // content; record a normalized form (sorted form-field names with sizes for + // file parts) so recorded requests stay deterministic and reviewable. + result.Body = normalized } else { result.RawBody = string(req.Body) } @@ -330,6 +339,55 @@ func getLoggedRequest(req *testserver.Request, includedHeaders []string) LoggedR return result } +// normalizeMultipartBody returns a deterministic representation of a multipart +// form body if the request's Content-Type is multipart/*. The second return +// value is false if the body is not multipart or cannot be parsed. 
+//
+// Text parts are recorded as their literal content so reviewers can read what
+// was uploaded; existing global TIMESTAMP / UUID / USERNAME replacements
+// normalize the timestamp / id / email fields embedded in deploy.lock and
+// similar payloads. Non-UTF-8 (binary) parts and parts whose serialized
+// content would bloat the recording past multipartContentLimit are
+// summarized as "[binary content N bytes]".
+func normalizeMultipartBody(req *testserver.Request) (any, bool) {
+	contentType := req.Headers.Get("Content-Type")
+	mediaType, params, err := mime.ParseMediaType(contentType)
+	if err != nil || !strings.HasPrefix(mediaType, "multipart/") {
+		return nil, false
+	}
+	boundary := params["boundary"]
+	if boundary == "" {
+		return nil, false
+	}
+	mr := multipart.NewReader(bytes.NewReader(req.Body), boundary)
+	parts := map[string]any{}
+	for {
+		part, err := mr.NextPart()
+		if err == io.EOF {
+			break
+		}
+		if err != nil {
+			return nil, false
+		}
+		data, err := io.ReadAll(part)
+		if err != nil {
+			return nil, false
+		}
+		name := part.FormName()
+		if !utf8.Valid(data) || len(data) > multipartContentLimit {
+			parts[name] = fmt.Sprintf("[binary content %d bytes]", len(data))
+			continue
+		}
+		parts[name] = string(data)
+	}
+	return map[string]any{"multipart_form": parts}, true
+}
+
+// Multipart parts larger than this limit are summarized as a size placeholder
+// to keep recorded fixtures small. Reviewers care about *what* was uploaded
+// (path, format, overwrite flag), not the bytes themselves, for large blobs.
+const multipartContentLimit = 4096 + func filterHeaders(h http.Header, includedHeaders []string) http.Header { headers := make(http.Header) for k, v := range h { diff --git a/libs/filer/workspace_files_client.go b/libs/filer/workspace_files_client.go index 54103f8158..d7ca720daf 100644 --- a/libs/filer/workspace_files_client.go +++ b/libs/filer/workspace_files_client.go @@ -9,7 +9,6 @@ import ( "io" "io/fs" "net/http" - "net/url" "path" "slices" "strings" @@ -157,35 +156,34 @@ func (w *WorkspaceFilesClient) Write(ctx context.Context, name string, reader io return err } - // Remove leading "/" so we can use it in the URL. - overwrite := slices.Contains(mode, OverwriteIfExists) - urlPath := fmt.Sprintf( - "/api/2.0/workspace-files/import-file/%s?overwrite=%t", - url.PathEscape(strings.TrimLeft(absPath, "/")), - overwrite, - ) - // Buffer the file contents because we may need to retry below and we cannot read twice. body, err := io.ReadAll(reader) if err != nil { return err } - err = w.apiClient.Do(ctx, http.MethodPost, urlPath, w.orgIDHeaders(), nil, body, nil) + // Use Workspace.Upload (multipart /api/2.0/workspace/import) instead of the + // JSON-body variant of the same endpoint, which caps payloads at 10 MiB for + // AUTO format (databricks.webapp.autoExportFormatLimitBytes). The multipart + // variant accepts up to 200 MiB (databricks.workspaceFilesystem.maxImportSizeBytes), + // matching the legacy /workspace-files/import-file endpoint we are migrating + // away from. + overwrite := slices.Contains(mode, OverwriteIfExists) + uploadOpts := []func(*workspace.Import){ + workspace.UploadFormat(workspace.ImportFormatAuto), + } + if overwrite { + uploadOpts = append(uploadOpts, workspace.UploadOverwrite()) + } + err = w.workspaceClient.Workspace.Upload(ctx, absPath, bytes.NewReader(body), uploadOpts...) // Return early on success. if err == nil { return nil } - // Special handling of this error only if it is an API error. 
- var aerr *apierr.APIError - if !errors.As(err, &aerr) { - return err - } - - // This API returns a 404 if the parent directory does not exist. - if aerr.StatusCode == http.StatusNotFound { + // Parent directory does not exist. + if errors.Is(err, apierr.ErrNotFound) { if !slices.Contains(mode, CreateParentDirectories) { return noSuchDirectoryError{path.Dir(absPath)} } @@ -193,7 +191,7 @@ func (w *WorkspaceFilesClient) Write(ctx context.Context, name string, reader io // Create parent directory. err = w.workspaceClient.Workspace.MkdirsByPath(ctx, path.Dir(absPath)) //nolint:staticcheck // Deprecated in SDK v0.127.0. Migration to WorkspaceHierarchyService tracked separately. if err != nil { - if errors.As(err, &aerr) && aerr.StatusCode == http.StatusForbidden { + if errors.Is(err, apierr.ErrPermissionDenied) { return permissionError{absPath} } return fmt.Errorf("unable to mkdir to write file %s: %w", absPath, err) @@ -203,23 +201,27 @@ func (w *WorkspaceFilesClient) Write(ctx context.Context, name string, reader io return w.Write(ctx, name, bytes.NewReader(body), sliceWithout(mode, CreateParentDirectories)...) } - // This API returns 409 if the file already exists, when the object type is file - if aerr.StatusCode == http.StatusConflict { + // File already exists at the path. The /workspace/import endpoint reports this + // with two different error_codes depending on whether the conflict was detected + // sequentially (400 RESOURCE_ALREADY_EXISTS) or under concurrent contention + // (409 ALREADY_EXISTS, observed in TestLock). Both are already-exists from the + // caller's perspective. + // + // Existing-object-with-mismatched-node-type (e.g. uploading a regular .py when a + // NOTEBOOK is at the path) surfaces as 400 INVALID_PARAMETER_VALUE with a + // "Requested node type" message — also already-exists from the caller's perspective. 
+ if errors.Is(err, apierr.ErrResourceAlreadyExists) || errors.Is(err, apierr.ErrAlreadyExists) { return fileAlreadyExistsError{absPath} } - - // This API returns 400 if the file already exists when the object type is notebook. - // Both the historical "Path () already exists." format and the newer - // "RESOURCE_ALREADY_EXISTS: already exists. ..." format end with the same - // "already exists." marker; the JSON error_code is empty in both. The new format - // might not have been rolled out to all workspaces yet, so we anchor on the shared - // marker and return absPath rather than parsing the message. - if aerr.StatusCode == http.StatusBadRequest && strings.Contains(aerr.Message, "already exists.") { - return fileAlreadyExistsError{absPath} + if errors.Is(err, apierr.ErrInvalidParameterValue) { + var aerr *apierr.APIError + if errors.As(err, &aerr) && strings.Contains(aerr.Message, "Requested node type") { + return fileAlreadyExistsError{absPath} + } } - // This API returns StatusForbidden when you have read access but don't have write access to a file - if aerr.StatusCode == http.StatusForbidden { + // Caller has read access but no write access. + if errors.Is(err, apierr.ErrPermissionDenied) { return permissionError{absPath} } @@ -273,19 +275,14 @@ func (w *WorkspaceFilesClient) Delete(ctx context.Context, name string, mode ... return nil } - // Special handling of this error only if it is an API error. - var aerr *apierr.APIError - if !errors.As(err, &aerr) { - return err + if errors.Is(err, apierr.ErrNotFound) { + return fileDoesNotExistError{absPath} } - switch aerr.StatusCode { - case http.StatusBadRequest: - if aerr.ErrorCode == "DIRECTORY_NOT_EMPTY" { - return directoryNotEmptyError{absPath} - } - case http.StatusNotFound: - return fileDoesNotExistError{absPath} + // No SDK sentinel for DIRECTORY_NOT_EMPTY; match the error_code directly. 
+ var aerr *apierr.APIError + if errors.As(err, &aerr) && aerr.ErrorCode == "DIRECTORY_NOT_EMPTY" { + return directoryNotEmptyError{absPath} } return err @@ -306,15 +303,9 @@ func (w *WorkspaceFilesClient) ReadDir(ctx context.Context, name string) ([]fs.D } if err != nil { - // If we got an API error we deal with it below. - var aerr *apierr.APIError - if !errors.As(err, &aerr) { - return nil, err - } - // NOTE: This API returns a 404 if the specified path does not exist, // but can also do so if we don't have read access. - if aerr.StatusCode == http.StatusNotFound { + if errors.Is(err, apierr.ErrNotFound) { return nil, noSuchDirectoryError{path.Dir(absPath)} } return nil, err @@ -358,14 +349,8 @@ func (w *WorkspaceFilesClient) Stat(ctx context.Context, name string) (fs.FileIn &stat, ) if err != nil { - // If we got an API error we deal with it below. - var aerr *apierr.APIError - if !errors.As(err, &aerr) { - return nil, err - } - // This API returns a 404 if the specified path does not exist. 
- if aerr.StatusCode == http.StatusNotFound { + if errors.Is(err, apierr.ErrNotFound) { return nil, fileDoesNotExistError{absPath} } } diff --git a/libs/filer/workspace_files_client_test.go b/libs/filer/workspace_files_client_test.go index 2603d31d6d..e37389f8ca 100644 --- a/libs/filer/workspace_files_client_test.go +++ b/libs/filer/workspace_files_client_test.go @@ -1,15 +1,23 @@ package filer import ( + "bytes" + "context" "encoding/json" + "io" "io/fs" + "net/http" + "strings" "testing" "time" "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/config" + "github.com/databricks/databricks-sdk-go/experimental/mocks" "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" ) @@ -95,6 +103,173 @@ func TestWorkspaceFilesClientOrgIDHeaders(t *testing.T) { }) } +func TestWorkspaceFilesClientWriteSuccess(t *testing.T) { + tests := []struct { + name string + modes []WriteMode + expectOverride bool + }{ + { + name: "no overwrite", + modes: nil, + expectOverride: false, + }, + { + name: "overwrite", + modes: []WriteMode{OverwriteIfExists}, + expectOverride: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + mw := mocks.NewMockWorkspaceClient(t) + workspaceApi := mw.GetMockWorkspaceAPI() + + workspaceApi.EXPECT().Upload( + mock.Anything, + "/dir/file.txt", + mock.Anything, + mock.Anything, + mock.Anything, + ).RunAndReturn(func(_ context.Context, _ string, r io.Reader, opts ...func(*workspace.Import)) error { + body, err := io.ReadAll(r) + require.NoError(t, err) + assert.Equal(t, "hello", string(body)) + + i := &workspace.Import{} + for _, opt := range opts { + opt(i) + } + assert.Equal(t, workspace.ImportFormatAuto, i.Format) + assert.Equal(t, tc.expectOverride, i.Overwrite) + return nil + }).Once() + + c := WorkspaceFilesClient{ + 
workspaceClient: mw.WorkspaceClient, + root: NewWorkspaceRootPath("/dir"), + } + err := c.Write(t.Context(), "file.txt", strings.NewReader("hello"), tc.modes...) + require.NoError(t, err) + }) + } +} + +func TestWorkspaceFilesClientWriteErrorMapping(t *testing.T) { + tests := []struct { + name string + mode []WriteMode + apiErr *apierr.APIError + expectErrTarget any + }{ + { + name: "404 without create-parent maps to noSuchDirectoryError", + apiErr: &apierr.APIError{StatusCode: http.StatusNotFound, Message: "not found"}, + expectErrTarget: noSuchDirectoryError{}, + }, + { + name: "400 RESOURCE_ALREADY_EXISTS maps to fileAlreadyExistsError", + apiErr: &apierr.APIError{ + StatusCode: http.StatusBadRequest, + ErrorCode: "RESOURCE_ALREADY_EXISTS", + Message: "/dir/file.txt already exists. Please pass overwrite=true to overwrite it.", + }, + expectErrTarget: fileAlreadyExistsError{}, + }, + { + name: "409 ALREADY_EXISTS (concurrent contention) maps to fileAlreadyExistsError", + apiErr: &apierr.APIError{ + StatusCode: http.StatusConflict, + ErrorCode: "ALREADY_EXISTS", + Message: "Node with name /dir/file.txt already exists. 
Please pass overwrite=true to update it.", + }, + expectErrTarget: fileAlreadyExistsError{}, + }, + { + name: "400 INVALID_PARAMETER_VALUE node type mismatch maps to fileAlreadyExistsError", + apiErr: &apierr.APIError{ + StatusCode: http.StatusBadRequest, + ErrorCode: "INVALID_PARAMETER_VALUE", + Message: "Requested node type [FILE] is different from the existing node type [NOTEBOOK]", + }, + expectErrTarget: fileAlreadyExistsError{}, + }, + { + name: "400 INVALID_PARAMETER_VALUE other message passes through", + apiErr: &apierr.APIError{ + StatusCode: http.StatusBadRequest, + ErrorCode: "INVALID_PARAMETER_VALUE", + Message: "some other validation failure", + }, + expectErrTarget: nil, + }, + { + name: "403 maps to permissionError", + apiErr: &apierr.APIError{StatusCode: http.StatusForbidden, Message: "denied"}, + expectErrTarget: permissionError{}, + }, + { + name: "500 passes through", + apiErr: &apierr.APIError{StatusCode: http.StatusInternalServerError, Message: "boom"}, + expectErrTarget: nil, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + mw := mocks.NewMockWorkspaceClient(t) + workspaceApi := mw.GetMockWorkspaceAPI() + workspaceApi.EXPECT().Upload( + mock.Anything, "/dir/file.txt", mock.Anything, mock.Anything, mock.Anything, + ).Return(tc.apiErr).Once() + + c := WorkspaceFilesClient{ + workspaceClient: mw.WorkspaceClient, + root: NewWorkspaceRootPath("/dir"), + } + err := c.Write(t.Context(), "file.txt", bytes.NewReader([]byte("data")), tc.mode...) 
+ require.Error(t, err) + switch target := tc.expectErrTarget.(type) { + case noSuchDirectoryError: + assert.ErrorAs(t, err, &target) + case fileAlreadyExistsError: + assert.ErrorAs(t, err, &target) + case permissionError: + assert.ErrorAs(t, err, &target) + case nil: + // passthrough — same APIError pointer + var aerr *apierr.APIError + require.ErrorAs(t, err, &aerr) + assert.Equal(t, tc.apiErr.StatusCode, aerr.StatusCode) + } + }) + } +} + +func TestWorkspaceFilesClientWriteCreatesParentDirectories(t *testing.T) { + mw := mocks.NewMockWorkspaceClient(t) + workspaceApi := mw.GetMockWorkspaceAPI() + + // First Upload returns 404, second returns success after MkdirsByPath. + workspaceApi.EXPECT().Upload( + mock.Anything, "/dir/sub/file.txt", mock.Anything, mock.Anything, mock.Anything, + ).Return(&apierr.APIError{StatusCode: http.StatusNotFound, Message: "not found"}).Once() + + workspaceApi.EXPECT().MkdirsByPath(mock.Anything, "/dir/sub").Return(nil).Once() + + workspaceApi.EXPECT().Upload( + mock.Anything, "/dir/sub/file.txt", mock.Anything, mock.Anything, mock.Anything, + ).Return(nil).Once() + + c := WorkspaceFilesClient{ + workspaceClient: mw.WorkspaceClient, + root: NewWorkspaceRootPath("/dir"), + } + err := c.Write(t.Context(), "sub/file.txt", strings.NewReader("data"), CreateParentDirectories) + require.NoError(t, err) +} + func TestWorkspaceFilesClient_wsfsUnmarshal(t *testing.T) { payload := ` { diff --git a/libs/testserver/handlers.go b/libs/testserver/handlers.go index 8bd5339184..8f3983fb07 100644 --- a/libs/testserver/handlers.go +++ b/libs/testserver/handlers.go @@ -1,9 +1,13 @@ package testserver import ( + "bytes" "encoding/base64" "encoding/json" "fmt" + "io" + "mime" + "mime/multipart" "net/http" "path" "strings" @@ -116,6 +120,72 @@ func AddDefaultHandlers(server *Server) { }) server.Handle("POST", "/api/2.0/workspace/import", func(req Request) any { + // /workspace/import accepts both a JSON body (matching workspace.Import) and a + // multipart 
form body. The multipart variant is what databricks-sdk-go's + // Workspace.Upload uses; the JSON variant is kept for back-compat with anything + // that still hits Workspace.Import directly. + contentType := req.Headers.Get("Content-Type") + mediaType, params, _ := mime.ParseMediaType(contentType) + if strings.HasPrefix(mediaType, "multipart/") { + mr := multipart.NewReader(bytes.NewReader(req.Body), params["boundary"]) + var ( + filePath string + content []byte + format string + overwrite bool + ) + for { + part, err := mr.NextPart() + if err == io.EOF { + break + } + if err != nil { + return Response{ + Body: fmt.Sprintf("internal error: %s", err), + StatusCode: http.StatusInternalServerError, + } + } + data, err := io.ReadAll(part) + if err != nil { + return Response{ + Body: fmt.Sprintf("internal error: %s", err), + StatusCode: http.StatusInternalServerError, + } + } + switch part.FormName() { + case "path": + filePath = string(data) + case "content": + content = data + case "format": + format = string(data) + case "overwrite": + overwrite = string(data) == "true" + } + } + + if format != "" && format != string(workspace.ImportFormatAuto) { + return Response{ + Body: "internal error: The test server only supports auto format.", + StatusCode: http.StatusInternalServerError, + } + } + + // Translate any 409 from the shared fake into the 400 + errorCode + // RESOURCE_ALREADY_EXISTS shape returned by the real /workspace/import endpoint. + resp := req.Workspace.WorkspaceFilesImportFile(filePath, content, overwrite) + if resp.StatusCode == http.StatusConflict { + return Response{ + StatusCode: http.StatusBadRequest, + Body: map[string]string{ + "error_code": "RESOURCE_ALREADY_EXISTS", + "message": fmt.Sprintf("Path (%s) already exists.", filePath), + }, + } + } + return resp + } + var request workspace.Import err := json.Unmarshal(req.Body, &request) if err != nil {