Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Don't upload chunks that are already on the server (fixed version) #1660

Merged
merged 4 commits into from
Jun 27, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,12 @@

"You know what they say. Fool me once, strike one, but fool me twice... strike three." — Michael Scott

## Unreleased

### Various fixes and improvements

- feat: Don't upload chunks that are already on the server (fixed version) (#1660)

## 2.19.3

### Various fixes and improvements
Expand Down
5 changes: 5 additions & 0 deletions src/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2610,6 +2610,10 @@ pub enum ChunkUploadCapability {
/// Chunked upload of standalone artifact bundles
ArtifactBundles,

/// Like `ArtifactBundles`, but with deduplicated chunk
/// upload.
ArtifactBundlesV2,

/// Upload of PDBs and debug id overrides
Pdbs,

Expand Down Expand Up @@ -2638,6 +2642,7 @@ impl<'de> Deserialize<'de> for ChunkUploadCapability {
"debug_files" => ChunkUploadCapability::DebugFiles,
"release_files" => ChunkUploadCapability::ReleaseFiles,
"artifact_bundles" => ChunkUploadCapability::ArtifactBundles,
"artifact_bundles_v2" => ChunkUploadCapability::ArtifactBundlesV2,
"pdbs" => ChunkUploadCapability::Pdbs,
"portablepdbs" => ChunkUploadCapability::PortablePdbs,
"sources" => ChunkUploadCapability::Sources,
Expand Down
111 changes: 75 additions & 36 deletions src/utils/file_upload.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ pub fn initialize_legacy_release_upload(context: &UploadContext) -> Result<()> {
if context.project.is_some()
&& context.chunk_upload_options.map_or(false, |x| {
x.supports(ChunkUploadCapability::ArtifactBundles)
|| x.supports(ChunkUploadCapability::ArtifactBundlesV2)
})
{
return Ok(());
Expand Down Expand Up @@ -274,39 +275,12 @@ fn upload_files_parallel(
Ok(())
}

fn upload_files_chunked(
fn poll_assemble(
checksum: Digest,
chunks: &[Digest],
context: &UploadContext,
files: &SourceFiles,
options: &ChunkUploadOptions,
) -> Result<()> {
let archive = build_artifact_bundle(context, files, None)?;

let progress_style =
ProgressStyle::default_spinner().template("{spinner} Optimizing bundle for upload...");

let pb = ProgressBar::new_spinner();
pb.enable_steady_tick(100);
pb.set_style(progress_style);

let view = ByteView::open(archive.path())?;
let (checksum, checksums) = get_sha1_checksums(&view, options.chunk_size)?;
let chunks = view
.chunks(options.chunk_size as usize)
.zip(checksums.iter())
.map(|(data, checksum)| Chunk((*checksum, data)))
.collect::<Vec<_>>();

pb.finish_with_duration("Optimizing");

let progress_style = ProgressStyle::default_bar().template(&format!(
"{} Uploading files...\
\n{{wide_bar}} {{bytes}}/{{total_bytes}} ({{eta}})",
style(">").dim(),
));

upload_chunks(&chunks, options, progress_style)?;
println!("{} Uploaded files to Sentry", style(">").dim());

let progress_style = ProgressStyle::default_spinner().template("{spinner} Processing files...");

let pb = ProgressBar::new_spinner();
Expand All @@ -320,21 +294,22 @@ fn upload_files_chunked(
};

let api = Api::current();
let use_artifact_bundle = (options.supports(ChunkUploadCapability::ArtifactBundles)
|| options.supports(ChunkUploadCapability::ArtifactBundlesV2))
&& context.project.is_some();
let response = loop {
// prefer standalone artifact bundle upload over legacy release based upload
let response = if options.supports(ChunkUploadCapability::ArtifactBundles)
&& context.project.is_some()
{
let response = if use_artifact_bundle {
api.assemble_artifact_bundle(
context.org,
vec![context.project.unwrap().to_string()],
checksum,
&checksums,
chunks,
context.release,
context.dist,
)?
} else {
api.assemble_release_artifacts(context.org, context.release()?, checksum, &checksums)?
api.assemble_release_artifacts(context.org, context.release()?, checksum, chunks)?
};

// Poll until there is a response, unless the user has specified to skip polling. In
Expand Down Expand Up @@ -376,6 +351,65 @@ fn upload_files_chunked(
Ok(())
}

/// Uploads the given source files to the server as a chunked artifact bundle.
///
/// Builds an artifact bundle archive, splits it into chunks, and — when the
/// server advertises the `ArtifactBundlesV2` capability and a project is set —
/// asks the server which chunks it is still missing so already-present chunks
/// are not re-uploaded. Finally polls assembly unless nothing was uploaded.
fn upload_files_chunked(
    context: &UploadContext,
    files: &SourceFiles,
    options: &ChunkUploadOptions,
) -> Result<()> {
    let archive = build_artifact_bundle(context, files, None)?;

    let progress_style =
        ProgressStyle::default_spinner().template("{spinner} Optimizing bundle for upload...");

    let pb = ProgressBar::new_spinner();
    pb.enable_steady_tick(100);
    pb.set_style(progress_style);

    // Checksum the whole archive and each fixed-size chunk of it.
    let view = ByteView::open(archive.path())?;
    let (checksum, checksums) = get_sha1_checksums(&view, options.chunk_size)?;
    let mut chunks = view
        .chunks(options.chunk_size as usize)
        .zip(checksums.iter())
        .map(|(data, checksum)| Chunk((*checksum, data)))
        .collect::<Vec<_>>();

    pb.finish_with_duration("Optimizing");

    let progress_style = ProgressStyle::default_bar().template(&format!(
        "{} Uploading files...\
       \n{{wide_bar}} {{bytes}}/{{total_bytes}} ({{eta}})",
        style(">").dim(),
    ));

    // Filter out chunks that are already on the server. This only matters if the server supports
    // `ArtifactBundlesV2`, otherwise the `missing_chunks` field is meaningless.
    if options.supports(ChunkUploadCapability::ArtifactBundlesV2) {
        if let Some(project) = context.project {
            let api = Api::current();
            let response = api.assemble_artifact_bundle(
                context.org,
                vec![project.to_string()],
                checksum,
                &checksums,
                context.release,
                context.dist,
            )?;
            // Keep only the chunks the server reported as missing.
            chunks.retain(|Chunk((digest, _))| response.missing_chunks.contains(digest));
        }
    }

    upload_chunks(&chunks, options, progress_style)?;

    if chunks.is_empty() {
        println!(
            "{} Nothing to upload, all files are on the server",
            style(">").dim()
        );
        Ok(())
    } else {
        println!("{} Uploaded files to Sentry", style(">").dim());
        poll_assemble(checksum, &checksums, context, options)
    }
}

fn build_debug_id(files: &SourceFiles) -> DebugId {
let mut sorted_files = Vec::from_iter(files);
sorted_files.sort_by_key(|x| x.0);
Expand Down Expand Up @@ -524,7 +558,12 @@ fn print_upload_context_details(context: &UploadContext) {
);
let upload_type = match context.chunk_upload_options {
None => "single file",
Some(opts) if opts.supports(ChunkUploadCapability::ArtifactBundles) => "artifact bundle",
Some(opts)
if opts.supports(ChunkUploadCapability::ArtifactBundles)
|| opts.supports(ChunkUploadCapability::ArtifactBundlesV2) =>
{
"artifact bundle"
}
_ => "release bundle",
};
println!(
Expand Down
21 changes: 14 additions & 7 deletions tests/integration/debug_files/bundle_jvm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,8 @@ fn command_bundle_jvm_out_not_found_creates_dir() {
testcase_cwd_path.join("jvm"),
)
.unwrap();
let _upload_endpoints = mock_common_upload_endpoints(ServerBehavior::Legacy);
let _upload_endpoints =
mock_common_upload_endpoints(ServerBehavior::Legacy, Default::default());
register_test("debug_files/debug_files-bundle-jvm-output-not-found.trycmd");
}

Expand All @@ -35,20 +36,23 @@ fn command_bundle_jvm_fails_out_is_file() {
}
copy_recursively("tests/integration/_fixtures/jvm/", testcase_cwd_path).unwrap();
write(testcase_cwd_path.join("file.txt"), "some file content").unwrap();
let _upload_endpoints = mock_common_upload_endpoints(ServerBehavior::Legacy);
let _upload_endpoints =
mock_common_upload_endpoints(ServerBehavior::Legacy, Default::default());

register_test("debug_files/debug_files-bundle-jvm-output-is-file.trycmd");
}

/// The bundle-jvm command should fail when the given input path does not exist.
#[test]
fn command_bundle_jvm_fails_input_not_found() {
    // Keep the mocked endpoints bound so they live until the test finishes.
    let _endpoints = mock_common_upload_endpoints(ServerBehavior::Legacy, Default::default());
    register_test("debug_files/debug_files-bundle-jvm-input-not-found.trycmd");
}

/// The bundle-jvm command should fail when the input path is a file rather
/// than a directory.
#[test]
fn command_bundle_jvm_fails_input_is_file() {
    // Keep the mocked endpoints bound so they live until the test finishes.
    let _endpoints = mock_common_upload_endpoints(ServerBehavior::Legacy, Default::default());
    register_test("debug_files/debug_files-bundle-jvm-input-is-file.trycmd");
}

Expand All @@ -62,13 +66,15 @@ fn command_bundle_jvm_input_dir_empty() {
}
copy_recursively("tests/integration/_fixtures/jvm/", testcase_cwd_path).unwrap();
create_dir(testcase_cwd_path.join("empty-dir")).unwrap();
let _upload_endpoints = mock_common_upload_endpoints(ServerBehavior::Legacy);
let _upload_endpoints =
mock_common_upload_endpoints(ServerBehavior::Legacy, Default::default());
register_test("debug_files/debug_files-bundle-jvm-input-dir-empty.trycmd");
}

/// The bundle-jvm command should reject an invalid UUID argument.
#[test]
fn command_bundle_jvm_fails_invalid_uuid() {
    // Keep the mocked endpoints bound so they live until the test finishes.
    let _endpoints = mock_common_upload_endpoints(ServerBehavior::Legacy, Default::default());
    register_test("debug_files/debug_files-bundle-jvm-invalid-uuid.trycmd");
}

Expand All @@ -79,6 +85,7 @@ fn command_bundle_jvm() {
remove_dir_all(testcase_cwd_path).unwrap();
}
copy_recursively("tests/integration/_fixtures/jvm/", testcase_cwd_path).unwrap();
let _upload_endpoints = mock_common_upload_endpoints(ServerBehavior::Legacy);
let _upload_endpoints =
mock_common_upload_endpoints(ServerBehavior::Legacy, Default::default());
register_test("debug_files/debug_files-bundle-jvm.trycmd");
}
41 changes: 36 additions & 5 deletions tests/integration/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -119,10 +119,33 @@ pub fn copy_recursively(source: impl AsRef<Path>, destination: impl AsRef<Path>)
pub enum ServerBehavior {
Legacy,
Modern,
ModernV2,
}

/// Configuration for the mocked chunk-upload endpoints used in tests.
#[derive(Debug)]
pub struct ChunkOptions {
    // Chunk size (in bytes) advertised by the mocked chunk-upload endpoint.
    chunk_size: usize,
    // Checksums the mocked assemble endpoint reports as missing.
    missing_chunks: Vec<String>,
}

impl Default for ChunkOptions {
    /// The standard 8 MiB chunk size, with no chunks reported as missing.
    fn default() -> Self {
        ChunkOptions {
            chunk_size: 8_388_608,
            missing_chunks: Vec::new(),
        }
    }
}

// Endpoints need to be bound, as they need to live long enough for test to finish
pub fn mock_common_upload_endpoints(behavior: ServerBehavior) -> Vec<Mock> {
pub fn mock_common_upload_endpoints(
behavior: ServerBehavior,
chunk_options: ChunkOptions,
) -> Vec<Mock> {
let ChunkOptions {
chunk_size,
missing_chunks,
} = chunk_options;
let (accept, release_request_count, assemble_endpoint) = match behavior {
ServerBehavior::Legacy => (
"\"release_files\"",
Expand All @@ -134,11 +157,16 @@ pub fn mock_common_upload_endpoints(behavior: ServerBehavior) -> Vec<Mock> {
0,
"/api/0/organizations/wat-org/artifactbundle/assemble/",
),
ServerBehavior::ModernV2 => (
"\"release_files\", \"artifact_bundles_v2\"",
0,
"/api/0/organizations/wat-org/artifactbundle/assemble/",
),
};
let chunk_upload_response = format!(
"{{
\"url\": \"{}/api/0/organizations/wat-org/chunk-upload/\",
\"chunkSize\": 8388608,
\"chunkSize\": {chunk_size},
\"chunksPerRequest\": 64,
\"maxRequestSize\": 33554432,
\"concurrency\": 8,
Expand All @@ -165,9 +193,12 @@ pub fn mock_common_upload_endpoints(behavior: ServerBehavior) -> Vec<Mock> {
.with_response_body("[]"),
),
mock_endpoint(
EndpointOptions::new("POST", assemble_endpoint, 200)
.with_response_body(r#"{"state":"created","missingChunks":[]}"#),
),
EndpointOptions::new("POST", assemble_endpoint, 200).with_response_body(format!(
r#"{{"state":"created","missingChunks":{}}}"#,
serde_json::to_string(&missing_chunks).unwrap()
)),
)
.expect_at_least(1),
]
}

Expand Down
Loading