Skip to content

Commit

Permalink
Add --blob-meta and --blob-cache arg use to generate raw blob cache a…
Browse files Browse the repository at this point in the history
…nd meta

Generate the blob cache and blob meta through the --blob-meta and
--blob-cache parameters, so that nydusd can be started directly
from these two files without downloading from the backend.
This can improve the performance of data loading in localfs mode.

Signed-off-by: zyfjeff <[email protected]>
  • Loading branch information
zyfjeff committed Sep 25, 2023
1 parent b777564 commit 8f42e29
Show file tree
Hide file tree
Showing 8 changed files with 130 additions and 11 deletions.
2 changes: 2 additions & 0 deletions builder/src/compact.rs
Original file line number Diff line number Diff line change
Expand Up @@ -610,6 +610,8 @@ impl BlobCompactor {
false,
Features::new(),
false,
None,
None,
);
let mut bootstrap_mgr =
BootstrapManager::new(Some(ArtifactStorage::SingleFile(d_bootstrap)), None);
Expand Down
18 changes: 14 additions & 4 deletions builder/src/core/blob.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,16 @@
// SPDX-License-Identifier: Apache-2.0

use std::borrow::Cow;
use std::io::Write;
use std::io::{Seek, Write};
use std::mem::size_of;
use std::slice;

use anyhow::{Context, Result};
use nydus_rafs::metadata::RAFS_MAX_CHUNK_SIZE;
use nydus_storage::device::BlobFeatures;
use nydus_storage::meta::{toc, BlobMetaChunkArray};
use nydus_storage::meta::{toc, BlobCompressionContextHeader, BlobMetaChunkArray};
use nydus_utils::digest::{self, DigestHasher, RafsDigest};
use nydus_utils::{compress, crypt};
use nydus_utils::{compress, crypt, try_round_up_4k};
use sha2::digest::Digest;

use super::layout::BlobLayout;
Expand Down Expand Up @@ -194,7 +195,6 @@ impl Blob {
} else if ctx.blob_tar_reader.is_some() {
header.set_separate_blob(true);
};

let mut compressor = Self::get_compression_algorithm_for_meta(ctx);
let (compressed_data, compressed) = compress::compress(ci_data, compressor)
.with_context(|| "failed to compress blob chunk info array".to_string())?;
Expand Down Expand Up @@ -223,6 +223,16 @@ impl Blob {
}

blob_ctx.blob_meta_header = header;
if let Some(meta_writer) = ctx.blob_meta_writer.as_ref() {
let mut meta = meta_writer.lock().unwrap();
let aligned_uncompressed_size = try_round_up_4k(uncompressed_size as u64).unwrap();
meta.set_len(
aligned_uncompressed_size + size_of::<BlobCompressionContextHeader>() as u64,
)?;
meta.write_all(ci_data)?;
meta.seek(std::io::SeekFrom::Start(aligned_uncompressed_size))?;
meta.write_all(header.as_bytes())?;
}
let encrypted_header =
crypt::encrypt_with_context(header.as_bytes(), cipher_obj, cipher_ctx, encrypt)?;
let header_size = encrypted_header.len();
Expand Down
16 changes: 15 additions & 1 deletion builder/src/core/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@ impl Write for ArtifactMemoryWriter {
}
}

struct ArtifactFileWriter(ArtifactWriter);
pub struct ArtifactFileWriter(pub ArtifactWriter);

impl RafsIoWrite for ArtifactFileWriter {
fn as_any(&self) -> &dyn Any {
Expand All @@ -215,6 +215,12 @@ impl RafsIoWrite for ArtifactFileWriter {
}
}

impl ArtifactFileWriter {
    /// Truncate or extend the backing artifact file to exactly `s` bytes.
    ///
    /// Delegates to [`std::fs::File::set_len`] on the file wrapped by the
    /// inner `ArtifactWriter`'s buffered writer.
    pub fn set_len(&mut self, s: u64) -> std::io::Result<()> {
        let file = self.0.file.get_mut();
        file.set_len(s)
    }
}

impl Seek for ArtifactFileWriter {
fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result<u64> {
self.0.file.seek(pos)
Expand Down Expand Up @@ -1182,6 +1188,8 @@ pub struct BuildContext {

pub features: Features,
pub configuration: Arc<ConfigV2>,
pub blob_cache_writer: Option<Mutex<ArtifactFileWriter>>,
pub blob_meta_writer: Option<Mutex<ArtifactFileWriter>>,
}

impl BuildContext {
Expand All @@ -1201,6 +1209,8 @@ impl BuildContext {
blob_inline_meta: bool,
features: Features,
encrypt: bool,
blob_cache_writer: Option<Mutex<ArtifactFileWriter>>,
blob_meta_writer: Option<Mutex<ArtifactFileWriter>>,
) -> Self {
// It's a flag for images built with new nydus-image 2.2 and newer.
let mut blob_features = BlobFeatures::CAP_TAR_TOC;
Expand Down Expand Up @@ -1250,6 +1260,8 @@ impl BuildContext {

features,
configuration: Arc::new(ConfigV2::default()),
blob_cache_writer,
blob_meta_writer,
}
}

Expand Down Expand Up @@ -1299,6 +1311,8 @@ impl Default for BuildContext {
blob_inline_meta: false,
features: Features::new(),
configuration: Arc::new(ConfigV2::default()),
blob_cache_writer: None,
blob_meta_writer: None,
}
}
}
Expand Down
14 changes: 13 additions & 1 deletion builder/src/core/node.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
use std::ffi::{OsStr, OsString};
use std::fmt::{self, Display, Formatter, Result as FmtResult};
use std::fs::{self, File};
use std::io::{Read, Write};
use std::io::{Read, Seek, Write};
use std::ops::Deref;
#[cfg(target_os = "linux")]
use std::os::linux::fs::MetadataExt;
Expand Down Expand Up @@ -462,6 +462,18 @@ impl Node {
chunk.set_compressed(is_compressed);
}

if let Some(writer) = ctx.blob_cache_writer.as_ref() {
let mut guard = writer.lock().unwrap();
let curr_pos = guard.seek(std::io::SeekFrom::End(0))?;
if curr_pos < chunk.uncompressed_offset() + aligned_d_size as u64 {
guard.set_len(chunk.uncompressed_offset() + aligned_d_size as u64)?;
}

guard.seek(std::io::SeekFrom::Start(chunk.uncompressed_offset()))?;
guard
.write_all(&chunk_data)
.context("failed to write blob cache")?;
}
event_tracer!("blob_uncompressed_size", +d_size);

Ok(chunk_info)
Expand Down
14 changes: 12 additions & 2 deletions builder/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
extern crate log;

use std::ffi::OsString;
use std::io::Write;
use std::os::unix::ffi::OsStrExt;
use std::path::{Path, PathBuf};

Expand All @@ -26,8 +27,8 @@ pub use self::compact::BlobCompactor;
pub use self::core::bootstrap::Bootstrap;
pub use self::core::chunk_dict::{parse_chunk_dict_arg, ChunkDict, HashChunkDict};
pub use self::core::context::{
ArtifactStorage, ArtifactWriter, BlobContext, BlobManager, BootstrapContext, BootstrapManager,
BuildContext, BuildOutput, ConversionType,
ArtifactFileWriter, ArtifactStorage, ArtifactWriter, BlobContext, BlobManager,
BootstrapContext, BootstrapManager, BuildContext, BuildOutput, ConversionType,
};
pub use self::core::feature::{Feature, Features};
pub use self::core::node::{ChunkSource, NodeChunk};
Expand Down Expand Up @@ -237,6 +238,15 @@ fn finalize_blob(
// blob file.
if !is_tarfs {
blob_writer.finalize(Some(blob_meta_id))?;
if let Some(writer) = ctx.blob_cache_writer.as_ref() {
let mut guard = writer.lock().unwrap();
guard.flush()?;
}

if let Some(writer) = ctx.blob_meta_writer.as_ref() {
let mut guard = writer.lock().unwrap();
guard.flush()?;
}
}
}

Expand Down
2 changes: 2 additions & 0 deletions builder/src/stargz.rs
Original file line number Diff line number Diff line change
Expand Up @@ -935,6 +935,8 @@ mod tests {
false,
Features::new(),
false,
None,
None,
);
ctx.fs_version = RafsVersion::V6;
let mut bootstrap_mgr =
Expand Down
4 changes: 4 additions & 0 deletions builder/src/tarball.rs
Original file line number Diff line number Diff line change
Expand Up @@ -689,6 +689,8 @@ mod tests {
false,
Features::new(),
false,
None,
None,
);
let mut bootstrap_mgr =
BootstrapManager::new(Some(ArtifactStorage::FileDir(tmp_dir)), None);
Expand Down Expand Up @@ -721,6 +723,8 @@ mod tests {
false,
Features::new(),
true,
None,
None,
);
let mut bootstrap_mgr =
BootstrapManager::new(Some(ArtifactStorage::FileDir(tmp_dir)), None);
Expand Down
71 changes: 68 additions & 3 deletions src/bin/nydus-image/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,10 @@ use nix::unistd::{getegid, geteuid};
use nydus::{get_build_time_info, setup_logging};
use nydus_api::{BuildTimeInfo, ConfigV2, LocalFsConfig};
use nydus_builder::{
parse_chunk_dict_arg, ArtifactStorage, BlobCompactor, BlobManager, BootstrapManager,
BuildContext, BuildOutput, Builder, ConversionType, DirectoryBuilder, Feature, Features,
HashChunkDict, Merger, Prefetch, PrefetchPolicy, StargzBuilder, TarballBuilder, WhiteoutSpec,
parse_chunk_dict_arg, ArtifactFileWriter, ArtifactStorage, ArtifactWriter, BlobCompactor,
BlobManager, BootstrapManager, BuildContext, BuildOutput, Builder, ConversionType,
DirectoryBuilder, Feature, Features, HashChunkDict, Merger, Prefetch, PrefetchPolicy,
StargzBuilder, TarballBuilder, WhiteoutSpec,
};
use nydus_rafs::metadata::{RafsSuper, RafsSuperConfig, RafsVersion};
use nydus_storage::backend::localfs::LocalFs;
Expand Down Expand Up @@ -356,6 +357,20 @@ fn prepare_cmd_args(bti_string: &'static str) -> App {
.action(ArgAction::SetTrue)
.required(false)
)
.arg(
Arg::new("blob-cache")
.long("blob-cache")
.help("generate blob cache file")
.value_parser(clap::value_parser!(PathBuf))
.required(false)
)
.arg(
Arg::new("blob-meta")
.long("blob-meta")
.help("generate blob meta file")
.value_parser(clap::value_parser!(PathBuf))
.required(false)
)
);

let app = app.subcommand(
Expand Down Expand Up @@ -801,6 +816,8 @@ impl Command {
let version = Self::get_fs_version(matches)?;
let chunk_size = Self::get_chunk_size(matches, conversion_type)?;
let batch_size = Self::get_batch_size(matches, version, conversion_type, chunk_size)?;
let blob_cache_writer = Self::get_blob_cache_writer(matches, conversion_type)?;
let blob_meta_writer = Self::get_blob_meta_writer(matches, conversion_type)?;
let aligned_chunk = if version.is_v6() && conversion_type != ConversionType::TarToTarfs {
true
} else {
Expand Down Expand Up @@ -1036,6 +1053,8 @@ impl Command {
blob_inline_meta,
features,
encrypt,
blob_cache_writer,
blob_meta_writer,
);
build_ctx.set_fs_version(version);
build_ctx.set_chunk_size(chunk_size);
Expand Down Expand Up @@ -1479,6 +1498,52 @@ impl Command {
}
}

/// Build an optional writer for the raw blob meta file requested via
/// `--blob-meta`.
///
/// Returns `Ok(None)` when the flag is absent or the conversion type never
/// produces a local blob meta (`EStargzIndexToRef`); bails out when the flag
/// is combined with the incompatible `TarToTarfs` conversion.
fn get_blob_meta_writer(
    matches: &ArgMatches,
    conversion_type: ConversionType,
) -> Result<Option<Mutex<ArtifactFileWriter>>> {
    // EStargz reference conversion never materializes a blob meta file.
    if conversion_type == ConversionType::EStargzIndexToRef {
        return Ok(None);
    }
    let path = match matches.get_one::<PathBuf>("blob-meta") {
        Some(p) => p.clone(),
        None => return Ok(None),
    };
    if conversion_type == ConversionType::TarToTarfs {
        bail!(
            "conversion type `{}` conflicts with `--blob-meta`",
            conversion_type
        );
    }
    let storage = ArtifactStorage::SingleFile(path);
    let writer = ArtifactFileWriter(ArtifactWriter::new(storage)?);
    Ok(Some(Mutex::new(writer)))
}

/// Build an optional writer for the raw blob cache file requested via
/// `--blob-cache`.
///
/// Returns `Ok(None)` when the flag is absent or the conversion type never
/// produces a local blob cache (`EStargzIndexToRef`); bails out when the flag
/// is combined with the incompatible `TarToTarfs` conversion.
fn get_blob_cache_writer(
    matches: &ArgMatches,
    conversion_type: ConversionType,
) -> Result<Option<Mutex<ArtifactFileWriter>>> {
    // EStargz reference conversion never materializes a blob cache file.
    if conversion_type == ConversionType::EStargzIndexToRef {
        return Ok(None);
    }
    let path = match matches.get_one::<PathBuf>("blob-cache") {
        Some(p) => p.clone(),
        None => return Ok(None),
    };
    if conversion_type == ConversionType::TarToTarfs {
        bail!(
            "conversion type `{}` conflicts with `--blob-cache`",
            conversion_type
        );
    }
    let storage = ArtifactStorage::SingleFile(path);
    let writer = ArtifactFileWriter(ArtifactWriter::new(storage)?);
    Ok(Some(Mutex::new(writer)))
}

// Must specify a path to blob file.
// For cli/binary interface compatibility sake, keep option `backend-config`, but
// it only receives "localfs" backend type and it will be REMOVED in the future
Expand Down

0 comments on commit 8f42e29

Please sign in to comment.