abstract over hdf5::File

Magnus Ulimoen 2020-04-06 23:10:08 +02:00
parent 13abdefe57
commit d3465d5e1e
1 changed file with 76 additions and 70 deletions

@@ -422,10 +422,10 @@ fn main() {
     let output = if opt.legacy {
         None
     } else {
-        Some(create_hdf(&opt.output, sys.grids.as_slice()).unwrap())
+        Some(File::create(&opt.output, sys.grids.as_slice()).unwrap())
     };
     if let Some(file) = output.as_ref() {
-        add_timestep_to_file(&file, 0, sys.fnow.as_slice()).unwrap();
+        file.add_timestep(0, sys.fnow.as_slice()).unwrap();
     }
     let bar = progressbar(opt.no_progressbar, ntime);
@@ -440,7 +440,7 @@ fn main() {
     bar.finish();
     if let Some(file) = output.as_ref() {
-        add_timestep_to_file(&file, ntime, sys.fnow.as_slice()).unwrap();
+        file.add_timestep(ntime, sys.fnow.as_slice()).unwrap();
     } else {
         legacy_output(&opt.output, &sys);
     }
@@ -491,12 +491,14 @@ fn legacy_output<T: sbp::operators::UpwindOperator, P: AsRef<std::path::Path>>(
     }
 }
 
-fn create_hdf<P: AsRef<std::path::Path>>(
+#[derive(Debug, Clone)]
+struct File(hdf5::File);
+
+impl File {
+fn create<P: AsRef<std::path::Path>>(
     path: P,
     grids: &[sbp::grid::Grid],
-) -> Result<hdf5::File, Box<dyn std::error::Error>> {
-    let gzip = 7;
+) -> Result<Self, Box<dyn std::error::Error>> {
     let file = hdf5::File::create(path.as_ref())?;
     let _tds = file
         .new_dataset::<u64>()
@@ -510,7 +512,8 @@ fn create_hdf<P: AsRef<std::path::Path>>(
         let add_dim = |name| {
             g.new_dataset::<Float>()
-                .gzip(gzip)
+                .chunk((grid.ny(), grid.nx()))
+                .gzip(9)
                 .create(name, (grid.ny(), grid.nx()))
         };
         let xds = add_dim("x")?;
@@ -520,7 +523,8 @@ fn create_hdf<P: AsRef<std::path::Path>>(
         let add_var = |name| {
             g.new_dataset::<Float>()
-                .gzip(gzip)
+                .gzip(3)
+                .shuffle(true)
                 .chunk((1, grid.ny(), grid.nx()))
                 .resizable_idx(&[true, false, false])
                 .create(name, (0, grid.ny(), grid.nx()))
@@ -531,14 +535,15 @@ fn create_hdf<P: AsRef<std::path::Path>>(
         add_var("e")?;
     }
-    Ok(file)
+    Ok(Self(file))
 }
 
-fn add_timestep_to_file(
-    file: &hdf5::File,
+fn add_timestep(
+    &self,
     t: u64,
     fields: &[euler::Field],
 ) -> Result<(), Box<dyn std::error::Error>> {
+    let file = &self.0;
     let tds = file.dataset("t")?;
     let tpos = tds.size();
     tds.resize((tpos + 1,))?;
@@ -571,4 +576,5 @@ fn add_timestep_to_file(
         eds.write_slice(e, ndarray::s![tpos, .., ..])?;
     }
     Ok(())
+}
 }
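
The change is the newtype pattern: the raw hdf5::File handle is wrapped in a local File struct, and the free functions create_hdf and add_timestep_to_file become the methods File::create and File::add_timestep. Below is a minimal, self-contained sketch of that pattern, not the project's actual code: std::fs::File stands in for hdf5::File and a plain Field struct stands in for euler::Field so the sketch compiles without the hdf5 crate; only the method names create and add_timestep are taken from the diff above.

// Minimal sketch of the newtype pattern applied in this commit (assumptions:
// std::fs::File stands in for hdf5::File, `Field` for euler::Field).
use std::io::Write;
use std::path::Path;

struct Field(Vec<f64>); // stand-in for euler::Field

struct File(std::fs::File); // the commit wraps hdf5::File instead

impl File {
    fn create<P: AsRef<Path>>(path: P) -> Result<Self, Box<dyn std::error::Error>> {
        // The real `create` also sets up the "t" dataset and one group per grid.
        Ok(Self(std::fs::File::create(path)?))
    }

    fn add_timestep(&self, t: u64, fields: &[Field]) -> Result<(), Box<dyn std::error::Error>> {
        // `&self` suffices here because io::Write is implemented for &std::fs::File,
        // much as the hdf5 handle in the commit is used through a shared reference.
        let mut out = &self.0;
        let values: usize = fields.iter().map(|f| f.0.len()).sum();
        // The real method appends one slice per field to resizable HDF5 datasets;
        // this sketch only records what would have been written.
        writeln!(out, "t = {}: {} values in {} field(s)", t, values, fields.len())?;
        Ok(())
    }
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Same call shape as the updated main(): create once, then append per timestep.
    let output = File::create("timesteps.txt")?;
    output.add_timestep(0, &[Field(vec![0.0; 16])])?;
    output.add_timestep(1, &[Field(vec![1.0; 16])])?;
    Ok(())
}

The dataset layout also changes in passing: the static grid coordinates are now written as a single full-grid chunk with gzip level 9, while the time-dependent fields keep one chunk per timestep and use gzip level 3 together with the shuffle filter, which usually improves compression of floating-point data at modest cost.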