move advance out of Fields

This commit is contained in:
Magnus Ulimoen 2019-12-14 13:59:20 +01:00
parent 946caf5e1b
commit 066c7619f7
2 changed files with 106 additions and 110 deletions

View File

@ -61,7 +61,7 @@ impl Universe {
/// Using artificial dissipation with the upwind operator /// Using artificial dissipation with the upwind operator
pub fn advance_upwind(&mut self, dt: f32) { pub fn advance_upwind(&mut self, dt: f32) {
Field::advance_upwind::<operators::Upwind4>( maxwell::advance_upwind::<operators::Upwind4>(
&self.sys.0, &self.sys.0,
&mut self.sys.1, &mut self.sys.1,
dt, dt,
@ -72,7 +72,7 @@ impl Universe {
} }
pub fn advance(&mut self, dt: f32) { pub fn advance(&mut self, dt: f32) {
Field::advance::<operators::Upwind4>( maxwell::advance::<operators::Upwind4>(
&self.sys.0, &self.sys.0,
&mut self.sys.1, &mut self.sys.1,
dt, dt,

View File

@ -68,23 +68,24 @@ impl Field {
ey.into_shape((ny, nx)).unwrap(), ey.into_shape((ny, nx)).unwrap(),
) )
} }
}
pub(crate) fn advance_upwind<UO>( pub(crate) fn advance_upwind<UO>(
&self, prev: &Field,
fut: &mut Self, fut: &mut Field,
dt: f32, dt: f32,
grid: &Grid<UO>, grid: &Grid<UO>,
work_buffers: Option<&mut WorkBuffers>, work_buffers: Option<&mut WorkBuffers>,
) where ) where
UO: UpwindOperator, UO: UpwindOperator,
{ {
assert_eq!(self.0.shape(), fut.0.shape()); assert_eq!(prev.0.shape(), fut.0.shape());
let mut wb: WorkBuffers; let mut wb: WorkBuffers;
let (y, k, tmp) = if let Some(x) = work_buffers { let (y, k, tmp) = if let Some(x) = work_buffers {
(&mut x.y, &mut x.buf, &mut x.tmp) (&mut x.y, &mut x.buf, &mut x.tmp)
} else { } else {
wb = WorkBuffers::new(self.nx(), self.ny()); wb = WorkBuffers::new(prev.nx(), prev.ny());
(&mut wb.y, &mut wb.buf, &mut wb.tmp) (&mut wb.y, &mut wb.buf, &mut wb.tmp)
}; };
@ -97,7 +98,7 @@ impl Field {
for i in 0..4 { for i in 0..4 {
// y = y0 + c*kn // y = y0 + c*kn
y.assign(&self); y.assign(&prev);
match i { match i {
0 => {} 0 => {}
1 | 2 => { 1 | 2 => {
@ -115,14 +116,12 @@ impl Field {
} }
Zip::from(&mut fut.0) Zip::from(&mut fut.0)
.and(&self.0) .and(&prev.0)
.and(&*k[0]) .and(&*k[0])
.and(&*k[1]) .and(&*k[1])
.and(&*k[2]) .and(&*k[2])
.and(&*k[3]) .and(&*k[3])
.apply(|y1, &y0, &k1, &k2, &k3, &k4| { .apply(|y1, &y0, &k1, &k2, &k3, &k4| *y1 = y0 + dt / 6.0 * (k1 + 2.0 * k2 + 2.0 * k3 + k4));
*y1 = y0 + dt / 6.0 * (k1 + 2.0 * k2 + 2.0 * k3 + k4)
});
} }
/// Solving (Au)_x + (Bu)_y /// Solving (Au)_x + (Bu)_y
@ -132,21 +131,21 @@ impl Field {
/// [ 0, 0, -1] [ 1, 0, 0] /// [ 0, 0, -1] [ 1, 0, 0]
/// [ 0, -1, 0] [ 0, 0, 0] /// [ 0, -1, 0] [ 0, 0, 0]
pub(crate) fn advance<SBP>( pub(crate) fn advance<SBP>(
&self, prev: &Field,
fut: &mut Self, fut: &mut Field,
dt: f32, dt: f32,
grid: &Grid<SBP>, grid: &Grid<SBP>,
work_buffers: Option<&mut WorkBuffers>, work_buffers: Option<&mut WorkBuffers>,
) where ) where
SBP: SbpOperator, SBP: SbpOperator,
{ {
assert_eq!(self.0.shape(), fut.0.shape()); assert_eq!(prev.0.shape(), fut.0.shape());
let mut wb: WorkBuffers; let mut wb: WorkBuffers;
let (y, k, tmp) = if let Some(x) = work_buffers { let (y, k, tmp) = if let Some(x) = work_buffers {
(&mut x.y, &mut x.buf, &mut x.tmp) (&mut x.y, &mut x.buf, &mut x.tmp)
} else { } else {
wb = WorkBuffers::new(self.nx(), self.ny()); wb = WorkBuffers::new(prev.nx(), prev.ny());
(&mut wb.y, &mut wb.buf, &mut wb.tmp) (&mut wb.y, &mut wb.buf, &mut wb.tmp)
}; };
@ -159,7 +158,7 @@ impl Field {
for i in 0..4 { for i in 0..4 {
// y = y0 + c*kn // y = y0 + c*kn
y.assign(&self); y.assign(&prev);
match i { match i {
0 => {} 0 => {}
1 | 2 => { 1 | 2 => {
@ -177,15 +176,12 @@ impl Field {
} }
Zip::from(&mut fut.0) Zip::from(&mut fut.0)
.and(&self.0) .and(&prev.0)
.and(&*k[0]) .and(&*k[0])
.and(&*k[1]) .and(&*k[1])
.and(&*k[2]) .and(&*k[2])
.and(&*k[3]) .and(&*k[3])
.apply(|y1, &y0, &k1, &k2, &k3, &k4| { .apply(|y1, &y0, &k1, &k2, &k3, &k4| *y1 = y0 + dt / 6.0 * (k1 + 2.0 * k2 + 2.0 * k3 + k4));
*y1 = y0 + dt / 6.0 * (k1 + 2.0 * k2 + 2.0 * k3 + k4)
});
}
} }
#[allow(non_snake_case)] #[allow(non_snake_case)]