From 08c96b9d887b0382ac11847cd1b91f0949debcf9 Mon Sep 17 00:00:00 2001
From: Gamelin Alexis <alexis.gamelin@synchrotron-soleil.fr>
Date: Thu, 29 Jun 2023 19:23:07 +0200
Subject: [PATCH] Add share_stds method to Mpi

Can be used to compute the bunch standard deviations and share them between the different bunches.
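After the call, every rank holds the standard deviations of all bunches in
the std_all attribute, a (size, 6) array with one row per bunch, and the
bunch charges in the charge_all attribute. Empty bunches contribute a row
of zeros, mirroring what share_means does.

A minimal usage sketch (the filling pattern values are placeholders, the
Beam object is assumed to be built elsewhere with beam.mpi_switch set to
True, and the script is assumed to run under mpirun with one rank per
bunch):

    import numpy as np
    from mbtrack2.tracking.parallel import Mpi

    # Placeholder filling pattern: one boolean per RF bucket, True where
    # a bunch is present.
    filling_pattern = np.ones((4,), dtype=bool)
    mpi = Mpi(filling_pattern)

    # beam: mbtrack2 Beam object with beam.mpi_switch == True, each rank
    # holding one bunch (construction not shown here).
    mpi.share_stds(beam)

    # Gathered data, available on every rank:
    mpi.std_all     # (size, 6) array of bunch standard deviations
    mpi.charge_all  # list of bunch charges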
---
 mbtrack2/tracking/parallel.py | 30 +++++++++++++++++++++++++++++-
 1 file changed, 29 insertions(+), 1 deletion(-)

diff --git a/mbtrack2/tracking/parallel.py b/mbtrack2/tracking/parallel.py
index 30f835b..b71cf4d 100644
--- a/mbtrack2/tracking/parallel.py
+++ b/mbtrack2/tracking/parallel.py
@@ -48,13 +48,16 @@ class Mpi:
         Compute the bunch profiles and share it between the different bunches.
     share_means(beam)
         Compute the bunch means and share it between the different bunches.
+    share_stds(beam)
+        Compute the bunch standard deviations and share them between the
+        different bunches.
         
     References
     ----------
     [1] L. Dalcin, P. Kler, R. Paz, and A. Cosimo, Parallel Distributed 
     Computing using Python, Advances in Water Resources, 34(9):1124-1139, 2011.
-    """
     
+    """
     def __init__(self, filling_pattern):
         from mpi4py import MPI
         self.MPI = MPI
@@ -214,4 +217,29 @@ class Mpi:
         else:
             mean = np.zeros((6,), dtype=np.float64)
         self.comm.Allgather([mean, self.MPI.DOUBLE], [self.mean_all, self.MPI.DOUBLE])
+        
+    def share_stds(self, beam):
+        """
+        Compute the bunch standard deviations and share them between the
+        different bunches.
+
+        Parameters
+        ----------
+        beam : Beam object
+
+        """
+        if not beam.mpi_switch:
+            print("Error, mpi is not initialised.")
+            
+        bunch = beam[self.bunch_num]
+        
+        charge_all = self.comm.allgather(bunch.charge)
+        self.charge_all = charge_all
+        
+        self.std_all = np.empty((self.size, 6), dtype=np.float64)
+        if len(bunch) != 0:
+            std = bunch.std
+        else:
+            std = np.zeros((6,), dtype=np.float64)
+        self.comm.Allgather([std, self.MPI.DOUBLE], [self.std_all, self.MPI.DOUBLE])
                                 
\ No newline at end of file
-- 
GitLab