From 66874533d544e339a67dcfa37cf45f1d93ca25e0 Mon Sep 17 00:00:00 2001 From: Gamelin Alexis <gamelin@synchrotron-soleil.fr> Date: Tue, 6 Apr 2021 12:23:52 +0200 Subject: [PATCH] Various improvements on Beam Fix bug on Beam.distance_between_bunches for the last bunch when the first bunches are empty. Change Beam.distance_between_bunches to compute the array only at initialization with Beam.update_distance_between_bunches Beam.mpi_share_distributions can now share all bunch dimensions. --- tracking/particles.py | 73 +++++++++++++++++++++++++++++++++---------- 1 file changed, 57 insertions(+), 16 deletions(-) diff --git a/tracking/particles.py b/tracking/particles.py index 3506db9..0833e8e 100644 --- a/tracking/particles.py +++ b/tracking/particles.py @@ -460,6 +460,8 @@ class Beam: mpi_gather() Gather beam, all bunches of the different processors are sent to all processors. Rather slow + mpi_share_distributions() + Compute the bunch profile and share it between the different bunches. mpi_close() Call mpi_gather and switch off MPI parallelisation plot(var, option=None) @@ -507,12 +509,22 @@ class Beam: @property def distance_between_bunches(self): + """Return an array which contains the distance to the next bunch in + units of the RF period (ring.T1)""" + return self._distance_between_bunches + + def update_distance_between_bunches(self): + """Update the distance_between_bunches array""" filling_pattern = self.filling_pattern distance = np.zeros(filling_pattern.shape) + last_value = 0 + + # All bunches for index, value in enumerate(filling_pattern): if value == False: pass elif value == True: + last_value = index count = 1 for value2 in filling_pattern[index+1:]: if value2 == False: @@ -520,7 +532,17 @@ class Beam: elif value2 == True: break distance[index] = count - return distance + + # Last bunch case + count2 = 0 + for index2, value2 in enumerate(filling_pattern): + if value2 == True: + break + if value2 == False: + count2 += 1 + distance[last_value] += count2 + + 
self._distance_between_bunches = distance def init_beam(self, filling_pattern, current_per_bunch=1e-3, mp_per_bunch=1e3, track_alive=True): @@ -574,6 +596,7 @@ class Beam: self.bunch_list = bunch_list self.update_filling_pattern() + self.update_distance_between_bunches() for bunch in self.not_empty: bunch.init_gaussian() @@ -676,28 +699,46 @@ class Beam: self.mpi_switch = False self.mpi = None - def mpi_share_distributions(self): - """Share distribution between bunches""" + def mpi_share_distributions(self, dimensions="tau", n_bins=75): + """ + Compute the bunch profile and share it between the different bunches. + + Parameters + ---------- + dimensions : str or list of str, optional + Dimensions in which the binning is done. The default is "tau". + n_bins : int or list of int, optional + Number of bins. The default is 75. + + """ if(self.mpi_switch == False): print("Error, mpi is not initialised.") - + + if isinstance(dimensions, str): + dimensions = [dimensions] + + if isinstance(n_bins, int): + n_bins = np.ones((len(dimensions),), dtype=int)*n_bins + bunch = self[self.mpi.bunch_num] - bins, sorted_index, profile, center = bunch.binning(n_bin=75) - - self.mpi.bins_all = np.empty((len(self), len(bins)), dtype=np.float64) - self.mpi.comm.Allgather([bins, MPI.DOUBLE], [self.mpi.bins_all, MPI.DOUBLE]) - - self.mpi.center_all = np.empty((len(self), len(center)), dtype=np.float64) - self.mpi.comm.Allgather([center, MPI.DOUBLE], [self.mpi.center_all, MPI.DOUBLE]) - - self.mpi.profile_all = np.empty((len(self), len(profile)), dtype=np.int64) - self.mpi.comm.Allgather([profile, MPI.INT64_T], [self.mpi.profile_all, MPI.INT64_T]) charge_per_mp_all = self.mpi.comm.allgather(bunch.charge_per_mp) self.mpi.charge_per_mp_all = charge_per_mp_all - - self.mpi.sorted_index = sorted_index + + for i in range(len(dimensions)): + + dim = dimensions[i] + n_bin = n_bins[i] + bins, sorted_index, profile, center = bunch.binning(dimension=dim, n_bin=n_bin) + + self.mpi.__setattr__(dim + 
"_center", np.empty((len(self), len(center)), dtype=np.float64)) + self.mpi.comm.Allgather([center, MPI.DOUBLE], [self.mpi.__getattribute__(dim + "_center"), MPI.DOUBLE]) + + self.mpi.__setattr__(dim + "_profile", np.empty((len(self), len(profile)), dtype=np.int64)) + self.mpi.comm.Allgather([profile, MPI.INT64_T], [self.mpi.__getattribute__(dim + "_profile"), MPI.INT64_T]) + + self.mpi.__setattr__(dim + "_sorted_index", sorted_index) def plot(self, var, option=None): """ -- GitLab