fargopy 0.4.0__py3-none-any.whl → 1.0.1__py3-none-any.whl

fargopy/fsimulation.py DELETED
@@ -1,429 +0,0 @@
- import pandas as pd
- import numpy as np
- import fargopy as fp
-
- import matplotlib.pyplot as plt
- import plotly.figure_factory as ff
- from plotly.subplots import make_subplots
- import plotly.graph_objects as go
- from matplotlib.animation import FFMpegWriter
-
- from ipywidgets import interact, FloatSlider, IntSlider
- from celluloid import Camera
- from IPython.display import HTML, Video
-
- from scipy.interpolate import griddata
- from scipy.integrate import solve_ivp
- from tqdm import tqdm
-
-
- class FieldInterpolate:
-     def __init__(self, sim):
-         self.sim = sim
-         self.df = None
-         self.plane = None
-         self.angle = None
-
-     def load_data(self, field=None, plane=None, angle=None, snapshots=None):
-         """
-         Loads data in 2D or 3D depending on the provided parameters.
-
-         Parameters:
-             field (list of str, optional): List of fields to load (e.g., ["gasdens", "gasv"]).
-             plane (str, optional): Plane for 2D data ('XZ', 'XY', 'YZ'). Required for 2D.
-             angle (float, optional): Angle for the 2D slice. Required for 2D.
-             snapshots (list or int, optional): List of snapshot indices or a single snapshot to load. Required for both 2D and 3D.
-         Returns:
-             pd.DataFrame: DataFrame containing the loaded data.
-         """
-         if field is None:
-             raise ValueError("You must specify at least one field to load using the 'fields' parameter.")
-
-         if (plane and not angle) or (angle and not plane):
-             raise ValueError("Both 'plane' and 'angle' must be provided for 2D data.")
-
-         if angle and not isinstance(angle, str):
-             raise ValueError("'angle' must be a str example: angle='theta=1.5' [rad]")
-
-         if not isinstance(snapshots, (int, list, tuple)):
-             raise ValueError("'snapshots' must be an integer, a list, or a tuple.")
-
-         if isinstance(snapshots, (list, tuple)) and len(snapshots) == 2:
-             if snapshots[0] > snapshots[1]:
-                 raise ValueError("The range in 'snapshots' is invalid. The first value must be less than or equal to the second.")
-
-         if not hasattr(self.sim, "domains") or self.sim.domains is None:
-             raise ValueError("Simulation domains are not loaded. Ensure the simulation data is properly initialized.")
-
-         # Convert a single snapshot to a list
-         if isinstance(snapshots, int):
-             snapshots = [snapshots]
-
-         # Handle the case where snapshots is a single value or a list with one value
-         if len(snapshots) == 1:
-             snaps = snapshots
-             time_values = [0] # Single snapshot corresponds to a single time value
-         else:
-             snaps = np.arange(snapshots[0], snapshots[1] + 1)
-             time_values = np.linspace(0, 1, len(snaps))
-
-         if plane and angle: # Load 2D data
-
-             # Map plane to coordinate names
-             plane_map = {
-                 "XY": ("x", "y", "vx", "vy"),
-                 "XZ": ("x", "z", "vx", "vz"),
-                 "YZ": ("y", "z", "vy", "vz")
-             }
-
-             if plane not in plane_map:
-                 raise ValueError(f"Invalid plane '{plane}'. Valid options are 'XY', 'XZ', 'YZ'.")
-
-             coord1, coord2, vel1, vel2 = plane_map[plane]
-
-             # Dynamically create DataFrame columns based on the fields
-             columns = ["snapshot", "time", coord1, coord2]
-             if field=="gasdens":
-                 print(f"Loading 2D density data for plane {plane} at angle {angle} rad.")
-                 columns.append("gasdens")
-             if field=="gasv":
-                 columns.extend([vel1, vel2])
-                 print(f"Loading 2D gas velocity data for plane {plane} at angle {angle} rad.")
-
-             if field=="gasenergy":
-                 columns.append("gasenergy")
-                 print(f"Loading 2D gas energy data for plane {plane} at angle {angle} rad.")
-             df_snapshots = pd.DataFrame(columns=columns)
-
-             for i, snap in enumerate(snaps):
-                 row = {"snapshot": snap, "time": time_values[i]}
-
-                 # Assign coordinates for all fields
-                 gasv = self.sim.load_field('gasv', snapshot=snap, type='vector')
-                 _, mesh = gasv.meshslice(slice=angle)
-                 coord1_vals, coord2_vals = getattr(mesh, coord1), getattr(mesh, coord2)
-                 row[coord1] = coord1_vals
-                 row[coord2] = coord2_vals
-
-                 if field=="gasdens" :
-                     gasd = self.sim.load_field('gasdens', snapshot=snap, type='scalar')
-                     gasd_slice, _ = gasd.meshslice(slice=angle)
-                     row["gasdens"] = gasd_slice
-
-                 if field=="gasv":
-                     gasvx, gasvy, gasvz = gasv.to_cartesian()
-                     vel1_slice, _ = getattr(gasvx, f"meshslice")(slice=angle)
-                     vel2_slice, _ = getattr(gasvy, f"meshslice")(slice=angle)
-                     row[vel1] = vel1_slice
-                     row[vel2] = vel2_slice
-
-                 if field=="gasenergy":
-                     gasenergy = self.sim.load_field('gasenergy', snapshot=snap, type='scalar')
-                     gasenergy_slice, _ = gasenergy.meshslice(slice=angle)
-                     row["gasenergy"] = gasenergy_slice
-
-                 # Convert the row to a DataFrame and concatenate it
-                 row_df = pd.DataFrame([row])
-                 df_snapshots = pd.concat([df_snapshots, row_df], ignore_index=True)
-
-             self.df = df_snapshots
-             return df_snapshots
-
-         elif plane is None and angle is None: # Load 3D data
-             print("Loading 3D data.")
-
-
-             # Generate 3D mesh
-             theta, r, phi = np.meshgrid(self.sim.domains.theta, self.sim.domains.r, self.sim.domains.phi, indexing='ij')
-             x, y, z = r * np.sin(theta) * np.cos(phi), r * np.sin(theta) * np.sin(phi), r * np.cos(theta)
-
-
-             # Create an empty DataFrame for the current field
-             columns = ["snapshot", "time", "x", "y", "z"]
-             if field == "gasv":
-                 columns.extend(["vx", "vy", "vz"])
-             else:
-                 columns.append(field)
-
-             df_snapshots = pd.DataFrame(columns=columns)
-
-             for i, snap in enumerate(snaps):
-                 row = {"snapshot": snap, "time": time_values[i], "x": x.ravel(), "y": y.ravel(), "z": z.ravel()}
-
-                 if field == "gasdens":
-                     gasd = self.sim.load_field("gasdens", snapshot=snap, type="scalar")
-                     row[field] = gasd.data.ravel()
-
-                 elif field == "gasv":
-                     gasv = self.sim.load_field("gasv", snapshot=snap, type="vector")
-                     gasvx, gasvy, gasvz = gasv.to_cartesian()
-                     row["vx"] = gasvx.data.ravel()
-                     row["vy"] = gasvy.data.ravel()
-                     row["vz"] = gasvz.data.ravel()
-
-                 elif field == "gasenergy":
-                     gasenergy = self.sim.load_field("gasenergy", snapshot=snap, type="scalar")
-                     row[field] = gasenergy.data.ravel()
-
-                 # Append the row to the DataFrame
-                 df_snapshots = pd.concat([df_snapshots, pd.DataFrame([row])], ignore_index=True)
-
-             self.df = df_snapshots
-             # Return the single DataFrame
-             return df_snapshots
-
-
-     def evaluate(self, time, var1, var2, var3=None):
-         """
-         Interpolates a field in 2D or 3D depending on the provided parameters.
-
-         Parameters:
-             time (float): Time at which to interpolate.
-             var1, var2 (numpy.ndarray or float): Spatial coordinates in 2D.
-             var3 (numpy.ndarray or float, optional): Additional coordinate for 3D. If None, 2D is assumed.
-
-         Returns:
-             numpy.ndarray or float: Interpolated field values at the given coordinates.
-             If velocity fields are present, returns a tuple (vx, vy, vz) or (vx, vy).
-         """
-         # Automatically determine the field to interpolate
-         if "gasdens" in self.df.columns:
-             field_name = "gasdens"
-         elif "gasenergy" in self.df.columns:
-             field_name = "gasenergy"
-         elif {"vx", "vy", "vz"}.issubset(self.df.columns): # 3D velocity
-             field_name = ["vx", "vy", "vz"]
-         elif {"vx", "vy"}.issubset(self.df.columns): # 2D velocity (vx, vy)
-             field_name = ["vx", "vy"]
-         elif {"vx", "vz"}.issubset(self.df.columns): # 2D velocity (vx, vz)
-             field_name = ["vx", "vz"]
-         elif {"vy", "vz"}.issubset(self.df.columns): # 2D velocity (vy, vz)
-             field_name = ["vy", "vz"]
-         else:
-             raise ValueError("No valid field found in the DataFrame for interpolation.")
-
-         # Sort the DataFrame by time
-         df_sorted = self.df.sort_values("time")
-         idx = df_sorted["time"].searchsorted(time) - 1
-         if idx == -1:
-             idx = 0
-         idx_after = min(idx + 1, len(df_sorted) - 1)
-
-         t0, t1 = df_sorted.iloc[idx]["time"], df_sorted.iloc[idx_after]["time"]
-         factor = (time - t0) / (t1 - t0) if t1 > t0 else 0
-         if factor < 0:
-             factor = 0
-
-         # Check if the input is a single point or a mesh
-         is_scalar = np.isscalar(var1) and np.isscalar(var2) and (var3 is None or np.isscalar(var3))
-         if is_scalar:
-             result_shape = ()
-         else:
-             result_shape = var1.shape # Preserve the shape of the input mesh
-
-         def interp(idx, field):
-             if var3 is not None: # 3D interpolation
-                 coord_x = np.array(df_sorted.iloc[idx]["x"])
-                 coord_y = np.array(df_sorted.iloc[idx]["y"])
-                 coord_z = np.array(df_sorted.iloc[idx]["z"])
-                 points = np.column_stack((coord_x.ravel(), coord_y.ravel(), coord_z.ravel()))
-                 data = np.array(df_sorted.iloc[idx][field]).ravel()
-                 return griddata(points, data, (var1, var2, var3), method='nearest', fill_value=0.0)
-             else: # 2D interpolation
-                 if 'x' in self.df.columns and 'y' in self.df.columns:
-                     coord1 = np.array(df_sorted.iloc[idx]["x"])
-                     coord2 = np.array(df_sorted.iloc[idx]["y"])
-                 elif 'x' in self.df.columns and 'z' in self.df.columns:
-                     coord1 = np.array(df_sorted.iloc[idx]["x"])
-                     coord2 = np.array(df_sorted.iloc[idx]["z"])
-                 elif 'y' in self.df.columns and 'z' in self.df.columns:
-                     coord1 = np.array(df_sorted.iloc[idx]["y"])
-                     coord2 = np.array(df_sorted.iloc[idx]["z"])
-                 else:
-                     raise ValueError("Insufficient spatial coordinates for interpolation.")
-                 points = np.column_stack((coord1.ravel(), coord2.ravel()))
-                 data = np.array(df_sorted.iloc[idx][field]).ravel()
-                 return griddata(points, data, (var1, var2), method='linear', fill_value=0.0)
-
-         # Preallocate arrays for results
-         if isinstance(field_name, list): # Velocity (multiple fields)
-             results = []
-             for field in field_name:
-                 interpolated = (1 - factor) * interp(idx, field) + factor * interp(idx_after, field)
-                 if is_scalar:
-                     results.append(interpolated.item()) # Extract scalar value
-                 else:
-                     results.append(interpolated)
-             return results
-         else: # Scalar field (gasdens or gasenergy)
-             interpolated = (1 - factor) * interp(idx, field_name) + factor * interp(idx_after, field_name)
-             if is_scalar:
-                 return interpolated.item() # Extract scalar value
-             else:
-                 return interpolated
-
-
-
-
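For reference, a minimal usage sketch of the FieldInterpolate class removed above, as it shipped in the 0.4.0 wheel. The output directory, the slice string 'phi=0.0', and the snapshot range are illustrative assumptions, and the import path assumes the class is taken from the deleted fargopy.fsimulation module. evaluate() blends the two snapshots that bracket the requested time linearly, f(t) ~ (1 - a)*f(t0) + a*f(t1) with a = (t - t0)/(t1 - t0).

```python
# Illustrative sketch only (not part of the package): assumes a FARGO3D
# output directory and the deleted fargopy.fsimulation module from the
# 0.4.0 wheel.
import numpy as np
import fargopy as fp
from fargopy.fsimulation import FieldInterpolate

sim = fp.Simulation(output_dir='outputs/p3diso')  # hypothetical output path
interp = FieldInterpolate(sim)

# Load 2D gas density on the XZ plane; the slice string follows the
# meshslice convention quoted in the docstring (e.g. 'theta=1.5').
df = interp.load_data(field='gasdens', plane='XZ',
                      angle='phi=0.0', snapshots=(0, 10))

# Evaluate the density at normalized time t = 0.5 on a small x-z grid.
x = np.linspace(0.5, 2.0, 50)
z = np.linspace(-0.2, 0.2, 50)
X, Z = np.meshgrid(x, z, indexing='ij')
rho = interp.evaluate(0.5, X, Z)
```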
- class fluidmotion:
-     def __init__(self, plane, angle, num_snapshots, dir_path):
-         """
-         Initializes the Simulation class.
-
-         Parameters:
-             plane (str): The plane for 2D data ('XZ', 'XY', 'YZ').
-             angle (float): The angle for the 2D slice.
-             num_snapshots (int): Number of snapshots for 2D data.
-             dir_path (str): Directory path where the simulation data is stored.
-         """
-         self.sim = fp.Simulation(output_dir=dir_path)
-         self.data_handler = DataHandler(self.sim)
-         self.data_handler.load_data(plane, angle, num_snapshots)
-
-     def velocity_field(self, t, y):
-         """
-         Computes the velocity field at a given time and position.
-
-         Parameters:
-             t (float): Time at which to compute the velocity field.
-             y (array-like): Position [var1, var2] where the velocity is computed.
-
-         Returns:
-             numpy.ndarray: Velocity vector [v1, v2] at the given position and time.
-         """
-         var1, var2 = y
-         v1, v2 = self.data_handler.interpolate_velocity(t, np.array([var1]), np.array([var2]))
-         return np.array([v1[0], v2[0]])
-
-     def integrate_particles(self, particle_pos, time, dt=0.01):
-         """
-         Integrates all particles using an explicit Euler step.
-
-         Parameters:
-             particle_pos (numpy.ndarray): Array of particle positions (shape: [n_particles, 2]).
-             time (float): Current time of the simulation.
-             dt (float): Time step for integration.
-
-         Returns:
-             numpy.ndarray: Updated particle positions after integration.
-         """
-         if len(particle_pos) == 0:
-             return np.array([])
-
-         v1, v2 = self.data_handler.interpolate_velocity(time, particle_pos[:, 0], particle_pos[:, 1])
-
-         # Euler step: x_{n+1} = x_n + v * dt
-         particle_pos[:, 0] += v1 * dt
-         particle_pos[:, 1] += v2 * dt
-
-         return particle_pos
-
-     def generate_uniform_particles(self, var1_min, var1_max, var2_min, var2_max, num_particles):
-         """
-         Generates uniformly distributed particles within a specified region.
-
-         Parameters:
-             var1_min, var1_max (float): Range for the first coordinate.
-             var2_min, var2_max (float): Range for the second coordinate.
-             num_particles (int): Number of particles to generate.
-
-         Returns:
-             numpy.ndarray: Array of particle positions (shape: [num_particles, 2]).
-         """
-         grid_size = int(np.sqrt(num_particles))
-         var1_candidates = np.linspace(var1_min + 0.01, var1_max - 0.01, grid_size)
-         var2_candidates = np.linspace(var2_min + 0.001, var2_max - 0.001, grid_size)
-         VAR1_grid, VAR2_grid = np.meshgrid(var1_candidates, var2_candidates, indexing='ij')
-
-         density_values = self.data_handler.interpolate_density(0, VAR1_grid, VAR2_grid)
-         valid_mask = density_values > 0
-
-         valid_var1 = VAR1_grid[valid_mask]
-         valid_var2 = VAR2_grid[valid_mask]
-
-         if len(valid_var1) == 0:
-             return []
-
-         num_valid_points = min(num_particles, len(valid_var1))
-         new_particles = np.column_stack((valid_var1[:num_valid_points], valid_var2[:num_valid_points]))
-
-         return new_particles
-
-     def run_simulation(self, res, var1_min, var1_max, var2_min, var2_max, ts, npi, max_lifetime, generation_interval):
-         """
-         Runs the particle simulation and generates an animation.
-
-         Parameters:
-             res (int): Resolution of the grid for density interpolation.
-             var1_min, var1_max (float): Range for the first coordinate.
-             var2_min, var2_max (float): Range for the second coordinate.
-             ts (int): Number of time steps for the simulation.
-             npi (int): Number of particles to generate at each interval.
-             max_lifetime (int): Maximum lifetime of particles.
-             generation_interval (int): Interval for generating new particles.
-
-         Returns:
-             IPython.display.Video: Animation of the particle simulation.
-         """
-         var1_reg, var2_reg = np.linspace(var1_min, var1_max, res), np.linspace(var2_min, var2_max, res)
-         VAR1_reg, VAR2_reg = np.meshgrid(var1_reg, var2_reg, indexing='ij')
-
-         t_span = (0, 1)
-         t_eval = np.linspace(t_span[0], t_span[1], ts)
-
-         particle_pos = np.empty((0, 2))
-         lifetimes = np.empty(0)
-         new_particles = self.generate_uniform_particles(var1_min, var1_max, var2_min, var2_max, npi)
-
-         # Determine the y-axis label based on the plane
-         plane = self.data_handler.plane
-         y_label = "Z [AU]" if plane == "XZ" else "Y [AU]"
-
-         fig, ax = plt.subplots(figsize=(8, 8))
-         camera = Camera(fig)
-
-         with tqdm(total=len(t_eval), desc="Generating animation", unit="frame") as pbar:
-             for frame in range(len(t_eval)):
-                 time = t_eval[frame]
-                 if frame % generation_interval == 0:
-                     particle_pos = np.vstack([particle_pos, new_particles])
-                     lifetimes = np.concatenate([lifetimes, np.full(len(new_particles), max_lifetime)])
-
-                 updated_pos = self.integrate_particles(particle_pos, time, dt=0.01)
-                 updated_pos = np.array([pos for pos in updated_pos if pos is not None])
-                 updated_lifetimes = lifetimes - 1
-
-                 valid_indices = updated_lifetimes > 0
-                 particle_pos = updated_pos[valid_indices]
-                 lifetimes = updated_lifetimes[valid_indices]
-
-                 lifetimes_normalized = lifetimes / max_lifetime
-
-                 # Add density interpolation as background
-                 gasd_interpolated = self.data_handler.interpolate_density(time, VAR1_reg, VAR2_reg)
-                 c = ax.pcolormesh(VAR1_reg, VAR2_reg, np.log10(gasd_interpolated * self.sim.URHO * 1e3),
-                                   cmap="viridis", shading='auto')
-
-                 # Plot particles
-                 if len(particle_pos) > 0:
-                     ax.scatter(particle_pos[:, 0], particle_pos[:, 1], c='lightgray', alpha=lifetimes_normalized, s=1.0)
-
-                 ax.set_xlim(var1_min, var1_max)
-                 ax.set_ylim(var2_min, var2_max)
-                 ax.set_xlabel(r"$r \ [AU]$", size=12)
-                 ax.set_ylabel(y_label, size=12)
-                 camera.snap()
-
-                 pbar.update(1)
-
-         # Add color bar
-         fig.colorbar(c, ax=ax, label=r'$\log_{10}(\rho)$ [kg/m$^3$]')
-         plt.close(fig)
-         animation = camera.animate()
-         video_filename = 'particles.mp4'
-         animation.save(video_filename, writer=FFMpegWriter(fps=10, codec='libx264', bitrate=5000))
-
-         # Display the video in the interactive environment
-         return Video(video_filename, embed=True)
-
-
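The fluidmotion class above advects tracer particles through the interpolated gas velocity field with an explicit Euler step (x_{n+1} = x_n + v*dt), injecting new particles at a fixed interval and fading them out by lifetime; note that its __init__ refers to a DataHandler helper that is not defined in this module. Below is a self-contained sketch of that Euler advection step, with a synthetic rotation field standing in for the interpolated velocities.

```python
# Standalone sketch of the explicit Euler update used by
# fluidmotion.integrate_particles; the velocity field here is a synthetic
# solid-body rotation, purely for illustration.
import numpy as np

def velocity(pos):
    # v = (-y, x): rotation about the origin
    return np.column_stack((-pos[:, 1], pos[:, 0]))

rng = np.random.default_rng(0)
particles = rng.uniform(-1.0, 1.0, size=(100, 2))

dt = 0.01
for _ in range(500):
    particles = particles + velocity(particles) * dt  # Euler step

print(particles[:3])
```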
fargopy/util.py DELETED
@@ -1,21 +0,0 @@
- ###############################################################
- # FARGOpy interdependencies
- ###############################################################
- import fargopy
-
- ###############################################################
- # Required packages
- ###############################################################
-
- ###############################################################
- # Constants
- ###############################################################
-
- ###############################################################
- # Classes
- ###############################################################
-
- class Util(object):
-     """Util routines for FARGOpy.
-     """
-     pass
fargopy/version.py DELETED
@@ -1 +0,0 @@
- version='0.4.0'
@@ -1,15 +0,0 @@
- #!/bin/sh
- FP_DOTDIR=$HOME/.fargopy/
-
- if [ ! -d $FP_DOTDIR ]
- then
-     # This is the first time that FARGOpy will be executed
-     cat <<EOF > /tmp/ifargopy_initialize.py
- import fargopy as fp
- fp.initialize('configure')
- print('We have configured fargopy for the first time. Run it again.')
- EOF
-     ipython -i /tmp/ifargopy_initialize.py
- else
-     ipython -i $FP_DOTDIR/ifargopy.py $@
- fi
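The launcher script above checks for ~/.fargopy/: on the first run it writes a temporary Python script that configures FARGOpy and asks the user to run it again; on later runs it starts IPython with $FP_DOTDIR/ifargopy.py. A minimal sketch of the first-run step in plain Python, assuming fargopy exposes initialize() exactly as the script calls it:

```python
# Sketch of the first-run configuration performed by the launcher script.
# Assumes fp.initialize('configure') creates ~/.fargopy/ (including
# ifargopy.py), as the script's directory check implies.
import fargopy as fp

fp.initialize('configure')
print('We have configured fargopy for the first time. Run it again.')
```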