diff --git a/.gitignore b/.gitignore
index 82f9275..d5c895e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -160,3 +160,5 @@ cython_debug/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
+
+simulation_output/*.csv
diff --git a/main.py b/main.py
index a8d01ef..c915fd4 100644
--- a/main.py
+++ b/main.py
@@ -11,7 +11,7 @@
 # import projections

 # Setup grid parameters
-grid_size = (0.6, 1) # Adjust as needed
+grid_size = (0.6, 0.2) # Adjust as needed
 cell_size = 0.01 # Adjust based on your simulation scale

 # Initialize the grid
@@ -32,13 +32,13 @@
 # Define disk parameters
 disk1_radius = 8*cell_size
-disk1_center = (0.2, 0.5)
+disk1_center = (0.2, 0.1)
 disk1_object_id = 1

 disk2_radius = 8*cell_size
 # Calculate the center of the second disk
 # It should be 2 cells from the edge of the first disk
-disk2_x = disk1_center[0] + disk1_radius + 3*cell_size + disk2_radius
+disk2_x = disk1_center[0] + disk1_radius + 2*cell_size + disk2_radius
 disk2_center = (disk2_x, disk1_center[1])
 disk2_object_id = 2
@@ -56,13 +56,13 @@
     1: { # Object ID 1
         "type": "LinearElastic",
         "density": 1000, # kg/m^3
-        "youngs_modulus": 1e6, # Pa
+        "youngs_modulus": 1e8, # Pa
         "poisson_ratio": 0.3
     },
     2: { # Object ID 2
         "type": "LinearElastic",
         "density": 1000, # kg/m^3
-        "youngs_modulus": 1e6, # Pa
+        "youngs_modulus": 1e8, # Pa
         "poisson_ratio": 0.3
     }
 }
@@ -71,8 +71,8 @@
 particles = Particles(combined_particles, material_properties)

 # Set initial velocities for each object
-particles.set_object_velocity(object_id=1, velocity=[2.0, 0.0]) # Object 1 moving right
-particles.set_object_velocity(object_id=2, velocity=[-2.0, 0.0]) # Object 2 moving left
+particles.set_object_velocity(object_id=1, velocity=[0.50, 0.0]) # Object 1 moving right
+particles.set_object_velocity(object_id=2, velocity=[-0.50, 0.0]) # Object 2 moving left

 print(f"Generated {len(particles1)} particles for disk 1")
 print(f"Generated {len(particles2)} particles for disk 2")
@@ -115,7 +115,7 @@
     solver.step()

     # Save outputs every 100 steps
-    if step % 1000 == 0:
+    if step % 100 == 0:
         output_file = os.path.join(output_dir, f"step_{step:05d}.csv")
         save_particles_to_csv(particles, output_file)
         print(f"Saved output for step {step} to {output_file}")
diff --git a/particles.py b/particles.py
index f6124a8..381375e 100644
--- a/particles.py
+++ b/particles.py
@@ -27,6 +27,8 @@ def create_particles(self, particle_data):
             particle = {
                 'position': position,
                 'velocity': velocity,
+                'acceleration': np.zeros(2),
+                'strain_rate': np.zeros((2, 2)),
                 'density': material_props['density'],
                 'mass': data['mass'],
                 'volume': data['volume'],
diff --git a/results_vis.py b/results_vis.py
index b2d45a2..794f88e 100644
--- a/results_vis.py
+++ b/results_vis.py
@@ -46,7 +46,7 @@ def read_csv(filename):
 num_particles = len(initial_positions)

 # Create the figure and axes
-fig = plt.figure(figsize=(20, 15))
+fig = plt.figure(figsize=(10, 8))
 gs = fig.add_gridspec(2, 2)
 ax1 = fig.add_subplot(gs[0, 0])
 ax2 = fig.add_subplot(gs[0, 1])
@@ -129,11 +129,16 @@ def update(frame):
     stress_max = max(stress_max, von_mises.max())

     # Update color normalizations
-    norm1.autoscale(vel_mag)
-    norm2.vmin = stress_min
-    norm2.vmax = stress_max
+    # norm1.autoscale(vel_mag)
+    # norm1.autoscale(velocities[:, 0])
+    norm1.vmin = -2.0#vel_mag.min()
+    norm1.vmax = 2.0
+    # norm2.autoscale(von_mises)
+    norm2.vmin = 0.0#stress_min
+    norm2.vmax = 1.0e5#stress_max

-    scatter1.set_array(vel_mag)
+    # scatter1.set_array(vel_mag)
+    scatter1.set_array(velocities[:, 0])
     scatter2.set_array(von_mises)

     # Update colorbars
@@ -145,7 +150,7 @@ def update(frame):
     # Calculate and store energies
     youngs_modulus = float(csv_files[frame].split('_')[-1].split('.')[0]) # Extract Young's modulus from filename
     ke = calculate_kinetic_energy(velocities, masses)
-    ee = calculate_elastic_energy(stress, volumes, youngs_modulus)
+    ee = 1e-8*calculate_elastic_energy(stress, volumes, youngs_modulus)
     te = ke + ee
     kinetic_energy.append(ke)
     elastic_energy.append(ee)
@@ -169,7 +174,7 @@ def update(frame):
     return scatter1, scatter2, line1, line2, line3, title

 # Create the animation
-anim = FuncAnimation(fig, update, frames=len(csv_files), interval=500, blit=False)
+anim = FuncAnimation(fig, update, frames=len(csv_files), interval=50, blit=False)

 # Save the animation (optional)
 # anim.save('simulation.mp4', writer='ffmpeg', fps=30)
diff --git a/shape_function.py b/shape_function.py
index 56e2c86..5048c83 100644
--- a/shape_function.py
+++ b/shape_function.py
@@ -12,12 +12,29 @@ def shape_function(particle_pos, node_pos, cell_size):
     Returns:
         tuple: Shape function values (sx, sy)
     """
-    dx = (particle_pos[0] - node_pos[0]) / cell_size
-    dy = (particle_pos[1] - node_pos[1]) / cell_size
-
-    sx = 1 - abs(dx) if abs(dx) < 1 else 0
-    sy = 1 - abs(dy) if abs(dy) < 1 else 0
-
+    L = cell_size
+    lp = cell_size / 4
+
+    def piecewise_shape(xp, xv):
+        x_diff = xp - xv
+        if x_diff <= -L - lp:
+            return 0
+        elif -L - lp < x_diff <= -L + lp:
+            return ((L + lp + x_diff) ** 2) / (4 * L * lp)
+        elif -L + lp < x_diff <= -lp:
+            return 1 + (x_diff / L)
+        elif -lp < x_diff <= lp:
+            return 1 - ((x_diff ** 2 + lp ** 2) / (2 * L * lp))
+        elif lp < x_diff <= L - lp:
+            return 1 - (x_diff / L)
+        elif L - lp < x_diff <= L + lp:
+            return ((L + lp - x_diff) ** 2) / (4 * L * lp)
+        else:
+            return 0
+
+    sx = piecewise_shape(particle_pos[0], node_pos[0])
+    sy = piecewise_shape(particle_pos[1], node_pos[1])
+
     return sx, sy

 def gradient_shape_function(particle_pos, node_pos, cell_size):
@@ -32,11 +49,28 @@ def gradient_shape_function(particle_pos, node_pos, cell_size):
     Returns:
         tuple: Gradient of the shape function (dsx_dx, dsy_dy)
     """
-    dx = (particle_pos[0] - node_pos[0]) / cell_size
-    dy = (particle_pos[1] - node_pos[1]) / cell_size
-
-    dsx_dx = -1 if -1 < dx < 0 else (1 if 0 <= dx < 1 else 0)
-    dsy_dy = -1 if -1 < dy < 0 else (1 if 0 <= dy < 1 else 0)
-
-    return dsx_dx / cell_size, dsy_dy / cell_size
+    L = cell_size
+    lp = cell_size / 4
+
+    def piecewise_gradient(xp, xv):
+        x_diff = xp - xv
+        if x_diff <= -L - lp:
+            return 0
+        elif -L - lp < x_diff <= -L + lp:
+            return (L + lp + x_diff) / (2 * L * lp)
+        elif -L + lp < x_diff <= -lp:
+            return 1 / L
+        elif -lp < x_diff <= lp:
+            return -x_diff / (L * lp)
+        elif lp < x_diff <= L - lp:
+            return -1 / L
+        elif L - lp < x_diff <= L + lp:
+            return -(L + lp - x_diff) / (2 * L * lp)
+        else:
+            return 0
+
+    dsx_dx = piecewise_gradient(particle_pos[0], node_pos[0])
+    dsy_dy = piecewise_gradient(particle_pos[1], node_pos[1])
+
+    return dsx_dx, dsy_dy
diff --git a/solver.py b/solver.py
index e7b3893..4cacb6b 100644
--- a/solver.py
+++ b/solver.py
@@ -112,14 +112,14 @@ def update_grid(self):
                 node.velocity += gravity * dt

         # Clear forces for the next step
-        for node in self.grid.nodes.flat:
-            node.force = np.zeros(2)
+        # for node in self.grid.nodes.flat:
+        #     node.force = np.zeros(2)

     def grid_to_particle(self):
         for p_idx, particle in enumerate(self.particles.particles):
             particle_velocity_update = np.zeros(2)
             strain_rate = np.zeros((2, 2))
-
+            acceleration_update = np.zeros(2)
             nearby_nodes = self.grid.get_nearby_nodes(particle['position'])
             for node in nearby_nodes:
@@ -129,7 +129,7 @@ def grid_to_particle(self):
                 shape_value = shape_x * shape_y

                 particle_velocity_update += shape_value * node.velocity
-
+                acceleration_update += shape_value * node.force / node.mass
                 # Calculate strain rate components
                 strain_rate[0, 0] += grad_shape_x * shape_y * node.velocity[0] # exx
                 strain_rate[0, 1] += 0.5 * (grad_shape_x * shape_y * node.velocity[1] +
@@ -139,12 +139,14 @@ def grid_to_particle(self):

             # Update particle velocity
             particle['velocity'] = particle_velocity_update
+            particle['acceleration'] = acceleration_update

             # Store strain rate in particle
             particle['strain_rate'] = strain_rate

     def update_particles(self):
         for particle in self.particles.particles:
+            # particle['velocity']+= particle['acceleration']*self.dt
             # Update position using current velocity
             particle['position'] += particle['velocity'] * self.dt
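
Note (not part of the patch): the new piecewise weights in shape_function.py appear to be the standard uGIMP functions with a particle half-width lp = cell_size / 4, so each particle now reaches nodes up to cell_size + lp away instead of one cell. A quick sanity check is to confirm that, for a particle away from the domain boundary, the 2D weights over the surrounding nodes sum to 1 and the weight gradients sum to 0, combining sx, sy and dsx_dx exactly as the solver does. This is a minimal sketch under the assumption that shape_function.py is importable as-is; the grid indices, sample count, and tolerances are illustrative only:

import numpy as np
from shape_function import shape_function, gradient_shape_function

cell_size = 0.01
rng = np.random.default_rng(0)
for _ in range(100):
    # Random particle position inside the cell whose lower-left node is (5, 5).
    particle = (np.array([5.0, 5.0]) + rng.uniform(0.0, 1.0, 2)) * cell_size
    weight_sum = 0.0
    grad_x_sum = 0.0
    # A 6x6 node patch is enough to cover the support of +/- (cell_size + lp).
    for i in range(3, 9):
        for j in range(3, 9):
            node = np.array([i, j], dtype=float) * cell_size
            sx, sy = shape_function(particle, node, cell_size)
            dsx_dx, _ = gradient_shape_function(particle, node, cell_size)
            weight_sum += sx * sy      # 2D weight, as used in the solver
            grad_x_sum += dsx_dx * sy  # x-component of the 2D weight gradient
    assert abs(weight_sum - 1.0) < 1e-9, "weights should form a partition of unity"
    assert abs(grad_x_sum) < 1e-9, "weight gradients should sum to zero"
print("uGIMP weight checks passed")

The check only holds for particles whose support lies fully inside the grid; near the boundary the weights no longer sum to 1.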