One Billion Star 3D Stellar Density Isosurface Data Visualization Using Python and Claude.ai [Images and Video]

by matt392 in Circuits > Computers

20 Views, 0 Favorites, 0 Comments

One Billion Star 3D Stellar Density Isosurface Data Visualization Using Python and Claude.ai [Images and Video]

1-billion-stars-two-zoom.png
1-billion-stars-300-grid-zoom-#2.png
1-billion-stars-300-grid-zoom.png
1-billion-stars-one.png
Structure-1-billion-star-pythonPNG.png

One Billion Star 3D Stellar Density Isosurface Data Visualization Using Python and Claude.ai.

Video link: Rotating 3D Video

Still images attached.

Python code below:

"""
3D Stellar Density Isosurface — Streaming Large Dataset Version
GAIA DR3 | Parallax 4.2 to 32,600 light years | 1 billion stars
Streaming chunked loading — no all_chunks list — RAM-safe for any file size
"""

import os
import pandas as pd
import numpy as np
from scipy.ndimage import gaussian_filter
import pyvista as pv
import multiprocessing as mp
import time
import imageio

# NOTE(review): placeholder path — must be replaced with the real data
# directory, otherwise os.chdir raises FileNotFoundError at startup.
os.chdir(r'Place file path here to data files.')
# ╔══════════════════════════════════════════════════════════════╗
# ║ SETTINGS ║
# ╚══════════════════════════════════════════════════════════════╝

# Input CSVs, each one a parallax slice of the Gaia DR3 export
# (file names encode the parallax range in milliarcseconds).
CSV_FILES = [
    'Point-5-to-point-6.csv', # 4.8 GB
    'Point4-to-point5.csv', # 6.0 GB
    'Point3-Point4.csv', # 7.1 GB
    'Point1-to-Point2.csv', # 7.2 GB
    'Point2-Point3.csv', # 7.6 GB
    'Point6-1-mas.csv', # 11.1 GB
    '1-to-769-mas.csv', # 11.9 GB ← biggest last
]

# Column names exactly as they appear in the Gaia CSV files.
RA_COL = 'ra'
DEC_COL = 'dec'
PAR_COL = 'parallax'

MAX_DIST_LY = 32600  # distance cutoff / cube half-width, in light years
GRID_SIZE = 300      # linspace edge points per axis -> (GRID_SIZE-1)^3 histogram cells
SIGMA = 0.7          # Gaussian smoothing width, in grid cells

N_CORES = 4             # worker processes in the multiprocessing pool
CHUNK_SIZE = 100_000    # CSV rows streamed per chunk

# NOTE(review): RING_DISTANCES, LABELS and OUTPUT_IMAGE are defined but
# never referenced below — presumably left over from an earlier version.
RING_DISTANCES = [500, 1000, 2000, 3262, 5000, 8000, 10000, 13000, 16000]
LABELS = []
OUTPUT_IMAGE = 'isosurface_1B_still.png'

# ╔══════════════════════════════════════════════════════════════╗
# ║ MULTIPROCESSING WORKER ║
# ╚══════════════════════════════════════════════════════════════╝

def process_chunk_worker(args):
    """Convert one CSV chunk of (ra, dec, parallax) rows into a partial
    3D stellar-density histogram.

    Parameters
    ----------
    args : tuple
        ``(chunk, x_range, y_range, z_range, max_dist)`` where ``chunk``
        is a pandas DataFrame holding the RA_COL/DEC_COL/PAR_COL columns,
        the three ranges are the histogram bin edges in light years, and
        ``max_dist`` is the cube half-width cutoff in light years.

    Returns
    -------
    (numpy.ndarray, int)
        A float32 histogram of shape ``(gs, gs, gs)`` with
        ``gs = len(x_range) - 1``, and the number of stars binned.
    """
    chunk, x_range, y_range, z_range, max_dist = args
    gs = len(x_range) - 1
    empty = np.zeros((gs, gs, gs), dtype=np.float32)

    # Work on plain NumPy arrays from the start: this avoids the full
    # DataFrame copy and the SettingWithCopy hazard of assigning a new
    # column ('distance_ly') to a boolean-filtered frame.
    ra = chunk[RA_COL].to_numpy(dtype=np.float64)
    dec = chunk[DEC_COL].to_numpy(dtype=np.float64)
    par = chunk[PAR_COL].to_numpy(dtype=np.float64)

    # Keep rows with finite coordinates and a positive parallax
    # (zero/negative parallaxes are unphysical astrometric solutions).
    valid = np.isfinite(ra) & np.isfinite(dec) & (par > 0)
    ra, dec, par = ra[valid], dec[valid], par[valid]
    if ra.size == 0:
        return empty, 0

    # distance [ly] ≈ 3262 / parallax [mas]  (1000 pc·mas × ~3.262 ly/pc)
    dist = 3262.0 / par
    keep = np.isfinite(dist) & (dist > 0) & (dist <= max_dist)
    ra, dec, dist = ra[keep], dec[keep], dist[keep]
    if ra.size == 0:
        return empty, 0

    # Spherical (RA, Dec, distance) -> Cartesian (x, y, z), light years.
    ra_rad = np.radians(ra)
    dec_rad = np.radians(dec)
    cos_dec = np.cos(dec_rad)
    xs = dist * cos_dec * np.cos(ra_rad)
    ys = dist * cos_dec * np.sin(ra_rad)
    zs = dist * np.sin(dec_rad)

    # Clip to the cube so every surviving star lands inside the bin edges.
    inside = (np.abs(xs) < max_dist) & (np.abs(ys) < max_dist) & (np.abs(zs) < max_dist)
    xs, ys, zs = xs[inside], ys[inside], zs[inside]
    if xs.size == 0:
        return empty, 0

    H, _ = np.histogramdd(
        np.column_stack([xs, ys, zs]),
        bins=(x_range, y_range, z_range),
    )
    return H.astype(np.float32), int(xs.size)


# ╔══════════════════════════════════════════════════════════════╗
# ║ STREAMING CHUNK GENERATOR ║
# ╚══════════════════════════════════════════════════════════════╝

def chunk_generator(csv_path, x_range, y_range, z_range, max_dist):
    """Stream ready-to-process work items for the worker pool.

    Reads ``csv_path`` lazily with pandas so at most CHUNK_SIZE rows are
    resident at once; each yielded tuple is one task for
    ``process_chunk_worker``. The full file is never loaded into RAM.
    """
    reader = pd.read_csv(
        csv_path,
        usecols=[RA_COL, DEC_COL, PAR_COL],
        dtype={RA_COL: 'float32', DEC_COL: 'float32', PAR_COL: 'float32'},
        chunksize=CHUNK_SIZE,
        low_memory=False,
    )
    for frame in reader:
        yield frame, x_range, y_range, z_range, max_dist


# ╔══════════════════════════════════════════════════════════════╗
# ║ MAIN ║
# ╚══════════════════════════════════════════════════════════════╝

if __name__ == '__main__':
    print("Script started!")
    t0 = time.time()

    print("=" * 65)
    print(" 3D Stellar Density Isosurface — Streaming 1 Billion Stars")
    print("=" * 65)
    print(f" Files: {CSV_FILES}")
    print(f" Max dist: {MAX_DIST_LY:,} ly")
    print(f" Grid: {GRID_SIZE}³ = {GRID_SIZE**3:,} cells")
    print(f" Cores: {N_CORES}")
    print(f" Chunk size: {CHUNK_SIZE:,}")
    print("=" * 65)

    # ── Build grid edges ──────────────────────────────────────
    # GRID_SIZE edge points per axis over [-lim, lim] give
    # (GRID_SIZE - 1) histogram bins per axis.
    lim = MAX_DIST_LY + 5
    x_range = np.linspace(-lim, lim, GRID_SIZE)
    y_range = np.linspace(-lim, lim, GRID_SIZE)
    z_range = np.linspace(-lim, lim, GRID_SIZE)

    # Running density accumulator; workers return partial histograms that
    # are summed here, so only one full grid is ever held in RAM.
    H = np.zeros((GRID_SIZE - 1, GRID_SIZE - 1, GRID_SIZE - 1), dtype=np.float32)
    total_stars = 0

    # ── Stream & process each file ────────────────────────────
    for csv_path in CSV_FILES:
        if not os.path.exists(csv_path):
            print(f"\nWARNING: File not found — {csv_path} (skipping)")
            continue

        file_size_gb = os.path.getsize(csv_path) / 1e9
        print(f"\nStreaming: {csv_path} ({file_size_gb:.2f} GB)")
        file_stars = 0
        chunk_count = 0

        # imap consumes the generator lazily and preserves chunk order,
        # so only a few chunks are ever in flight at once.
        with mp.Pool(processes=N_CORES) as pool:
            gen = chunk_generator(csv_path, x_range, y_range, z_range, MAX_DIST_LY)
            for h_part, count in pool.imap(process_chunk_worker, gen, chunksize=1):
                H += h_part
                file_stars += count
                chunk_count += 1
                # Progress heartbeat every 10 chunks (~1M rows).
                if chunk_count % 10 == 0:
                    elapsed = time.time() - t0
                    rate = file_stars / elapsed if elapsed > 0 else 0
                    print(f" Chunk {chunk_count:>5} | Stars: {file_stars:,} | "
                          f"{elapsed:.0f}s | {rate/1e6:.2f}M stars/sec")

        print(f" File total: {file_stars:,} stars")
        total_stars += file_stars

    print(f"\n{'='*65}")
    print(f"All files loaded. Total valid stars: {total_stars:,}")
    print(f"Elapsed: {(time.time()-t0)/60:.1f} min")

    # ── Smooth ────────────────────────────────────────────────
    print(f"\nApplying Gaussian smoothing (sigma={SIGMA})...")
    H_smooth = gaussian_filter(H, sigma=SIGMA)
    del H  # free the raw histogram before building the render grid

    d_min, d_max, d_mean = H_smooth.min(), H_smooth.max(), H_smooth.mean()
    print(f"Density stats: min={d_min:.4f}, max={d_max:.4f}, mean={d_mean:.4f}")
    print(f"Non-zero cells: {np.sum(H_smooth > 0):,} / {H_smooth.size:,}")

    # ── Diagnostic: top 20 densest cells ─────────────────────
    print("\n── Top 20 Densest Grid Cells ──")
    # Cell centers (midpoints of adjacent bin edges) for coordinate readout.
    x_centers = (x_range[:-1] + x_range[1:]) / 2
    y_centers = (y_range[:-1] + y_range[1:]) / 2
    z_centers = (z_range[:-1] + z_range[1:]) / 2

    # Flat indices of the 20 largest smoothed densities, highest first.
    flat_idx = np.argsort(H_smooth.ravel())[-20:][::-1]
    ix, iy, iz = np.unravel_index(flat_idx, H_smooth.shape)

    print(f"{'Rank':<5} {'Density':<10} {'X(ly)':<9} {'Y(ly)':<9} {'Z(ly)':<9}"
          f" {'RA(deg)':<9} {'Dec(deg)':<10} {'Dist(ly)':<9}")
    for rank, (i, j, k) in enumerate(zip(ix, iy, iz), 1):
        cx, cy, cz = x_centers[i], y_centers[j], z_centers[k]
        dist_c = np.sqrt(cx**2 + cy**2 + cz**2)
        # Convert Cartesian back to RA/Dec so peaks can be cross-checked
        # against known star-forming regions (see hints below).
        ra_d = np.degrees(np.arctan2(cy, cx)) % 360
        dec_d = np.degrees(np.arcsin(np.clip(cz / dist_c, -1, 1))) if dist_c > 0 else 0
        print(f"{rank:<5} {H_smooth[i,j,k]:<10.2f} {cx:<9.1f} {cy:<9.1f} {cz:<9.1f}"
              f" {ra_d:<9.1f} {dec_d:<10.1f} {dist_c:<9.1f}")

    print("\nHint: Sco-Cen ~ RA 269, Dec -35 | Orion OB1 ~ RA 84, Dec 0")
    print(" Local Bubble boundary ~ 300-1000 ly | Pleiades ~ RA 56, Dec +24\n")

    # ── PyVista grid ──────────────────────────────────────────
    print("Building PyVista grid...")
    grid = pv.ImageData()
    grid.dimensions = np.array(H_smooth.shape)
    # One grid point per histogram cell. NOTE(review): origin is the cube
    # corner rather than the first cell center, so the rendered volume is
    # shifted by half a cell — confirm this is acceptable for the plot.
    spacing_val = (2 * lim) / (GRID_SIZE - 1)
    grid.spacing = (spacing_val, spacing_val, spacing_val)
    grid.origin = (-lim, -lim, -lim)
    # VTK expects Fortran (column-major) point ordering.
    grid.point_data['density'] = H_smooth.flatten(order='F')

    # ── Auto-scaled thresholds ────────────────────────────────
    # (iso value, color, opacity, label): fractions of the peak density
    # so the shells adapt to any dataset size.
    thresholds = [
        (d_max * 0.005, 'darkblue', 0.15, 'Ultra sparse (0.5%)'),
        (d_max * 0.01, 'blue', 0.20, 'Very sparse (1%)'),
        (d_max * 0.02, 'cyan', 0.30, 'Sparse (2%)'),
        (d_max * 0.05, 'green', 0.45, 'Medium (5%)'),
        (d_max * 0.10, 'yellow', 0.55, 'Dense (10%)'),
        (d_max * 0.20, 'orange', 0.70, 'Very dense (20%)'),
        (d_max * 0.40, 'red', 0.85, 'Maximum (40%)'),
    ]

    print(f"Auto-scaled thresholds (max density = {d_max:.2f}):")
    for t in thresholds:
        print(f" {t[3]}: {t[0]:.4f}")

    # ── Plotter ───────────────────────────────────────────────
    plotter = pv.Plotter()
    plotter.set_background('black')

    surfaces_added = 0
    for threshold, color, opacity, label in thresholds:
        try:
            contour = grid.contour([threshold], scalars='density')
            if contour.n_points > 0:
                plotter.add_mesh(contour, color=color, opacity=opacity,
                                 show_edges=False, smooth_shading=True, label=label)
                surfaces_added += 1
                print(f" Surface [{label}] @ {threshold:.4f}: {contour.n_points:,} points")
            else:
                print(f" No surface at {threshold:.4f}")
        except Exception as e:
            # Best-effort: one failed contour level must not abort the render.
            print(f" Failed at {threshold:.4f}: {e}")

    print(f"Total surfaces added: {surfaces_added}")

    # ── Annotations ───────────────────────────────────────────
    plotter.add_text(
        f'3D Stellar Density Isosurfaces\n{total_stars:,} Stars | 4.2 to 32,600 light years | GAIA DR3',
        position='upper_edge', font_size=12, color='white'
    )
    plotter.add_text(
        'Thin shells near Earth = Stellar Desert\nThick shells at distance = Dense regions',
        position='lower_left', font_size=9, color='white'
    )
    plotter.show_axes()

    # ── Earth marker ──────────────────────────────────────────
    # Small white sphere at the origin (the Sun/Earth viewpoint).
    origin_sphere = pv.Sphere(radius=MAX_DIST_LY * 0.012, center=(0, 0, 0))
    plotter.add_mesh(origin_sphere, color='white', opacity=0.9)
    plotter.add_point_labels(
        [[0, 0, 0]], ['Earth'], font_size=12,
        text_color='white', point_color='white',
        point_size=0, always_visible=True, shape_opacity=0
    )

    # ── Legend ────────────────────────────────────────────────
    legend_entries = [
        ['Ultra sparse (0.5%)', 'darkblue'],
        ['Very sparse (1%)', 'blue'],
        ['Sparse (2%)', 'cyan'],
        ['Medium (5%)', 'green'],
        ['Dense (10%)', 'yellow'],
        ['Very dense (20%)', 'orange'],
        ['Maximum (40%)', 'red'],
    ]
    plotter.add_legend(legend_entries, bcolor=(0.1, 0.1, 0.1),
                       border=True, loc='center left', size=(0.18, 0.22))

    # ── Show ──────────────────────────────────────────────────
    plotter.camera_position = 'iso'
    plotter.camera.zoom(1.2)
    # Render once, non-interactively, so the movie frames have content.
    plotter.show(auto_close=False, interactive=False)
    # Phase 1: zoom-out rotation (360 frames)
    print("Creating rotation + zoom-out video...")
    plotter.open_movie('isosurface_1B_rotation.mp4', framerate=30)
    plotter.render()
    n_frames_zoom = 360
    path_zoom = plotter.generate_orbital_path(n_points=n_frames_zoom, shift=0, viewup=[0,0,1], factor=2.5)
    # Start zoomed in (2.5x) and apply a per-frame factor slightly below 1
    # so the camera eases back out over the full orbit.
    plotter.camera.zoom(2.5)
    zoom_per_frame = 1.0 - (1.5 / n_frames_zoom)
    for i in range(n_frames_zoom):
        plotter.camera.position = path_zoom.points[i]
        plotter.camera.zoom(zoom_per_frame)
        plotter.write_frame()
    # Phase 2: three full rotations
    n_frames_full = 180 * 3
    path_full = plotter.generate_orbital_path(n_points=n_frames_full, shift=0, viewup=[0,0,1], factor=2.5)
    for i in range(n_frames_full):
        plotter.camera.position = path_full.points[i]
        plotter.write_frame()
    plotter.close()
    print("Video saved!")

    total_time = time.time() - t0
    print(f"\n{'='*65}")
    print(f" Done! Total runtime: {total_time/60:.1f} minutes")
    print(f" Stars: {total_stars:,}")
    print(f" Grid: {GRID_SIZE}³ = {GRID_SIZE**3:,} cells")
    print(f" Surfaces: {surfaces_added}")
    print("=" * 65)

Supplies

LibrariesImage.png
Structure-1-billion-star-pythonPNG.png
  1. Python and IDLE for running code
  2. Data for 1 billion stars from GAIA DR3 repository: https://flathub.flatironinstitute.org/gaiadr3
  3. Gaia is a European Space Agency satellite that has provided data on about 1.8 billion stars and other celestial objects. The Gaia data offers some of the most detailed information on our galaxy to date.
  4. Python libraries:
  5. pandas
  6. numpy
  7. scipy [gaussian_filter]
  8. pyvista
  9. multiprocessing
  10. time
  11. imageio

Python Code to Generate 3D Isosurface Video

1-billion-stars-two-zoom.png
1-billion-stars-three-side.png
isosurface_1B_300grid.png
Structure-1-billion-star-pythonPNG.png

One Billion Star 3D Stellar Density Isosurface Data Visualization Using Python and Claude.ai.

Video link: Rotating 3D Video

Still images attached.

Python code below:

"""
3D Stellar Density Isosurface — Streaming Large Dataset Version
GAIA DR3 | Parallax 4.2 to 32,600 light years | 1 billion stars
Streaming chunked loading — no all_chunks list — RAM-safe for any file size
"""

import os
import pandas as pd
import numpy as np
from scipy.ndimage import gaussian_filter
import pyvista as pv
import multiprocessing as mp
import time
import imageio

# NOTE(review): placeholder path — must be replaced with the real data
# directory, otherwise os.chdir raises FileNotFoundError at startup.
os.chdir(r'Place file path here to data files.')
# ╔══════════════════════════════════════════════════════════════╗
# ║ SETTINGS ║
# ╚══════════════════════════════════════════════════════════════╝

# Input CSVs, each one a parallax slice of the Gaia DR3 export
# (file names encode the parallax range in milliarcseconds).
CSV_FILES = [
    'Point-5-to-point-6.csv', # 4.8 GB
    'Point4-to-point5.csv', # 6.0 GB
    'Point3-Point4.csv', # 7.1 GB
    'Point1-to-Point2.csv', # 7.2 GB
    'Point2-Point3.csv', # 7.6 GB
    'Point6-1-mas.csv', # 11.1 GB
    '1-to-769-mas.csv', # 11.9 GB ← biggest last
]

# Column names exactly as they appear in the Gaia CSV files.
RA_COL = 'ra'
DEC_COL = 'dec'
PAR_COL = 'parallax'

MAX_DIST_LY = 32600  # distance cutoff / cube half-width, in light years
GRID_SIZE = 300      # linspace edge points per axis -> (GRID_SIZE-1)^3 histogram cells
SIGMA = 0.7          # Gaussian smoothing width, in grid cells

N_CORES = 4             # worker processes in the multiprocessing pool
CHUNK_SIZE = 100_000    # CSV rows streamed per chunk

# NOTE(review): RING_DISTANCES, LABELS and OUTPUT_IMAGE are defined but
# never referenced below — presumably left over from an earlier version.
RING_DISTANCES = [500, 1000, 2000, 3262, 5000, 8000, 10000, 13000, 16000]
LABELS = []
OUTPUT_IMAGE = 'isosurface_1B_still.png'

# ╔══════════════════════════════════════════════════════════════╗
# ║ MULTIPROCESSING WORKER ║
# ╚══════════════════════════════════════════════════════════════╝

def process_chunk_worker(args):
    """Convert one CSV chunk of (ra, dec, parallax) rows into a partial
    3D stellar-density histogram.

    Parameters
    ----------
    args : tuple
        ``(chunk, x_range, y_range, z_range, max_dist)`` where ``chunk``
        is a pandas DataFrame holding the RA_COL/DEC_COL/PAR_COL columns,
        the three ranges are the histogram bin edges in light years, and
        ``max_dist`` is the cube half-width cutoff in light years.

    Returns
    -------
    (numpy.ndarray, int)
        A float32 histogram of shape ``(gs, gs, gs)`` with
        ``gs = len(x_range) - 1``, and the number of stars binned.
    """
    chunk, x_range, y_range, z_range, max_dist = args
    gs = len(x_range) - 1
    empty = np.zeros((gs, gs, gs), dtype=np.float32)

    # Work on plain NumPy arrays from the start: this avoids the full
    # DataFrame copy and the SettingWithCopy hazard of assigning a new
    # column ('distance_ly') to a boolean-filtered frame.
    ra = chunk[RA_COL].to_numpy(dtype=np.float64)
    dec = chunk[DEC_COL].to_numpy(dtype=np.float64)
    par = chunk[PAR_COL].to_numpy(dtype=np.float64)

    # Keep rows with finite coordinates and a positive parallax
    # (zero/negative parallaxes are unphysical astrometric solutions).
    valid = np.isfinite(ra) & np.isfinite(dec) & (par > 0)
    ra, dec, par = ra[valid], dec[valid], par[valid]
    if ra.size == 0:
        return empty, 0

    # distance [ly] ≈ 3262 / parallax [mas]  (1000 pc·mas × ~3.262 ly/pc)
    dist = 3262.0 / par
    keep = np.isfinite(dist) & (dist > 0) & (dist <= max_dist)
    ra, dec, dist = ra[keep], dec[keep], dist[keep]
    if ra.size == 0:
        return empty, 0

    # Spherical (RA, Dec, distance) -> Cartesian (x, y, z), light years.
    ra_rad = np.radians(ra)
    dec_rad = np.radians(dec)
    cos_dec = np.cos(dec_rad)
    xs = dist * cos_dec * np.cos(ra_rad)
    ys = dist * cos_dec * np.sin(ra_rad)
    zs = dist * np.sin(dec_rad)

    # Clip to the cube so every surviving star lands inside the bin edges.
    inside = (np.abs(xs) < max_dist) & (np.abs(ys) < max_dist) & (np.abs(zs) < max_dist)
    xs, ys, zs = xs[inside], ys[inside], zs[inside]
    if xs.size == 0:
        return empty, 0

    H, _ = np.histogramdd(
        np.column_stack([xs, ys, zs]),
        bins=(x_range, y_range, z_range),
    )
    return H.astype(np.float32), int(xs.size)


# ╔══════════════════════════════════════════════════════════════╗
# ║ STREAMING CHUNK GENERATOR ║
# ╚══════════════════════════════════════════════════════════════╝

def chunk_generator(csv_path, x_range, y_range, z_range, max_dist):
    """Stream ready-to-process work items for the worker pool.

    Reads ``csv_path`` lazily with pandas so at most CHUNK_SIZE rows are
    resident at once; each yielded tuple is one task for
    ``process_chunk_worker``. The full file is never loaded into RAM.
    """
    reader = pd.read_csv(
        csv_path,
        usecols=[RA_COL, DEC_COL, PAR_COL],
        dtype={RA_COL: 'float32', DEC_COL: 'float32', PAR_COL: 'float32'},
        chunksize=CHUNK_SIZE,
        low_memory=False,
    )
    for frame in reader:
        yield frame, x_range, y_range, z_range, max_dist


# ╔══════════════════════════════════════════════════════════════╗
# ║ MAIN ║
# ╚══════════════════════════════════════════════════════════════╝

if __name__ == '__main__':
    print("Script started!")
    t0 = time.time()

    print("=" * 65)
    print(" 3D Stellar Density Isosurface — Streaming 1 Billion Stars")
    print("=" * 65)
    print(f" Files: {CSV_FILES}")
    print(f" Max dist: {MAX_DIST_LY:,} ly")
    print(f" Grid: {GRID_SIZE}³ = {GRID_SIZE**3:,} cells")
    print(f" Cores: {N_CORES}")
    print(f" Chunk size: {CHUNK_SIZE:,}")
    print("=" * 65)

    # ── Build grid edges ──────────────────────────────────────
    # GRID_SIZE edge points per axis over [-lim, lim] give
    # (GRID_SIZE - 1) histogram bins per axis.
    lim = MAX_DIST_LY + 5
    x_range = np.linspace(-lim, lim, GRID_SIZE)
    y_range = np.linspace(-lim, lim, GRID_SIZE)
    z_range = np.linspace(-lim, lim, GRID_SIZE)

    # Running density accumulator; workers return partial histograms that
    # are summed here, so only one full grid is ever held in RAM.
    H = np.zeros((GRID_SIZE - 1, GRID_SIZE - 1, GRID_SIZE - 1), dtype=np.float32)
    total_stars = 0

    # ── Stream & process each file ────────────────────────────
    for csv_path in CSV_FILES:
        if not os.path.exists(csv_path):
            print(f"\nWARNING: File not found — {csv_path} (skipping)")
            continue

        file_size_gb = os.path.getsize(csv_path) / 1e9
        print(f"\nStreaming: {csv_path} ({file_size_gb:.2f} GB)")
        file_stars = 0
        chunk_count = 0

        # imap consumes the generator lazily and preserves chunk order,
        # so only a few chunks are ever in flight at once.
        with mp.Pool(processes=N_CORES) as pool:
            gen = chunk_generator(csv_path, x_range, y_range, z_range, MAX_DIST_LY)
            for h_part, count in pool.imap(process_chunk_worker, gen, chunksize=1):
                H += h_part
                file_stars += count
                chunk_count += 1
                # Progress heartbeat every 10 chunks (~1M rows).
                if chunk_count % 10 == 0:
                    elapsed = time.time() - t0
                    rate = file_stars / elapsed if elapsed > 0 else 0
                    print(f" Chunk {chunk_count:>5} | Stars: {file_stars:,} | "
                          f"{elapsed:.0f}s | {rate/1e6:.2f}M stars/sec")

        print(f" File total: {file_stars:,} stars")
        total_stars += file_stars

    print(f"\n{'='*65}")
    print(f"All files loaded. Total valid stars: {total_stars:,}")
    print(f"Elapsed: {(time.time()-t0)/60:.1f} min")

    # ── Smooth ────────────────────────────────────────────────
    print(f"\nApplying Gaussian smoothing (sigma={SIGMA})...")
    H_smooth = gaussian_filter(H, sigma=SIGMA)
    del H  # free the raw histogram before building the render grid

    d_min, d_max, d_mean = H_smooth.min(), H_smooth.max(), H_smooth.mean()
    print(f"Density stats: min={d_min:.4f}, max={d_max:.4f}, mean={d_mean:.4f}")
    print(f"Non-zero cells: {np.sum(H_smooth > 0):,} / {H_smooth.size:,}")

    # ── Diagnostic: top 20 densest cells ─────────────────────
    print("\n── Top 20 Densest Grid Cells ──")
    # Cell centers (midpoints of adjacent bin edges) for coordinate readout.
    x_centers = (x_range[:-1] + x_range[1:]) / 2
    y_centers = (y_range[:-1] + y_range[1:]) / 2
    z_centers = (z_range[:-1] + z_range[1:]) / 2

    # Flat indices of the 20 largest smoothed densities, highest first.
    flat_idx = np.argsort(H_smooth.ravel())[-20:][::-1]
    ix, iy, iz = np.unravel_index(flat_idx, H_smooth.shape)

    print(f"{'Rank':<5} {'Density':<10} {'X(ly)':<9} {'Y(ly)':<9} {'Z(ly)':<9}"
          f" {'RA(deg)':<9} {'Dec(deg)':<10} {'Dist(ly)':<9}")
    for rank, (i, j, k) in enumerate(zip(ix, iy, iz), 1):
        cx, cy, cz = x_centers[i], y_centers[j], z_centers[k]
        dist_c = np.sqrt(cx**2 + cy**2 + cz**2)
        # Convert Cartesian back to RA/Dec so peaks can be cross-checked
        # against known star-forming regions (see hints below).
        ra_d = np.degrees(np.arctan2(cy, cx)) % 360
        dec_d = np.degrees(np.arcsin(np.clip(cz / dist_c, -1, 1))) if dist_c > 0 else 0
        print(f"{rank:<5} {H_smooth[i,j,k]:<10.2f} {cx:<9.1f} {cy:<9.1f} {cz:<9.1f}"
              f" {ra_d:<9.1f} {dec_d:<10.1f} {dist_c:<9.1f}")

    print("\nHint: Sco-Cen ~ RA 269, Dec -35 | Orion OB1 ~ RA 84, Dec 0")
    print(" Local Bubble boundary ~ 300-1000 ly | Pleiades ~ RA 56, Dec +24\n")

    # ── PyVista grid ──────────────────────────────────────────
    print("Building PyVista grid...")
    grid = pv.ImageData()
    grid.dimensions = np.array(H_smooth.shape)
    # One grid point per histogram cell. NOTE(review): origin is the cube
    # corner rather than the first cell center, so the rendered volume is
    # shifted by half a cell — confirm this is acceptable for the plot.
    spacing_val = (2 * lim) / (GRID_SIZE - 1)
    grid.spacing = (spacing_val, spacing_val, spacing_val)
    grid.origin = (-lim, -lim, -lim)
    # VTK expects Fortran (column-major) point ordering.
    grid.point_data['density'] = H_smooth.flatten(order='F')

    # ── Auto-scaled thresholds ────────────────────────────────
    # (iso value, color, opacity, label): fractions of the peak density
    # so the shells adapt to any dataset size.
    thresholds = [
        (d_max * 0.005, 'darkblue', 0.15, 'Ultra sparse (0.5%)'),
        (d_max * 0.01, 'blue', 0.20, 'Very sparse (1%)'),
        (d_max * 0.02, 'cyan', 0.30, 'Sparse (2%)'),
        (d_max * 0.05, 'green', 0.45, 'Medium (5%)'),
        (d_max * 0.10, 'yellow', 0.55, 'Dense (10%)'),
        (d_max * 0.20, 'orange', 0.70, 'Very dense (20%)'),
        (d_max * 0.40, 'red', 0.85, 'Maximum (40%)'),
    ]

    print(f"Auto-scaled thresholds (max density = {d_max:.2f}):")
    for t in thresholds:
        print(f" {t[3]}: {t[0]:.4f}")

    # ── Plotter ───────────────────────────────────────────────
    plotter = pv.Plotter()
    plotter.set_background('black')

    surfaces_added = 0
    for threshold, color, opacity, label in thresholds:
        try:
            contour = grid.contour([threshold], scalars='density')
            if contour.n_points > 0:
                plotter.add_mesh(contour, color=color, opacity=opacity,
                                 show_edges=False, smooth_shading=True, label=label)
                surfaces_added += 1
                print(f" Surface [{label}] @ {threshold:.4f}: {contour.n_points:,} points")
            else:
                print(f" No surface at {threshold:.4f}")
        except Exception as e:
            # Best-effort: one failed contour level must not abort the render.
            print(f" Failed at {threshold:.4f}: {e}")

    print(f"Total surfaces added: {surfaces_added}")

    # ── Annotations ───────────────────────────────────────────
    plotter.add_text(
        f'3D Stellar Density Isosurfaces\n{total_stars:,} Stars | 4.2 to 32,600 light years | GAIA DR3',
        position='upper_edge', font_size=12, color='white'
    )
    plotter.add_text(
        'Thin shells near Earth = Stellar Desert\nThick shells at distance = Dense regions',
        position='lower_left', font_size=9, color='white'
    )
    plotter.show_axes()

    # ── Earth marker ──────────────────────────────────────────
    # Small white sphere at the origin (the Sun/Earth viewpoint).
    origin_sphere = pv.Sphere(radius=MAX_DIST_LY * 0.012, center=(0, 0, 0))
    plotter.add_mesh(origin_sphere, color='white', opacity=0.9)
    plotter.add_point_labels(
        [[0, 0, 0]], ['Earth'], font_size=12,
        text_color='white', point_color='white',
        point_size=0, always_visible=True, shape_opacity=0
    )

    # ── Legend ────────────────────────────────────────────────
    legend_entries = [
        ['Ultra sparse (0.5%)', 'darkblue'],
        ['Very sparse (1%)', 'blue'],
        ['Sparse (2%)', 'cyan'],
        ['Medium (5%)', 'green'],
        ['Dense (10%)', 'yellow'],
        ['Very dense (20%)', 'orange'],
        ['Maximum (40%)', 'red'],
    ]
    plotter.add_legend(legend_entries, bcolor=(0.1, 0.1, 0.1),
                       border=True, loc='center left', size=(0.18, 0.22))

    # ── Show ──────────────────────────────────────────────────
    plotter.camera_position = 'iso'
    plotter.camera.zoom(1.2)
    # Render once, non-interactively, so the movie frames have content.
    plotter.show(auto_close=False, interactive=False)
    # Phase 1: zoom-out rotation (360 frames)
    print("Creating rotation + zoom-out video...")
    plotter.open_movie('isosurface_1B_rotation.mp4', framerate=30)
    plotter.render()
    n_frames_zoom = 360
    path_zoom = plotter.generate_orbital_path(n_points=n_frames_zoom, shift=0, viewup=[0,0,1], factor=2.5)
    # Start zoomed in (2.5x) and apply a per-frame factor slightly below 1
    # so the camera eases back out over the full orbit.
    plotter.camera.zoom(2.5)
    zoom_per_frame = 1.0 - (1.5 / n_frames_zoom)
    for i in range(n_frames_zoom):
        plotter.camera.position = path_zoom.points[i]
        plotter.camera.zoom(zoom_per_frame)
        plotter.write_frame()
    # Phase 2: three full rotations
    n_frames_full = 180 * 3
    path_full = plotter.generate_orbital_path(n_points=n_frames_full, shift=0, viewup=[0,0,1], factor=2.5)
    for i in range(n_frames_full):
        plotter.camera.position = path_full.points[i]
        plotter.write_frame()
    plotter.close()
    print("Video saved!")

    total_time = time.time() - t0
    print(f"\n{'='*65}")
    print(f" Done! Total runtime: {total_time/60:.1f} minutes")
    print(f" Stars: {total_stars:,}")
    print(f" Grid: {GRID_SIZE}³ = {GRID_SIZE**3:,} cells")
    print(f" Surfaces: {surfaces_added}")
    print("=" * 65)