Skip to content

Commit

Permalink
🪺💻 ↝ [GP-48, #47, #46, GP-34, CPW-29]: Changing snippets to allow for…
Browse files Browse the repository at this point in the history
… updating existing buckets
  • Loading branch information
Gizmotronn committed Jul 7, 2024
1 parent fc0071f commit f7569b7
Show file tree
Hide file tree
Showing 32 changed files with 319 additions and 38 deletions.
37 changes: 0 additions & 37 deletions snippets/assignClouds.py

This file was deleted.

Binary file removed snippets/base.png
Binary file not shown.
107 changes: 107 additions & 0 deletions snippets/base/graphsForBaseSet.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
import os
import shutil
import lightkurve as lk
import matplotlib.pyplot as plt
from supabase import create_client, Client

# Initialize Supabase client
# NOTE(review): credentials are hard-coded in source. This looks like the
# public anon key, but it should still be loaded from the environment rather
# than committed — confirm and rotate if this key grants write access.
supabase_url = 'https://hlufptwhzkpkkjztimzo.supabase.co'
supabase_key = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImhsdWZwdHdoemtwa2tqenRpbXpvIiwicm9sZSI6ImFub24iLCJpYXQiOjE3MTYyOTk3NTUsImV4cCI6MjAzMTg3NTc1NX0.v_NDVWjIU_lJQSPbJ_Y6GkW3axrQWKXfXVsBEAbFv_I'
supabase: Client = create_client(supabase_url, supabase_key)

# Define the local folder path to save files
# (per-anomaly subdirectories are created under this root at run time)
local_folder = './graphs'

# Function to upload images to Supabase storage
def upload_to_supabase(filepath, bucket_name, folder_name, file_name):
    """Upload a local image to `bucket_name` as `folder_name/file_name`.

    The upload is skipped when an object with the same name is already
    listed in the target folder, so re-running the script is idempotent.
    """
    bucket = supabase.storage.from_(bucket_name)

    # Skip if the destination already holds an object by this name.
    existing_names = {entry['name'] for entry in bucket.list(folder_name)}
    if file_name in existing_names:
        print(f"Skipping upload for {file_name} as it already exists.")
        return

    with open(filepath, 'rb') as source:
        payload = source.read()
    bucket.upload(f"{folder_name}/{file_name}", payload, file_options={"content-type": "image/png"})

# Function to generate light curves and save plots
def generate_lightcurves(tic_id, output_dir):
    """Download light curves for `tic_id` and save six diagnostic plots.

    Fetches up to four SPOC-authored sectors, then writes PNGs into
    `output_dir`: the individual sector curves, the stitched curve, a
    15-minute binned curve, a combined stitched+binned overlay, a zoomed
    5-day window, and a flattened (smoothed) curve.
    """
    # Search for lightcurve data and keep only the first four results.
    sector_data = lk.search_lightcurve(tic_id, author='SPOC')
    select_sector = sector_data[0:4]
    lc_collection = select_sector.download_all()

    # Plot individual light curves.
    # FIX: the original called select_sector.download_all() a second time
    # here, re-downloading identical data; reuse the collection from above.
    fig1, ax1 = plt.subplots()
    lc_collection.plot(ax=ax1, linewidth=0, marker='.', markersize=1, color='midnightblue', alpha=0.3)
    fig1.savefig(os.path.join(output_dir, 'individual_light_curves.png'))
    plt.close(fig1)

    # Stitch the per-sector curves into one continuous light curve.
    lc_collection_stitched = lc_collection.stitch()
    fig2, ax2 = plt.subplots()
    lc_collection_stitched.plot(ax=ax2, linewidth=0, marker='.', markersize=1, color='midnightblue', alpha=0.3)
    fig2.savefig(os.path.join(output_dir, 'stitched_light_curves.png'))
    plt.close(fig2)

    # Bin the stitched curve: 15 minutes expressed in days.
    bin_time = 15 / 24 / 60  # 15-minute binning
    lc_collection_binned = lc_collection_stitched.bin(bin_time)
    fig3, ax3 = plt.subplots()
    lc_collection_binned.plot(ax=ax3, linewidth=0, marker='o', markersize=4, color='red', alpha=0.7)
    fig3.savefig(os.path.join(output_dir, 'binned_light_curves.png'))
    plt.close(fig3)

    # Overlay the stitched (unbinned) and binned curves for comparison.
    fig4, ax4 = plt.subplots(figsize=(10, 5))
    lc_collection_stitched.plot(ax=ax4, linewidth=0, marker='.', markersize=1, color='midnightblue', alpha=0.3, label='Unbinned')
    lc_collection_binned.plot(ax=ax4, linewidth=0, marker='o', markersize=4, color='red', alpha=0.7, label='Binned')
    ax4.legend()
    fig4.savefig(os.path.join(output_dir, 'stitched_and_binned_light_curves.png'))
    plt.close(fig4)

    # Zoom in on the first 5 days of the stitched curve to highlight transits.
    fig5, ax5 = plt.subplots(figsize=(10, 5))
    lc_collection_stitched.plot(ax=ax5, linewidth=0, marker='.', markersize=1, color='midnightblue', alpha=0.3, label='Unbinned')
    lc_collection_binned.plot(ax=ax5, linewidth=0, marker='o', markersize=4, color='red', alpha=0.7, label='Binned')
    ax5.set_xlim(lc_collection_stitched.time.min().value, lc_collection_stitched.time.min().value + 5)  # Adjust zoom range here
    ax5.legend()
    fig5.savefig(os.path.join(output_dir, 'zoomed_light_curves.png'))
    plt.close(fig5)

    # Apply a flattening/smoothing filter to remove long-term trends.
    # NOTE(review): window_length=301 samples — appropriateness depends on
    # the cadence of the downloaded data; confirm for these targets.
    smoothed_lc = lc_collection_stitched.flatten(window_length=301)
    fig6, ax6 = plt.subplots(figsize=(10, 5))
    smoothed_lc.plot(ax=ax6, linewidth=0, marker='.', markersize=1, color='midnightblue', alpha=0.3, label='Smoothed')
    fig6.savefig(os.path.join(output_dir, 'smoothed_light_curves.png'))
    plt.close(fig6)

# Function to process each TIC ID, generate graphs, and upload to Supabase
def process_and_upload(tic_id, anomaly_id):
    """Generate all plots for one target and push every file to storage."""
    output_dir = os.path.join(local_folder, str(anomaly_id))
    os.makedirs(output_dir, exist_ok=True)

    generate_lightcurves(tic_id, output_dir)

    # Upload every regular file produced for this target.
    for entry in os.listdir(output_dir):
        entry_path = os.path.join(output_dir, entry)
        if not os.path.isfile(entry_path):
            continue
        upload_to_supabase(entry_path, 'anomalies', str(anomaly_id), entry)

    print(f"Processed TIC ID {tic_id}, Anomaly ID: {anomaly_id}")

# Main function
def main():
    """Rebuild the local graphs folder and process every configured target."""
    # NOTE(review): despite the variable name, several of these identifiers
    # are KIC/EPIC designations rather than TIC — confirm the search
    # resolves them as intended.
    tic_ids = ['KIC 8692861', 'KIC 8120608', 'KIC 4138008', 'KIC 10593636', 'EPIC 246199087', 'TIC 150428135']
    anomaly_ids = [1, 2, 3, 4, 5, 6]

    # Start every run from an empty output directory.
    if os.path.exists(local_folder):
        shutil.rmtree(local_folder)
    os.makedirs(local_folder)

    for target, anomaly in zip(tic_ids, anomaly_ids):
        process_and_upload(target, anomaly)

if __name__ == "__main__":
    main()
113 changes: 113 additions & 0 deletions snippets/clouds/assignClouds.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
import os
import random
import time
from supabase import create_client, Client

# Set up Supabase client
# NOTE(review): credentials are hard-coded in source; load from the
# environment instead of committing them.
supabase_url = 'https://hlufptwhzkpkkjztimzo.supabase.co'
supabase_key = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImhsdWZwdHdoemtwa2tqenRpbXpvIiwicm9sZSI6ImFub24iLCJpYXQiOjE3MTYyOTk3NTUsImV4cCI6MjAzMTg3NTc1NX0.v_NDVWjIU_lJQSPbJ_Y6GkW3axrQWKXfXVsBEAbFv_I'
supabase: Client = create_client(supabase_url, supabase_key)

# Get the list of image files from the directory
# (only regular files directly inside image_directory; subfolders ignored)
image_directory = "../anomalies/clouds"
image_files = [f for f in os.listdir(image_directory) if os.path.isfile(os.path.join(image_directory, f))]

# Define a function to upload image to Supabase storage
def upload_to_supabase(filepath, bucket_name, folder_name, file_name, retries=3):
    """Upload `filepath` as `folder_name/file_name`, retrying up to `retries` times.

    A name clash redirects the next attempt to a freshly chosen
    `<anomaly_id>/clouds` folder (drawn from the module-level `anomaly_ids`);
    any other error waits two seconds and retries, reporting failure after
    the final attempt.
    """
    for attempt in range(retries):
        try:
            # Does the destination folder already hold an object by this name?
            listed = supabase.storage.from_(bucket_name).list(folder_name)
            if file_name in [entry['name'] for entry in listed]:
                print(f"File {file_name} already exists in {folder_name}. Trying a different folder.")
                # Raising routes control into the folder-reselection branch.
                raise FileExistsError("File already exists")

            # Upload the file contents.
            with open(filepath, 'rb') as handle:
                payload = handle.read()
            supabase.storage.from_(bucket_name).upload(f"{folder_name}/{file_name}", payload, file_options={"content-type": "image/png"})
            print(f"Uploaded {file_name} to folder {folder_name} in Supabase storage.")
            break  # success — stop retrying

        except FileExistsError:
            # Pick another anomaly folder for the next attempt.
            folder_name = f"{random.choice(anomaly_ids)}/clouds"
            print(f"Retrying with a new folder {folder_name} for file {file_name}.")

        except Exception as exc:
            print(f"Error during upload: {exc}. Retrying ({attempt + 1}/{retries})...")
            if attempt == retries - 1:
                print(f"Failed to upload {file_name} after {retries} attempts.")
            else:
                time.sleep(2)  # brief pause before the next attempt

# Fetch anomalies with anomalytype 'planet'.
# NOTE: `anomaly_ids` is read as a module-level global by the upload helpers
# defined above, so this must run before any upload is attempted.
anomalies_list = supabase.table("anomalies").select("id").eq("anomalytype", "planet").execute()
anomaly_ids = [anomaly['id'] for anomaly in anomalies_list.data]

# Get existing folders from the `anomalies` bucket
def list_folders(bucket_name):
    """Return the purely numeric top-level entry names in `bucket_name`."""
    numeric_folders = []
    # List all items at the root of the bucket.
    for entry in supabase.storage.from_(bucket_name).list(""):
        parts = entry['name'].split('/')
        # Skip anything already nested under a clouds/ subfolder.
        if len(parts) > 1 and parts[1] == 'clouds':
            continue
        # Keep only bare names that look like anomaly IDs.
        if len(parts) == 1 and parts[0].isdigit():
            numeric_folders.append(parts[0])
    return numeric_folders

# Identify folders that do not have a `clouds` directory
def identify_folders_without_clouds(bucket_name):
    """Return the anomaly folders that contain no `clouds` entry."""
    missing = []
    for folder in list_folders(bucket_name):
        # NOTE(review): assumes the listing reports the entry as
        # "<folder>/clouds" rather than just "clouds" — confirm against the
        # storage client's list() semantics.
        entries = supabase.storage.from_(bucket_name).list(folder)
        if not any(entry['name'] == f"{folder}/clouds" for entry in entries):
            missing.append(folder)
    return missing

folders_without_clouds = identify_folders_without_clouds('anomalies')

# Define a function to upload images to folders that do not have a `clouds` directory
def upload_to_folders_without_clouds(image_files, folders):
    """Upload one image into each folder that lacks a `clouds` directory.

    BUG FIX: the original iterated `folders` with an immediate `break`,
    which sent every image to the FIRST folder in the list and left all
    the other clouds-less folders uncovered. Pairing images with folders
    via zip() gives each folder exactly one image (surplus items on either
    side are simply not consumed here — remaining images are handled by
    the random-distribution pass below).
    """
    for image_file, folder in zip(image_files, folders):
        file_path = os.path.join(image_directory, image_file)
        upload_to_supabase(file_path, 'anomalies', f"{folder}/clouds", image_file)

# Upload images to folders that do not have a `clouds` directory
upload_to_folders_without_clouds(image_files, folders_without_clouds)

# Upload remaining images to existing `clouds` directories or new folders
def upload_to_existing_or_new_folders(image_files):
    """Upload each image to a randomly chosen `<anomaly_id>/clouds` folder.

    Tries up to five random folders per image before giving up.
    NOTE(review): `upload_to_supabase` handles its own errors and does not
    re-raise, so the except branch here is likely unreachable — verify.
    """
    for image_file in image_files:
        source_path = os.path.join(image_directory, image_file)
        for _attempt in range(5):  # retry up to 5 times to find a folder
            # Randomly select an anomaly and build its clouds folder name.
            target_folder = f"{random.choice(anomaly_ids)}/clouds"
            try:
                upload_to_supabase(source_path, 'anomalies', target_folder, image_file)
                break  # stop after the first successful attempt
            except Exception as exc:
                print(f"Error during upload to {target_folder}: {exc}")
                time.sleep(2)  # wait before retrying
        else:
            # for/else: runs only if no attempt broke out of the loop.
            print(f"Failed to upload {image_file} after several attempts.")

# Upload remaining images to existing or new folders
upload_to_existing_or_new_folders(image_files)
Binary file added snippets/graphs/1/binned_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added snippets/graphs/1/individual_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added snippets/graphs/1/smoothed_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added snippets/graphs/1/stitched_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added snippets/graphs/1/zoomed_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added snippets/graphs/2/binned_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added snippets/graphs/2/individual_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added snippets/graphs/2/smoothed_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added snippets/graphs/2/stitched_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added snippets/graphs/2/zoomed_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified snippets/output/binned_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified snippets/output/individual_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified snippets/output/smoothed_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified snippets/output/stitched_and_binned_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified snippets/output/stitched_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified snippets/output/zoomed_light_curves.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file removed snippets/output_1/binned_light_curves.png
Binary file not shown.
Binary file removed snippets/output_1/individual_light_curves.png
Binary file not shown.
Binary file removed snippets/output_1/smoothed_light_curves.png
Binary file not shown.
Binary file not shown.
Binary file removed snippets/output_1/stitched_light_curves.png
Binary file not shown.
Binary file removed snippets/output_1/zoomed_light_curves.png
Binary file not shown.
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,34 @@

# Function to upload images to Supabase storage
def upload_to_supabase(filepath, bucket_name, folder_name, file_name):
    """Upload an image, skipping it when the object already exists."""
    storage = supabase.storage.from_(bucket_name)

    # Check if the file already exists in the bucket.
    already_present = [entry['name'] for entry in storage.list(folder_name)]
    if file_name in already_present:
        print(f"Skipping upload for {file_name} as it already exists.")
        return

    with open(filepath, 'rb') as fh:
        contents = fh.read()
    storage.upload(f"{folder_name}/{file_name}", contents, file_options={"content-type": "image/png"})

# Function to check if the anomaly already exists
def anomaly_exists(tic_id):
    """Return True if an `anomalies` row already uses this ID's numeric part."""
    digits = re.search(r'\d+', tic_id)
    if digits is None:
        # No numeric component means we cannot derive an anomaly ID.
        raise ValueError("TIC ID must contain a numeric value to be used as the anomaly ID.")

    numeric_id = int(digits.group())
    response = supabase.table("anomalies").select("id").eq("id", numeric_id).execute()
    return len(response.data) > 0

# Function to insert a new anomaly and get its ID
def insert_anomaly(tic_id):
if anomaly_exists(tic_id):
print(f"Anomaly for {tic_id} already exists. Skipping...")
match = re.search(r'\d+', tic_id)
tic_id_numeric = int(match.group()) if match else None
return tic_id_numeric

# Extract the numeric part of the TIC ID for use as the id
match = re.search(r'\d+', tic_id)
tic_id_numeric = int(match.group()) if match else None
Expand Down Expand Up @@ -109,6 +132,10 @@ def main(tic_ids):
os.makedirs(output_dir)

for tic_id in tic_ids:
if anomaly_exists(tic_id):
print(f"Anomaly for {tic_id} already exists. Skipping...")
continue

anomaly_id = insert_anomaly(tic_id)
anomaly_folder = str(anomaly_id)

Expand All @@ -121,5 +148,5 @@ def main(tic_ids):
print(f"Processed TIC ID {tic_id}, Anomaly ID: {anomaly_id}")

if __name__ == "__main__":
tic_ids_list = ['TIC 50365310', 'TIC 88863718']
tic_ids_list = ['TIC 50365310', 'TIC 88863718', 'TIC 124709665', 'TIC 106997505', 'TIC 238597883', 'TIC 169904935', 'TIC 156115721', 'TIC 65212867', 'TIC 440801822']
main(tic_ids_list)
File renamed without changes.
File renamed without changes.
71 changes: 71 additions & 0 deletions snippets/planets/downloadPlanetAnomalies.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import os
import requests
from supabase import create_client, Client
import logging

# Initialize Supabase client
# NOTE(review): hard-coded credentials; prefer environment variables.
supabase_url = 'https://hlufptwhzkpkkjztimzo.supabase.co'
supabase_key = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImhsdWZwdHdoemtwa2tqenRpbXpvIiwicm9sZSI6ImFub24iLCJpYXQiOjE3MTYyOTk3NTUsImV4cCI6MjAzMTg3NTc1NX0.v_NDVWjIU_lJQSPbJ_Y6GkW3axrQWKXfXVsBEAbFv_I'
supabase: Client = create_client(supabase_url, supabase_key)

# Define the initial folder path (local mirror root for downloaded files)
initial_folder = '../anomalies/planets'

# Ensure the initial folder exists
if not os.path.exists(initial_folder):
    os.makedirs(initial_folder)

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Base URL for public access to the storage bucket
# (object path is appended directly, so the trailing slash matters)
public_base_url = f'{supabase_url}/storage/v1/object/public/anomalies/'

# Function to download images from Supabase storage and save them to the local file system
def download_images_from_supabase():
    """Mirror the public `anomalies` bucket into `initial_folder`.

    Pages through the bucket 1000 entries at a time, fetching each object
    over its public URL and saving it under `<initial_folder>/<anomaly_id>/`.
    NOTE(review): entries whose name has no '/' yield anomaly_id == file_name
    — confirm the root listing returns full object paths, not bare folders.
    """
    offset = 0
    while True:
        # List objects in the 'anomalies' bucket with pagination.
        try:
            page = supabase.storage.from_('anomalies').list('', {'limit': 1000, 'offset': offset})
        except Exception as e:
            logger.error(f"Error listing files from Supabase: {e}")
            break
        if not page:
            logger.info("No more files to download.")
            break

        for entry in page:
            remote_path = entry['name']
            path_parts = remote_path.split('/')
            # First segment is the anomaly ID, last is the file name.
            anomaly_id = path_parts[0]
            file_name = path_parts[-1]

            # Create a per-anomaly directory if it doesn't exist yet.
            target_dir = os.path.join(initial_folder, anomaly_id)
            if not os.path.exists(target_dir):
                os.makedirs(target_dir)

            local_file_path = os.path.join(target_dir, file_name)

            try:
                # Fetch the object over its public URL and write it to disk.
                reply = requests.get(f'{public_base_url}{remote_path}')
                reply.raise_for_status()  # surface HTTP errors as exceptions

                with open(local_file_path, 'wb') as out:
                    out.write(reply.content)

                logger.info(f"Downloaded {file_name} to {local_file_path}")

            except requests.RequestException as e:
                logger.error(f"Error downloading {remote_path}: {e}")

        # Advance to the next page of up to 1000 entries.
        offset += 1000

# Run the download function
if __name__ == "__main__":
    download_images_from_supabase()

0 comments on commit f7569b7

Please sign in to comment.