Difference_Image_Analysis

import os

import requests
import builtins
import getpass
import time
import pandas as pd

# === 1. Set your IRSA credentials early ===
os.environ["IRSA_USER"] = "mainhanshiyangemail@gmail.com"
os.environ["IRSA_PASSWORD"] = "23593487674428"

# === 2. Monkey-patch input() and getpass.getpass() to avoid interactive prompts ===
def auto_input(prompt=""):
    if "irsa login" in prompt.lower():
        print(prompt + " mainhanshiyangemail@gmail.com")
        return "mainhanshiyangemail@gmail.com"
    return ""

def auto_getpass(prompt=""):
    if "password" in prompt.lower():
        print(prompt + " (hidden)")
        return "23593487674428"
    return ""

builtins.input = auto_input
getpass.getpass = auto_getpass
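# With these patches in place, any interactive prompt issued by ztfquery (or a
# dependency) is answered automatically, e.g.:
#   input("IRSA login: ")         -> returns the IRSA_USER address set above
#   getpass.getpass("password: ") -> returns the IRSA_PASSWORD value set above
# This is only a sketch of the intended behaviour: the exact prompt wording
# varies between ztfquery versions, so the substring checks above may need
# adjusting.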

# === 3. Override ztfquery's cookie retrieval using IRSA SSO with cookie login ===
from ztfquery import io as ztfio

def get_irsa_cookie_wrapper(*args, **kwargs):
    login_url = "https://irsa.ipac.caltech.edu/account/signon/login.do"
    params = {
        "josso_cmd": "login",
        "josso_username": os.environ["IRSA_USER"],
        "josso_password": os.environ["IRSA_PASSWORD"],
    }
    session = requests.Session()
    r = session.get(login_url, params=params)
    if r.status_code != 200:
        raise Exception("IRSA login failed, HTTP status code " + str(r.status_code))
    cookies = r.cookies
    # Ensure that cookie domains have a leading dot.
    for domain in list(cookies._cookies.keys()):
        if not domain.startswith("."):
            cookies._cookies["." + domain] = cookies._cookies[domain]
    print("Obtained IRSA cookie:",
          "; ".join([f"{k}={v}" for k, v in cookies.get_dict().items()]))
    return cookies

ztfio.get_cookie = get_irsa_cookie_wrapper
# Force the account setup (bypassing the initial test)
ztfio.set_account("irsa", token_based=False, force=True)
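# Note on the override: the URL above is IRSA's JOSSO single-sign-on endpoint,
# and a GET with josso_cmd=login plus the username/password returns the session
# cookies that ztfquery would otherwise obtain through an interactive login.
# Duplicating each cookie under a leading-dot domain is a workaround so the
# cookies also match subdomain requests. The precise cookie names (e.g. a JOSSO
# session id) are an assumption and may change on the IRSA side.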

# === 4. Import the ztfquery modules ===
from ztfquery import query, lightcurve

def parse_obj_name(obj_name):
    """
    Convert an object name in the form 'SMDGhhmmss.s+ddmmss' (decimal point
    omitted) into RA and Dec in decimal degrees.
    For example: "SMDG1230327+063157" encodes 12h30m32.7s +06d31m57s,
    i.e. RA ≈ 187.63625°, Dec ≈ 6.53250°.
    """
    if obj_name.startswith("SMDG"):
        obj_name = obj_name[4:]
    for i, ch in enumerate(obj_name):
        if ch in ['+', '-']:
            split_index = i
            break
    else:
        raise ValueError("Invalid object name format; no '+' or '-' found.")
    ra_str = obj_name[:split_index]
    dec_str = obj_name[split_index:]
    hrs = int(ra_str[0:2])
    mins = int(ra_str[2:4])
    # The RA seconds are written without a decimal point (e.g. "327" means 32.7 s).
    if len(ra_str) > 6:
        secs = float(ra_str[4:6] + "." + ra_str[6:])
    else:
        secs = float(ra_str[4:6])
    ra_deg = (hrs + mins / 60.0 + secs / 3600.0) * 15.0
    sign = 1 if dec_str[0] == '+' else -1
    deg = int(dec_str[1:3])
    mins_dec = int(dec_str[3:5])
    secs_dec = int(dec_str[5:7])
    dec_deg = sign * (deg + mins_dec / 60.0 + secs_dec / 3600.0)
    return ra_deg, dec_deg
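# Worked example for the conversion above (assuming the SMDGhhmmss.s±ddmmss
# naming convention, with the RA decimal point dropped from the name):
#   "SMDG1230327+063157" -> RA  = (12 + 30/60 + 32.7/3600) * 15 = 187.63625 deg
#                           Dec = +(6 + 31/60 + 57/3600)        =   6.53250 deg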

def clean_lightcurve_data(lc_data):
    """
    Apply basic cleaning to the lightcurve data.
    - Remove points with bad quality flags.
    - Remove outliers (e.g., points with large magnitude errors).
    """
    # Example: Remove points with bad quality flags (you can customize this)
    lc_data = lc_data[lc_data["catflags"] == 0]

    # Example: Remove outliers based on magnitude error (you can customize this)
    lc_data = lc_data[lc_data["sigmamag"] < 0.1]

    return lc_data
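# Caveat: the column names used above are assumptions about what the lightcurve
# service returns. The IRSA ZTF lightcurve tables typically expose the
# photometric uncertainty as "magerr" rather than "sigmamag"; if the cleaning
# step raises a KeyError, inspect lc_data.columns and adapt, e.g.:
#   lc_data = lc_data[lc_data["magerr"] < 0.1]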

def process_object(obj_name, base_dir):
    print(f"\n=== Processing object: {obj_name} ===")
    try:
        ra, dec = parse_obj_name(obj_name)
    except Exception as e:
        print(f"Error parsing coordinates for {obj_name}: {e}")
        return
    print(f"Parsed coordinates: RA = {ra:.5f}°, Dec = {dec:.5f}°")

    # Create a subfolder for this object's pipeline products.
    obj_folder = os.path.join(base_dir, obj_name)
    os.makedirs(obj_folder, exist_ok=True)
    print(f"Pipeline products will be saved in: {obj_folder}")

    # --- Query metadata and download pipeline products ---
    zquery = query.ZTFQuery()
    image_size_deg = 60.0 / 3600.0  # 60 arcsec = ~0.01667 degrees
    print(f"Querying metadata with image cutout size = {image_size_deg:.5f} degrees...")
    zquery.load_metadata(radec=[ra, dec], size=image_size_deg)
    n_entries = len(zquery.metatable)
    print(f"Metadata query complete. Found {n_entries} entries.")

    # Define the desired products.
    products = ["scimrefdiffimg.fits.fz", "diffimlog.txt", "sciimg.fits"]
    nproc = 8  # Increase parallel processing.

    for product in products:
        print(f"\nStarting download for product: {product}")
        try:
            zquery.download_data(product,
                                 show_progress=True,
                                 nprocess=nproc,
                                 verbose=True,
                                 overwrite=True,
                                 download_dir=obj_folder)
        except Exception as e:
            print(f"Error downloading {product} for {obj_name}: {e}")
        time.sleep(1)

    # --- Query and save the lightcurve ---
    try:
        print("Querying lightcurve...")
        # Query the lightcurve by position with a radius in arcsec (here using 5 arcsec as an example)
        lcq = lightcurve.LCQuery.from_position(ra, dec, 5)
        if lcq.data is not None and not lcq.data.empty:
            # Save the raw lightcurve data
            lc_csv_raw = os.path.join(base_dir, f"{obj_name}_lightcurve_raw.csv")
            lcq.data.to_csv(lc_csv_raw, index=False)
            print(f"Raw lightcurve data saved to: {lc_csv_raw}")

            # Clean the lightcurve data
            lc_data_cleaned = clean_lightcurve_data(lcq.data)

            # Save the cleaned lightcurve data
            lc_csv_cleaned = os.path.join(base_dir, f"{obj_name}_lightcurve_cleaned.csv")
            lc_data_cleaned.to_csv(lc_csv_cleaned, index=False)
            print(f"Cleaned lightcurve data saved to: {lc_csv_cleaned}")
        else:
            print(f"No lightcurve data found for {obj_name}.")
    except Exception as e:
        print(f"Error querying lightcurve for {obj_name}: {e}")
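# For reference, the products requested above follow the ZTF science data
# system naming: "scimrefdiffimg.fits.fz" is the science-minus-reference
# difference image, "diffimlog.txt" the image-differencing log, and
# "sciimg.fits" the calibrated science image. A minimal sketch for inspecting a
# downloaded difference image, assuming astropy is installed and `path` points
# at one of the .fits.fz files:
#   from astropy.io import fits
#   with fits.open(path) as hdul:
#       hdul.info()
#       data = hdul[-1].data  # compressed images usually sit in a later HDU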

def main():
    # === Set the base output folder on Desktop ===
    desktop = os.path.join(os.path.expanduser("~"), "Desktop")
    base_folder = os.path.join(desktop, "ZTF_Downloads")
    os.makedirs(base_folder, exist_ok=True)
    print(f"Base output folder: {base_folder}")

    # === Define your objects here (no interactive prompt) ===
    # Edit the list below with the object names you want to process.
    object_names = ["SMDG1231259+082003"]  # Example: add as many as needed.

    if not object_names:
        print("No valid object names provided. Exiting.")
        return
    for obj_name in object_names:
        process_object(obj_name, base_folder)
        time.sleep(2)  # Optional pause between objects.

    print("\nAll objects processed.")

if __name__ == "__main__":
    main()
