Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/changes/563.new.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Pulled target photometry from ANTARES when creating a target using the antares2goats extension.
7 changes: 6 additions & 1 deletion src/goats_tom/api_views/antares2goats.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""View set to handle adding items from the browser extension."""

__all__ = ["Antares2GoatsViewSet"]

import logging
from datetime import datetime

from django.db import IntegrityError
Expand All @@ -16,6 +16,8 @@

from goats_tom.serializers import Antares2GoatsSerializer

logger = logging.getLogger(__name__)


class Antares2GoatsViewSet(GenericViewSet, mixins.CreateModelMixin):
# FIXME: Hack until tomtoolkit merges in PR.
Expand Down Expand Up @@ -74,6 +76,9 @@ def perform_create(self, serializer: Antares2GoatsSerializer) -> None:
# Convert the generic alert into target format.
target, extras, aliases = broker_class.to_target(alert)
target.save(extras=extras, names=aliases)
lightcurve_data = broker_class.process_lightcurve_data(alert=alert)
dp = broker_class.create_lightcurve_dp(target, lightcurve_data)
broker_class.create_reduced_datums(dp)

elif "esquery" in serializer.validated_data:
query = serializer.validated_data["esquery"]
Expand Down
166 changes: 156 additions & 10 deletions src/goats_tom/brokers/antares.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,18 @@
from typing import Any, Iterator

import marshmallow
import pandas as pd
from astropy.time import Time, TimezoneInfo
from crispy_forms.layout import HTML, Div, Fieldset, Layout
from django import forms
from django.core.files.base import ContentFile
from django.db import IntegrityError, transaction
from django.forms.widgets import Textarea
from django.templatetags.static import static
from tom_alerts.alerts import GenericAlert, GenericBroker, GenericQueryForm
from tom_dataproducts.data_processor import run_data_processor
from tom_dataproducts.exceptions import InvalidFileFormatException
from tom_dataproducts.models import DataProduct, ReducedDatum
from tom_targets.models import BaseTarget, Target, TargetName

from goats_tom.antares_client.client import get_by_id, search
Expand Down Expand Up @@ -120,16 +126,16 @@ def __init__(self, *args, **kwargs):

def clean(self):
"""Cleans the data of the "query" field and validates it.
GG
Returns
-------
dict
The cleaned data of the form.

Returns
-------
dict
The cleaned data of the form.

Raises
------
forms.ValidationError
Raised if the "query" field is empty.
Raises
------
forms.ValidationError
Raised if the "query" field is empty.

"""
cleaned_data = super().clean()
Expand Down Expand Up @@ -172,7 +178,7 @@ def alert_to_dict(cls, locus) -> dict[str, Any]:
"dec": locus.dec,
"properties": locus.properties,
"tags": locus.tags,
# 'lightcurve': locus.lightcurve.to_json(),
"lightcurve": locus.lightcurve,
"catalogs": locus.catalogs,
"alerts": [
{
Expand Down Expand Up @@ -341,3 +347,143 @@ def add_aliases(values: list[str] | None) -> None:
add_aliases(lsst.get("ss_object_id"))

return aliases

def process_lightcurve_data(self, alert=None):
    """Normalize ANTARES lightcurve data into a standardized DataFrame.

    Parameters
    ----------
    alert : dict, optional
        Alert payload returned by the ANTARES broker. Expected to contain
        a "lightcurve" key and optionally "properties.survey".

    Returns
    -------
    pandas.DataFrame or None
        Processed lightcurve DataFrame with standardized columns:
        time, magnitude, error, limit, filter, telescope, source.
        Returns None if the lightcurve is missing, invalid, or empty.
    """
    alert_dict = alert or {}
    lightcurve = alert_dict.get("lightcurve")

    if lightcurve is None:
        return None

    # The broker may hand us a DataFrame directly or any
    # DataFrame-convertible structure (list of records, dict of columns).
    if not isinstance(lightcurve, pd.DataFrame):
        try:
            lightcurve = pd.DataFrame(lightcurve)
        except Exception:
            logger.exception("ANTARES: failed to convert lightcurve to DataFrame")
            return None

    if lightcurve.empty:
        return None

    # Drop raw columns that are replaced or unused; absence is fine.
    lightcurve = lightcurve.drop(columns=["time", "ant_survey"], errors="ignore")

    # Map ANTARES column names onto the TOM photometry schema.
    lightcurve = lightcurve.rename(
        columns={
            "ant_mjd": "time",
            "ant_mag": "magnitude",
            "ant_magerr": "error",
            "ant_maglim": "limit",
            "ant_passband": "filter",
        }
    )

    # The first key of the survey mapping identifies the originating
    # telescope. `or {}` guards a present-but-None "survey" value;
    # next(iter(...)) avoids materializing the full key list.
    try:
        survey = alert_dict.get("properties", {}).get("survey") or {}
        lightcurve["telescope"] = next(iter(survey)).upper()
    except Exception:
        # Covers missing/empty survey info and non-string keys.
        logger.exception("ANTARES: failed to extract telescope")
        lightcurve["telescope"] = "UNKNOWN"

    lightcurve["source"] = "ANTARES"

    return lightcurve

def create_lightcurve_dp(self, target, lightcurve):
    """Create or update the photometry DataProduct for a target.

    Idempotent: if a DataProduct with the same ``product_id`` already
    exists it is reused and its file replaced; otherwise a new one is
    created.

    Parameters
    ----------
    target : tom_targets.models.Target
        Target associated with the lightcurve.
    lightcurve : pandas.DataFrame
        Processed lightcurve data.

    Returns
    -------
    tom_dataproducts.models.DataProduct
        The created or updated DataProduct instance.
    """
    csv_string = lightcurve.to_csv(index=False)

    product_id = f"{target.name}_lightcurve"
    file_name = f"{target.name}_lightcurve.csv"
    data = ContentFile(csv_string.encode("utf-8"), name=file_name)

    try:
        with transaction.atomic():
            dp, _created = DataProduct.objects.get_or_create(
                product_id=product_id,
                defaults={
                    "target": target,
                    "data_product_type": "photometry",
                },
            )
    except IntegrityError:
        # Lost a concurrent-creation race; the row exists now, fetch it.
        dp = DataProduct.objects.get(product_id=product_id)

    # Delete the previous file first: saving under an existing name makes
    # Django store a new randomly-suffixed file, leaking orphans on every
    # refresh of the lightcurve.
    if dp.data:
        dp.data.delete(save=False)
    dp.data.save(file_name, data, save=True)
    return dp

def create_reduced_datums(self, dp):
    """Generate ReducedDatum entries from a photometry DataProduct.

    Runs the TOM data processor on the provided DataProduct. If
    processing fails, any partially created ReducedDatum entries and the
    DataProduct itself are deleted to keep the database consistent, and
    the original error is re-raised.

    Parameters
    ----------
    dp : tom_dataproducts.models.DataProduct
        DataProduct containing the photometry file.

    Raises
    ------
    InvalidFileFormatException
        If the file format is invalid for processing.
    Exception
        For any unexpected processing error.
    """
    try:
        run_data_processor(dp)
    except Exception as exc:
        # One cleanup path for both expected and unexpected failures;
        # only the log message differs.
        if isinstance(exc, InvalidFileFormatException):
            logger.exception("ANTARES: invalid file format dp_id=%s", dp.id)
        else:
            logger.exception("ANTARES: unexpected error processing dp_id=%s", dp.id)
        ReducedDatum.objects.filter(data_product=dp).delete()
        dp.delete()
        raise
3 changes: 0 additions & 3 deletions src/goats_tom/processors/run_data_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,12 +49,10 @@ def run_data_processor(dp, dp_type_override=None):
processor_class
)
)

data_processor = clazz()
# data returned by process_data is a list of 3-tuples: (timestamp, datum, source)
data = data_processor.process_data(dp)
data_type = data_processor.data_type_override() or data_type

# Add only the new (non-duplicate) ReducedDatum objects to the database

# 1. For quick O(1) lookup, create a hash table of existing ReducedDatum objects
Expand All @@ -67,7 +65,6 @@ def run_data_processor(dp, dp_type_override=None):
json.dumps(rd.value, sort_keys=True, skipkeys=True): 1
for rd in ReducedDatum.objects.filter(target=dp.target)
}

# 2. Create the list of new ReducedDatum objects (ready for bulk_create)
new_reduced_datums = []
skipped_data = []
Expand Down
6 changes: 3 additions & 3 deletions src/goats_tom/templates/tom_common/target-dash.html
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{% extends 'tom_common/base.html' %}
{% load comments bootstrap4 tom_common_extras targets_extras observation_extras dataproduct_extras static cache gemini %}
{% load comments bootstrap4 tom_common_extras targets_extras observation_extras dataproduct_extras static cache gemini tom_overrides %}
{% block additional_css %}
<link rel="stylesheet" href="{% static 'tom_common/css/main.css' %}">
<link rel="stylesheet" href="{% static 'tom_targets/css/main.css' %}">
Expand All @@ -17,7 +17,7 @@
{% target_data target %}
<div class="row">
<div class="col">
{% recent_photometry target limit=3 %}
{% goats_recent_photometry target limit=3 %}
</div>
</div>
{% if target.type == 'SIDEREAL' %}
Expand All @@ -44,4 +44,4 @@
{% endblock %}
</div>
</div>
{% endblock %}
{% endblock %}
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
{% load tom_common_extras %}
{# Card listing the most recent photometric points for a target. #}
{# Rendered by the `goats_recent_photometry` inclusion tag; context #}
{# provides `target` and `data` (dicts with timestamp/magnitude/limit). #}

<div class="card">
<div class="card-header px-2 d-flex justify-content-between align-items-center">
<h4 class="mb-0">Recent Photometry</h4>

{# Re-pull photometry from ANTARES for this target. #}
<form method="post"
action="{% url 'refresh_antares_photometry' target.id %}"
class="mb-0">
{% csrf_token %}
<button type="submit" class="btn btn-sm btn-primary">
Refresh
</button>
</form>
</div>

<table class="table mb-0">
<thead>
<tr>
<th>Timestamp</th>
<th>Magnitude</th>
</tr>
</thead>
<tbody>
{% for datum in data %}
<tr>
<td>{{ datum.timestamp }}</td>
<td>
{# Non-detections are upper limits: prefix with ">". #}
{% if datum.limit %}&gt;{% endif %}
{{ datum.magnitude|truncate_value_for_display:8 }}
</td>
</tr>
{% empty %}
<tr>
<td colspan="2">No recent photometry.</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>

23 changes: 18 additions & 5 deletions src/goats_tom/templates/tom_targets/target_detail.html
Original file line number Diff line number Diff line change
Expand Up @@ -164,8 +164,10 @@ <h6>${file}</h6>
};

// Helper function to fetch reduced data.
const fetchReducedData = async (dataproductId, dataType) => {
const url = `/api/reduceddatums/?data_product=${dataproductId}&data_type=${dataType}`;
const fetchReducedData = async (dataproductId, dataType) => {
let url = `/api/reduceddatums/?data_product=${dataproductId}&data_type=${dataType}`;
let data = null;
while (url) {
const response = await fetch(url, {
method: "GET",
headers: {
Expand All @@ -177,9 +179,20 @@ <h6>${file}</h6>
if (!response.ok) {
throw new Error(`Fetch failed with status: ${response.status}`);
}
const data = await response.json();
return data;
};
const responseData = await response.json();

if (!data) {
data = {
...responseData,
results: [...responseData.results],
};
} else {
data.results.push(...responseData.results);
}
url = responseData.next;
}
return data;
};

// Helper function to run the processor.
const runProcessor = async (dataproductId, dataType) => {
Expand Down
6 changes: 5 additions & 1 deletion src/goats_tom/templatetags/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@
from .dataproduct_visualizer import dataproduct_visualizer
from .gemini import render_goa_query_form, render_launch_dragons
from .target_navbar import render_target_navbar
from .tom_overrides import goats_dataproduct_list_for_observation_saved
from .tom_overrides import (
goats_dataproduct_list_for_observation_saved,
goats_recent_photometry,
)

__all__ = [
"starts_with",
Expand All @@ -13,4 +16,5 @@
"render_target_navbar",
"dataproduct_visualizer",
"antares_url",
"goats_recent_photometry",
]
24 changes: 24 additions & 0 deletions src/goats_tom/templatetags/tom_overrides.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,3 +102,27 @@ def spectroscopy_for_target(context, target, dataproduct=None):
go.Figure(data=plot_data, layout=layout), output_type="div", show_link=False
),
}


@register.inclusion_tag("tom_dataproducts/partials/recent_photometry.html")
def goats_recent_photometry(target, limit=1):
    """
    Override for the TOMToolkit ``recent_photometry`` inclusion tag.

    Renders a table of the most recent photometric points for a target.

    Parameters
    ----------
    target : tom_targets.models.Target
        Target whose photometry is displayed.
    limit : int, optional
        Maximum number of points to show (default 1).

    Returns
    -------
    dict
        Template context with ``target`` and ``data`` (list of dicts with
        ``timestamp``, ``magnitude`` and ``limit`` keys).
    """
    photometry = ReducedDatum.objects.filter(
        data_type="photometry", target=target
    ).order_by("-timestamp")[:limit]

    data = []
    for reduced_datum in photometry:
        # Non-detections carry a "limit" value instead of a magnitude;
        # flag them so the template can render an upper limit.
        is_limit = "limit" in reduced_datum.value
        data.append(
            {
                "timestamp": reduced_datum.timestamp,
                "magnitude": reduced_datum.value["limit" if is_limit else "magnitude"],
                "limit": is_limit,
            }
        )
    return {"target": target, "data": data}
Loading