Unique identifier for predict events #1054

Merged · 2 commits · Aug 28, 2024
32 changes: 32 additions & 0 deletions backend/tno/migrations/0006_auto_20240822_1847.py
@@ -0,0 +1,32 @@
# Generated by Django 3.2.18 on 2024-08-22 18:47

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("tno", "0005_auto_20240804_0208"),
]

operations = [
migrations.AddField(
model_name="occultation",
name="hash_id",
field=models.CharField(
blank=True,
default=None,
help_text="Unique hash identifier for the prediction event",
max_length=26,
null=True,
unique=True,
verbose_name="Hash ID",
),
),
migrations.AddIndex(
model_name="occultation",
index=models.Index(
fields=["hash_id"], name="tno_occulta_hash_id_ff09b7_idx"
),
),
]
16 changes: 15 additions & 1 deletion backend/tno/models/occultation.py
@@ -11,7 +11,15 @@
# TODO Squash Migrations:
# https://coderbook.com/@marcus/how-to-squash-and-merge-django-migrations/
class Occultation(models.Model):

hash_id = models.CharField(
max_length=26,
verbose_name="Hash ID",
null=True,
blank=True,
default=None,
help_text="Unique hash identifier for the prediction event",
unique=True,
)
# -------------------------------------------------
    # Object identification
# -------------------------------------------------
@@ -91,6 +99,7 @@ class Occultation(models.Model):
# -------------------------------------------------
    # Prediction information
# -------------------------------------------------

date_time = models.DateTimeField(
verbose_name="Date Time", auto_now_add=False, null=False, blank=False
)
@@ -963,6 +972,11 @@ class Meta:
]
),
# event indexes
models.Index(
fields=[
"hash_id",
]
),
models.Index(
fields=[
"date_time",
8 changes: 8 additions & 0 deletions backend/tno/serializers.py
@@ -91,17 +91,25 @@ class Meta:


class OccultationSerializer(serializers.ModelSerializer):
id = serializers.SerializerMethodField()
map_url = serializers.SerializerMethodField()

class Meta:
model = Occultation
exclude = (
"hash_id",
"occ_path_min_longitude",
"occ_path_max_longitude",
"occ_path_min_latitude",
"occ_path_max_latitude",
)

    def get_id(self, obj):
        # Prefer the public hash_id when it exists; fall back to the database pk.
        if obj.hash_id is not None:
            return obj.hash_id
        return obj.id

def get_map_url(self, obj):
request = self.context.get("request")
relative_url = obj.get_map_relative_url()
72 changes: 43 additions & 29 deletions backend/tno/views/occultation.py
@@ -9,6 +9,7 @@
from dateutil.relativedelta import relativedelta
from django.conf import settings
from django.db.models import F, FloatField, Q, Value
from django.shortcuts import get_object_or_404
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework import serializers, viewsets
from rest_framework.decorators import action
@@ -29,6 +30,9 @@ class CharInFilter(django_filters.BaseInFilter, django_filters.CharFilter):

class OccultationFilter(django_filters.FilterSet):

hash_id = django_filters.CharFilter(
label="Hash ID", field_name="hash_id", lookup_expr="exact"
)
date_time = django_filters.DateTimeFromToRangeFilter()

name = CharInFilter(field_name="name", lookup_expr="in")
@@ -142,6 +146,8 @@ class Meta:
class OccultationViewSet(viewsets.ReadOnlyModelViewSet):
permission_classes = [AllowAny]

lookup_field = "hash_id"

queryset = Occultation.objects.all()
serializer_class = OccultationSerializer

@@ -211,33 +217,41 @@ def check_user_location_params(self, params):

return lat, long, radius

    # TODO: Consider an asynchronous approach.
    # A test with an asynchronous method could be promising!
# if None not in [lat, long, radius]:
# # print(f"Latitude: {lat} Longitude: {long} Radius: {radius}")
# job = group(
# assync_visibility_from_coeff.s(
# event_id=event.id,
# latitude=lat,
# longitude=long,
# radius=radius,
# date_time=event.date_time.isoformat(),
# inputdict=event.occ_path_coeff,
# # object_diameter=event.diameter,
# # ring_diameter=event.diameter,
# # n_elements= 1500,
# # ignore_nighttime= False,
# # latitudinal= False
# ) for event in queryset)

# result = job.apply_async()
# while result.ready() == False:
# print(f"Completed: {result.completed_count()}")
# sleep(1)
# t1 = datetime.now()
# dt = t1 - t0
# logger.info(f"Query Completed in {humanize.naturaldelta(dt)}")
# return queryset
    # TODO: Remove this method once every prediction has a hash_id.
    # It overrides the base method that resolves the object from the lookup_field in the URL.
    # It was added so lookups by the database ID keep working, preserving compatibility with old URLs.
def get_object(self):
"""
Returns the object the view is displaying.

You may want to override this if you need to provide non-standard
queryset lookups. Eg if objects are referenced using multiple
keyword arguments in the url conf.
"""
queryset = self.filter_queryset(self.get_queryset())

# Perform the lookup filtering.
lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field

assert lookup_url_kwarg in self.kwargs, (
"Expected view %s to be called with a URL keyword argument "
'named "%s". Fix your URL conf, or set the `.lookup_field` '
"attribute on the view correctly."
% (self.__class__.__name__, lookup_url_kwarg)
)

        try:
            # Old URLs carry the integer primary key; new URLs carry the hash_id.
            pk = int(self.kwargs[lookup_url_kwarg])
            filter_kwargs = {"pk": pk}
        except (TypeError, ValueError):
            filter_kwargs = {self.lookup_field: self.kwargs[lookup_url_kwarg]}

obj = get_object_or_404(queryset, **filter_kwargs)

# May raise a permission denied
self.check_object_permissions(self.request, obj)

return obj

def list(self, request):
t0 = datetime.now()
@@ -369,7 +383,7 @@ def highlights_maps_stats(self, request):
},
)
@action(detail=True, methods=["get"], permission_classes=(AllowAny,))
def get_or_create_map(self, request, pk):
def get_or_create_map(self, request, pk=None, hash_id=None):
"""Retorna o mapa para o evento de ocultação.

Verifica se já existe mapa para o evento especifico para o ID.
@@ -500,7 +514,7 @@ def asteroids_with_prediction(self, request):

@extend_schema(exclude=True)
@action(detail=True, methods=["get"], permission_classes=(AllowAny,))
def get_star_by_event(self, request, pk=None):
def get_star_by_event(self, request, pk=None, hash_id=None):
pre_occ = self.get_object()

source_id = pre_occ.gaia_source_id
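
A minimal sketch, not part of this PR, of what the combined lookup_field and get_object override mean for API consumers; the /occultations/ prefix matches the frontend calls, and the pk and hash values below are purely hypothetical:

from rest_framework.test import APIClient

client = APIClient()

# Old links still resolve by the integer primary key (hypothetical value)...
legacy_response = client.get("/occultations/12345/")

# ...while new links resolve events by their hash_id (hypothetical value).
hash_response = client.get("/occultations/Xl2kq9fYbGm4T1VZpR8wAg/")

# Both hit the same detail endpoint: get_object() first tries int(pk) and,
# failing that, falls back to filtering on the hash_id lookup field.
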
@@ -53,14 +53,6 @@ function PredictEventGrid() {
return (
<React.Fragment>
<Grid container spacing={2}>
{/* <Grid item xs={12}>
<Stack direction='row' justifyContent='center' alignItems='center' spacing={1}>
<CircularProgress size='1rem' />
<Typography variant='body2' sx={{ mb: 2 }} color='text.secondary'>
{message}
</Typography>
</Stack>
</Grid> */}
<Grid item xs={12}>
<Skeleton variant='rectangular' width={'100%'} height={250} />
</Grid>
7 changes: 6 additions & 1 deletion frontend/src/services/api/Occultation.js
@@ -48,7 +48,12 @@ export const filter_by_location = ({
return api.get('/occultations/filter_location/', { params })
}

export const getOccultationById = ({ id }) => api.get(`/occultations/${id}`).then((res) => res.data)
export const getOccultationById = ({ id }) => {
if (!id) {
return
}
return api.get(`/occultations/${id}`).then((res) => res.data)
}

export const getNextTwenty = ({ page, pageSize, ordering }) => {
const params = {
2 changes: 2 additions & 0 deletions predict_occultation/src/asteroid/asteroid.py
@@ -981,6 +981,7 @@ def register_occultations(self, start_period: str, end_period: str, jobid: int):
"closest_approach_uncertainty",
"moon_illuminated_fraction",
"probability_of_centrality",
"hash_id",
]
)

@@ -996,6 +997,7 @@ def register_occultations(self, start_period: str, end_period: str, jobid: int):
)
data.seek(0)

# rowcount = dao.import_occultations(list(df.columns), data)
rowcount = dao.import_occultations(data)

del df
8 changes: 7 additions & 1 deletion predict_occultation/src/dao/occultation.py
@@ -40,12 +40,18 @@ def delete_by_asteroid_name_period(
)
return rows

# def import_occultations(self, columns: list, data):
def import_occultations(self, data):

        # SQL COPY with all columns to be imported and the CSV format.
        # IMPORTANT! The column order must be identical to the order in the DB!
# sql = (
# f"COPY {self.tbl} ({', '.join(columns)}) "
# "FROM STDIN with (FORMAT CSV, DELIMITER '|', HEADER);"
# )

sql = (
f"COPY {self.tbl} (name, number, date_time, gaia_source_id, ra_star_candidate, dec_star_candidate, ra_target, dec_target, closest_approach, position_angle, velocity, delta, g, j_star, h, k_star, long, loc_t, off_ra, off_dec, proper_motion, ct, multiplicity_flag, e_ra, e_dec, pmra, pmdec, ra_star_deg, dec_star_deg, ra_target_deg, dec_target_deg, created_at, apparent_diameter, aphelion, apparent_magnitude, dec_star_to_date, dec_star_with_pm, dec_target_apparent, diameter, e_dec_target, e_ra_target, eccentricity, ephemeris_version, g_mag_vel_corrected, h_mag_vel_corrected, inclination, instant_uncertainty, magnitude_drop, perihelion, ra_star_to_date, ra_star_with_pm, ra_target_apparent, rp_mag_vel_corrected, semimajor_axis, have_path_coeff, occ_path_max_longitude, occ_path_min_longitude, occ_path_coeff, occ_path_is_nightside, occ_path_max_latitude, occ_path_min_latitude, base_dynclass, bsp_planetary, bsp_source, catalog, dynclass, job_id, leap_seconds, nima, obs_source, orb_ele_source, predict_step, albedo, albedo_err_max, albedo_err_min, alias, arg_perihelion, astorb_dynbaseclass, astorb_dynsubclass, density, density_err_max, density_err_min, diameter_err_max, diameter_err_min, epoch, last_obs_included, long_asc_node, mass, mass_err_max, mass_err_min, mean_anomaly, mean_daily_motion, mpc_critical_list, pha_flag, principal_designation, rms, g_star, h_star, event_duration, moon_separation, sun_elongation, closest_approach_uncertainty, moon_illuminated_fraction, probability_of_centrality) "
f"COPY {self.tbl} (name, number, date_time, gaia_source_id, ra_star_candidate, dec_star_candidate, ra_target, dec_target, closest_approach, position_angle, velocity, delta, g, j_star, h, k_star, long, loc_t, off_ra, off_dec, proper_motion, ct, multiplicity_flag, e_ra, e_dec, pmra, pmdec, ra_star_deg, dec_star_deg, ra_target_deg, dec_target_deg, created_at, apparent_diameter, aphelion, apparent_magnitude, dec_star_to_date, dec_star_with_pm, dec_target_apparent, diameter, e_dec_target, e_ra_target, eccentricity, ephemeris_version, g_mag_vel_corrected, h_mag_vel_corrected, inclination, instant_uncertainty, magnitude_drop, perihelion, ra_star_to_date, ra_star_with_pm, ra_target_apparent, rp_mag_vel_corrected, semimajor_axis, have_path_coeff, occ_path_max_longitude, occ_path_min_longitude, occ_path_coeff, occ_path_is_nightside, occ_path_max_latitude, occ_path_min_latitude, base_dynclass, bsp_planetary, bsp_source, catalog, dynclass, job_id, leap_seconds, nima, obs_source, orb_ele_source, predict_step, albedo, albedo_err_max, albedo_err_min, alias, arg_perihelion, astorb_dynbaseclass, astorb_dynsubclass, density, density_err_max, density_err_min, diameter_err_max, diameter_err_min, epoch, last_obs_included, long_asc_node, mass, mass_err_max, mass_err_min, mean_anomaly, mean_daily_motion, mpc_critical_list, pha_flag, principal_designation, rms, g_star, h_star, event_duration, moon_separation, sun_elongation, closest_approach_uncertainty, moon_illuminated_fraction, probability_of_centrality, hash_id) "
"FROM STDIN with (FORMAT CSV, DELIMITER '|', HEADER);"
)

43 changes: 43 additions & 0 deletions predict_occultation/src/predict_occultation/pipeline/library.py
@@ -1,7 +1,10 @@
#!/usr/bin/python2.7
# -*- coding: utf-8 -*-
import base64
import hashlib
import os
import re
from datetime import datetime, timedelta

import astropy.units as u
import numpy as np
@@ -11,6 +14,46 @@
from scipy.interpolate import interp1d


def normalize_to_nearest_hour(dt):
# Ensure input is a datetime object
if not isinstance(dt, datetime):
raise TypeError("Input must be a datetime object")

# Extract the minute component
minute = dt.minute

# If minutes >= 30, round up to the next hour
if minute >= 30:
dt = dt + timedelta(hours=1)

# Normalize to the nearest hour by setting minutes and seconds to zero
normalized_dt = dt.replace(minute=0, second=0, microsecond=0)

return normalized_dt


def generate_hash(name: str, source_id: int, date_time: datetime):
"""
Generates a hash based on the given parameters.

Args:
name (str): The name parameter.
source_id (int): The source ID parameter.
date_time (datetime): The date and time parameter.

Returns:
str: The generated hash.

"""
# Convert date and time of event to the nearest hour
nearest_hour = normalize_to_nearest_hour(date_time)
# Generate the identifier string with asteroid name, star gaia source id, and nearest hour
identifier = f"{name} {source_id} {nearest_hour.strftime('%Y-%m-%d %H:%M:%S')}"
    md5 = hashlib.md5(identifier.encode("utf-8")).digest()
    # Avoid shadowing the built-in hash(); strip base64 padding for a URL-safe id.
    hash_id = base64.urlsafe_b64encode(md5).decode("utf-8").rstrip("=")
    return hash_id


def check_leapsec(filename):
"""
    Checks whether the leapSec file exists.
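
For illustration only, a minimal sketch of how generate_hash produces a deterministic, URL-safe event identifier; the asteroid name, Gaia source id, and timestamp are hypothetical, and the import assumes pipeline/library.py is importable as library:

from datetime import datetime

from library import generate_hash  # assumes pipeline/library.py is on the import path

# Hypothetical event: asteroid name, Gaia source id, and instant of closest approach.
name = "Chiron"
source_id = 3458075566081093760
date_time = datetime(2024, 8, 22, 18, 47, 12)

# 18:47 rounds up to 19:00, so any instant that normalizes to the same hour
# yields the same identifier string and therefore the same hash.
print(generate_hash(name, source_id, date_time))
# Prints a 22-character URL-safe string (16-byte MD5, base64 encoded, padding stripped),
# which fits the 26-character hash_id column added by the migration.
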