Coverage for marshallEngine/feeders/atlas/data.py : 0%

#!/usr/local/bin/python
# encoding: utf-8
"""
*import the ATLAS stream into the marshall*

:Author:
    David Young
"""
from builtins import str
import sys
import os
os.environ['TERM'] = 'vt100'
from fundamentals import tools
from ..data import data as basedata
from astrocalc.times import now
from astrocalc.times import conversions
from fundamentals.mysql import writequery
from marshallEngine.feeders.atlas.lightcurve import generate_atlas_lightcurves
from datetime import datetime, date, time, timedelta


class data(basedata):
    """
    *Import the ATLAS transient data into the marshall database*

    **Key Arguments**

    - ``log`` -- logger
    - ``dbConn`` -- the marshall database connection
    - ``settings`` -- the settings dictionary

    **Usage**

    To set up your logger, settings and database connections, please use the ``fundamentals`` package (`see tutorial here <http://fundamentals.readthedocs.io/en/latest/#tutorial>`_).
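
    A typical setup block, loosely following that tutorial, looks something like the sketch below (``pathToSettingsFile`` and the keyword values are assumptions; see the tutorial for the exact call):

    ```python
    from fundamentals import tools
    su = tools(
        arguments={"settingsFile": pathToSettingsFile},
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName="marshallEngine"
    )
    arguments, settings, log, dbConn = su.setup()
    ```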

    To initiate a data object, use the following:

    ```python
    from marshallEngine.feeders.atlas.data import data
    ingester = data(
        log=log,
        settings=settings,
        dbConn=dbConn
    ).ingest(withinLastDays=withInLastDay)
    ```
    """

    # Initialisation
    def __init__(
            self,
            log,
            dbConn,
            settings=False,
    ):
        self.log = log
        log.debug("instantiating a new 'data' object")
        self.settings = settings
        self.dbConn = dbConn

        self.fsTableName = "fs_atlas"
        self.survey = "ATLAS"

        # xt-self-arg-tmpx

        return None

    def ingest(
            self,
            withinLastDays):
        """*Ingest the data into the marshall feeder survey table*

        **Key Arguments**

        - ``withinLastDays`` -- within the last number of days (the argument has no default and must be supplied)
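
        **Usage**

        A minimal usage sketch, assuming ``ingester`` is a ``data`` instance constructed with ``log``, ``dbConn`` and ``settings`` as in the class usage example (the 7-day window is purely illustrative):

        ```python
        ingester.ingest(withinLastDays=7)
        ```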
77 """
        self.log.debug('starting the ``ingest`` method')

        # ONLY REQUEST TRANSIENTS FLAGGED FOR FOLLOW-UP AFTER THIS DATE
        timelimit = datetime.now() - timedelta(days=int(withinLastDays))
        timelimit = timelimit.strftime("%Y-%m-%d")

        # GRAB THE ATLAS SUMMARY CSV
        csvDicts = self.get_csv_data(
            url=self.settings["atlas urls"]["summary csv"] + f"?followup_flag_date__gte={timelimit}"
        )

        # CLEAN AND IMPORT THE DATA INTO THE FEEDER SURVEY TABLE, THEN SYNC
        # WITH THE TRANSIENTBUCKET
        self._clean_data_pre_ingest(
            surveyName="ATLAS", withinLastDays=withinLastDays)
        self._import_to_feeder_survey_table()
        self.insert_into_transientBucket(updateTransientSummaries=False)

        sqlQuery = """call update_fs_atlas_forced_phot()"""
        writequery(
            log=self.log,
            sqlQuery=sqlQuery,
            dbConn=self.dbConn
        )

        # SWITCH TO THE FORCED PHOTOMETRY FEEDER TABLE AND REPEAT THE SYNC
        self.fsTableName = "fs_atlas_forced_phot"
        self.survey = "ATLAS FP"

        sqlQuery = """CALL update_transientBucket_atlas_sources()"""
        writequery(
            log=self.log,
            sqlQuery=sqlQuery,
            dbConn=self.dbConn
        )

        self.insert_into_transientBucket(importUnmatched=False)

        # UPDATE THE ATLAS SPECIFIC FLUX SPACE LIGHTCURVES
        generate_atlas_lightcurves(
            log=self.log,
            dbConn=self.dbConn,
            settings=self.settings
        )

        # CLEAN UP TASKS TO MAKE THE TICKET UPDATE
        self.clean_up()

        self.log.debug('completed the ``ingest`` method')
        return None

    def _clean_data_pre_ingest(
            self,
            surveyName,
            withinLastDays=False):
        """*clean up the list of dictionaries containing the ATLAS data, pre-ingest*

        **Key Arguments**

        - ``surveyName`` -- the ATLAS survey name
        - ``withinLastDays`` -- the lower limit of observations to include (within the last N days from now). Default *False*, i.e. no limit

        **Return**

        - ``dictList`` -- the cleaned list of dictionaries ready for ingest

        **Usage**

        To clean the data from the ATLAS survey:

        ```python
        dictList = ingester._clean_data_pre_ingest(surveyName="ATLAS")
        ```

        Note you will also be able to access the data via ``ingester.dictList``.
        """
        self.log.debug('starting the ``_clean_data_pre_ingest`` method')

        self.dictList = []

        # CALC MJD LIMIT
        if withinLastDays:
            mjdLimit = now(
                log=self.log
            ).get_mjd() - float(withinLastDays)

        # CONVERTER TO CONVERT MJD TO DATE
        converter = conversions(
            log=self.log
        )

        for row in self.csvDicts:
            # IF NOT IN THE LAST N DAYS - SKIP
            flagMjd = converter.ut_datetime_to_mjd(
                utDatetime=row["followup_flag_date"])

            if withinLastDays and (float(row["earliest_mjd"]) < mjdLimit and float(flagMjd) < mjdLimit):
                continue

            # MASSAGE THE DATA IN THE INPUT FORMAT TO WHAT IS NEEDED IN THE
            # FEEDER SURVEY TABLE IN THE DATABASE
            target = row["target"]
            diff = row["diff"]
            ref = row["ref"]
            targetImageURL = None
            refImageURL = None
            diffImageURL = None
            # AVOID A NameError BELOW IF NONE OF THE IMAGE STAMPS ARE PRESENT
            objectURL = None

            if target:
                mjdStr = str(int(float(target.split("_")[1])))
                iid, mjdString, diffId, ippIdet, type = target.split('_')
                targetImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/media/images/data/atlas4/" + \
                    mjdStr + '/' + target + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            if ref:
                mjdStr = str(int(float(ref.split("_")[1])))
                iid, mjdString, diffId, ippIdet, type = ref.split('_')
                refImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/media/images/data/atlas4/" + \
                    mjdStr + '/' + ref + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            if diff:
                mjdStr = str(int(float(diff.split("_")[1])))
                iid, mjdString, diffId, ippIdet, type = diff.split('_')
                diffImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/media/images/data/atlas4/" + \
                    mjdStr + '/' + diff + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            discDate = converter.mjd_to_ut_datetime(
                mjd=row["earliest_mjd"], sqlDate=True)

            thisDictionary = {}
            thisDictionary["candidateID"] = row["name"]
            thisDictionary["ra_deg"] = row["ra"]
            thisDictionary["dec_deg"] = row["dec"]
            thisDictionary["mag"] = row["earliest_mag"]
            thisDictionary["observationMJD"] = row["earliest_mjd"]
            thisDictionary["filter"] = row["earliest_filter"]
            thisDictionary["discDate"] = discDate
            thisDictionary["discMag"] = row["earliest_mag"]
            thisDictionary["suggestedType"] = row["object_classification"]
            thisDictionary["targetImageURL"] = targetImageURL
            thisDictionary["refImageURL"] = refImageURL
            thisDictionary["diffImageURL"] = diffImageURL
            thisDictionary["objectURL"] = objectURL

            self.dictList.append(thisDictionary)

        self.log.debug('completed the ``_clean_data_pre_ingest`` method')
        return self.dictList

    # use the tab-trigger below for new method
    # xt-class-method
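
Since none of this module is currently exercised by tests (0% coverage), a dry-run sketch of the cleaning step may be a useful starting point. The field names in the fake row below are exactly those read by ``_clean_data_pre_ingest`` above; the values and the object name are invented for illustration, and ``log``, ``dbConn`` and ``settings`` are assumed to come from the ``fundamentals`` setup shown in the class docstring (``_clean_data_pre_ingest`` itself never touches the database connection).

```python
from marshallEngine.feeders.atlas.data import data

ingester = data(
    log=log,
    dbConn=dbConn,
    settings=settings
)

# BYPASS get_csv_data() AND HAND THE INGESTER A SINGLE FAKE SUMMARY-CSV ROW
ingester.csvDicts = [{
    "name": "ATLAS21abc",                       # illustrative object name
    "ra": 244.25786,
    "dec": -12.90775,
    "earliest_mag": 18.4,
    "earliest_mjd": 59345.5,
    "earliest_filter": "o",
    "followup_flag_date": "2021-05-11 00:00:00",
    "object_classification": 0,
    "target": "ATLAS21abc_59345_110o_12345_t",  # <id>_<mjd>_<diffId>_<ippIdet>_<type>
    "ref": "ATLAS21abc_59345_110o_12345_r",
    "diff": "ATLAS21abc_59345_110o_12345_d"
}]

# RETURNS THE CLEANED LIST OF DICTIONARIES READY FOR THE fs_atlas TABLE
dictList = ingester._clean_data_pre_ingest(surveyName="ATLAS")
```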