Coverage for marshallEngine/feeders/panstarrs/data.py : 0%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1#!/usr/local/bin/python
2# encoding: utf-8
3"""
4*import the panstarrs stream into the marshall*
6:Author:
7 David Young
8"""
9from builtins import str
10import sys
11import os
12os.environ['TERM'] = 'vt100'
13from fundamentals import tools
14from ..data import data as basedata
15from astrocalc.times import now
16from fundamentals.mysql import writequery
class data(basedata):
    """
    *Import the PanSTARRS transient data into the marshall database*

    **Key Arguments**

    - ``log`` -- logger
    - ``dbConn`` -- the marshall database connection
    - ``settings`` -- the settings dictionary

    **Usage**

    To setup your logger, settings and database connections, please use the ``fundamentals`` package (`see tutorial here <http://fundamentals.readthedocs.io/en/latest/#tutorial>`_).

    To initiate a data object, use the following:

    ```python
    from marshallEngine.feeders.panstarrs.data import data
    ingester = data(
        log=log,
        settings=settings,
        dbConn=dbConn
    ).ingest(withinLastDays=withInLastDay)
    ```
    """
    # Initialisation

    def __init__(
            self,
            log,
            dbConn,
            settings=False,
    ):
        self.log = log
        log.debug("instantiating a new 'data' object")
        self.settings = settings
        self.dbConn = dbConn

        # FEEDER-SURVEY DATABASE TABLE AND HUMAN-READABLE SURVEY NAME
        self.fsTableName = "fs_panstarrs"
        self.survey = "Pan-STARRS"

        return None

    def ingest(
            self,
            withinLastDays):
        """*Ingest the data into the marshall feeder survey table*

        **Key Arguments**

        - ``withinLastDays`` -- within the last number of days. *Default: 50*
        """
        self.log.debug('starting the ``ingest`` method')

        # EACH SURVEY PUBLISHES A SUMMARY AND A RECURRENCE CSV FEED. INGEST
        # ALL SIX FEEDS IN THE SAME FIXED ORDER AS BEFORE.
        allLists = []
        for surveyName in ("ps13pi", "ps23pi", "pso3"):
            for csvType in ("summary csv", "recurrence csv"):
                # NOTE(review): get_csv_data (defined on basedata) appears to
                # cache the parsed rows on self.csvDicts, which
                # _clean_data_pre_ingest then reads -- confirm in the base class
                self.get_csv_data(
                    url=self.settings["panstarrs urls"][surveyName][csvType],
                    user=self.settings["credentials"][surveyName]["username"],
                    pwd=self.settings["credentials"][surveyName]["password"]
                )
                allLists.extend(self._clean_data_pre_ingest(
                    surveyName=surveyName, withinLastDays=withinLastDays))

        self.dictList = allLists
        self._import_to_feeder_survey_table()

        self.insert_into_transientBucket()

        # FIX ODD PANSTARRS COORDINATES (WRAP NEGATIVE RA BACK INTO 0-360)
        sqlQuery = """update transientBucket set raDeg = raDeg+360.0 where raDeg < 0;"""
        writequery(
            log=self.log,
            sqlQuery=sqlQuery,
            dbConn=self.dbConn
        )

        # CLEAN UP TASKS TO MAKE THE TICKET UPDATE
        self.clean_up()

        self.log.debug('completed the ``ingest`` method')
        return None

    def _clean_data_pre_ingest(
            self,
            surveyName,
            withinLastDays=False):
        """*clean up the list of dictionaries containing the PS data, pre-ingest*

        **Key Arguments**

        - ``surveyName`` -- the PS survey name
        - ``withinLastDays`` -- the lower limit of observations to include (within the last N days from now). Default *False*, i.e. no limit

        **Return**

        - ``dictList`` -- the cleaned list of dictionaries ready for ingest

        **Usage**

        To clean the data from the PS 3pi survey:

        ```python
        dictList = ingesters._clean_data_pre_ingest(surveyName="3pi")
        ```

        Note you will also be able to access the data via ``ingester.dictList``
        """
        self.log.debug('starting the ``_clean_data_pre_ingest`` method')

        self.dictList = []

        # CALC MJD LIMIT
        if withinLastDays:
            mjdLimit = now(
                log=self.log
            ).get_mjd() - float(withinLastDays)

        for row in self.csvDicts:
            # IF NOT IN THE LAST N DAYS - SKIP
            if withinLastDays and float(row["mjd_obs"]) < mjdLimit:
                continue
            # WRAP NEGATIVE RAs BACK INTO THE 0-360 DEG RANGE
            if float(row["ra_psf"]) < 0:
                row["ra_psf"] = 360. + float(row["ra_psf"])
            thisDictionary = {}

            thisDictionary["candidateID"] = row["ps1_designation"]
            thisDictionary["ra_deg"] = row["ra_psf"]
            thisDictionary["dec_deg"] = row["dec_psf"]
            thisDictionary["mag"] = row["cal_psf_mag"]
            thisDictionary["magerr"] = row["psf_inst_mag_sig"]
            thisDictionary["observationMJD"] = row["mjd_obs"]
            thisDictionary["filter"] = row["filter"]

            # NOT EVERY FEED INCLUDES A DISCOVERY DATE COLUMN
            try:
                thisDictionary["discDate"] = row["followup_flag_date"]
            except KeyError:
                pass
            thisDictionary["discMag"] = row["cal_psf_mag"]

            # SUMMARY FEEDS CARRY transient_object_id; RECURRENCE FEEDS ONLY id
            if "transient_object_id" in row:
                objectId = row["transient_object_id"]
            else:
                objectId = row["id"]
            thisDictionary["objectURL"] = "http://star.pst.qub.ac.uk/sne/%(surveyName)s/psdb/candidate/" % locals() + objectId

            # CLEAN UP IMAGE URLS
            for colName, urlKey in (
                    ("target", "targetImageURL"),
                    ("ref", "refImageURL"),
                    ("diff", "diffImageURL")):
                filename = row[colName]
                if filename:
                    thisDictionary[urlKey] = self._survey_image_url(
                        surveyName=surveyName, filename=filename)

            self.dictList.append(thisDictionary)

        self.log.debug('completed the ``_clean_data_pre_ingest`` method')
        return self.dictList

    def _survey_image_url(
            self,
            surveyName,
            filename):
        """*build the URL of a PS postage-stamp image from its filename*

        **Key Arguments**

        - ``surveyName`` -- the PS survey name
        - ``filename`` -- the image identifier, formatted ``<id>_<mjd>_<diffId>_<ippIdet>_<type>``

        **Return**

        - the full URL of the jpeg image
        """
        # ONLY THE MJD COMPONENT IS USED (AS THE DIRECTORY NAME); THE FULL
        # UNPACKING ALSO VALIDATES THE EXPECTED 5-PART FILENAME FORMAT
        _id, mjdString, _diffId, _ippIdet, _imgType = filename.split('_')
        return "http://star.pst.qub.ac.uk/sne/%(surveyName)s/media/images/data/%(surveyName)s" % locals() + '/' + \
            str(int(float(mjdString))) + '/' + filename + '.jpeg'

    # use the tab-trigger below for new method
    # xt-class-method