
#!/usr/local/bin/python
# encoding: utf-8
"""
*import the ATLAS stream into the marshall*

:Author:
    David Young
"""
from builtins import str
import sys
import os
os.environ['TERM'] = 'vt100'
from fundamentals import tools
from ..data import data as basedata
from astrocalc.times import now
from astrocalc.times import conversions
from fundamentals.mysql import writequery
from marshallEngine.feeders.atlas.lightcurve import generate_atlas_lightcurves
from datetime import datetime, date, time, timedelta


class data(basedata):

23 """ 

24 *Import the ATLAS transient data into the marshall database* 

25 

26 **Key Arguments** 

27 

28 - ``log`` -- logger 

29 - ``dbConn`` -- the marshall database connection 

30 - ``settings`` -- the settings dictionary 

31 

32 **Usage** 

33 

34 To setup your logger, settings and database connections, please use the ``fundamentals`` package (`see tutorial here <http://fundamentals.readthedocs.io/en/latest/#tutorial>`_).  

35 

36 To initiate a data object, use the following: 

37 

38 ```python 

39 from marshallEngine.feeders.atlas.data import data 

40 ingester = data( 

41 log=log, 

42 settings=settings, 

43 dbConn=dbConn 

44 ).ingest(withinLastDays=withInLastDay)  

45 ``` 
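
    For reference, here is a minimal setup sketch using the ``fundamentals`` ``tools`` class; the parameter values (settings-file path, log level, project name) are placeholders only, so follow the tutorial linked above for the canonical pattern:

    ```python
    from fundamentals import tools
    # bootstrap a logger, settings dictionary and database connection from a
    # marshall settings file (the path below is a placeholder)
    su = tools(
        arguments={"settingsFile": "/path/to/marshall/settings.yaml"},
        docString=__doc__,
        logLevel="WARNING",
        projectName="marshallEngine"
    )
    arguments, settings, log, dbConn = su.setup()
    ```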

    """

    # Initialisation

    def __init__(
            self,
            log,
            dbConn,
            settings=False,
    ):
        self.log = log
        log.debug("instantiating a new 'data' object")
        self.settings = settings
        self.dbConn = dbConn

        self.fsTableName = "fs_atlas"
        self.survey = "ATLAS"


        return None

    def ingest(
            self,
            withinLastDays):
        """*Ingest the data into the marshall feeder survey table*

        **Key Arguments**

        - ``withinLastDays`` -- within the last number of days. *Default: 50*

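        **Usage**

        Assuming ``ingester`` is an instance of this class (see the class docstring), a typical call looks like the following; the 50-day window simply mirrors the documented default:

        ```python
        ingester.ingest(withinLastDays=50)
        ```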

77 """ 

        self.log.debug('starting the ``ingest`` method')

        timelimit = datetime.now() - timedelta(days=int(withinLastDays))
        timelimit = timelimit.strftime("%Y-%m-%d")

        csvDicts = self.get_csv_data(
            url=self.settings["atlas urls"]["summary csv"] + f"?followup_flag_date__gte={timelimit}"
        )

        self._clean_data_pre_ingest(
            surveyName="ATLAS", withinLastDays=withinLastDays)

        self._import_to_feeder_survey_table()
        self.insert_into_transientBucket(updateTransientSummaries=False)

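        # UPDATE THE fs_atlas_forced_phot FEEDER TABLE VIA A DATABASE
        # STORED PROCEDURE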

        sqlQuery = """CALL update_fs_atlas_forced_phot()"""
        writequery(
            log=self.log,
            sqlQuery=sqlQuery,
            dbConn=self.dbConn
        )

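        # SWITCH THIS INGESTER TO THE FORCED-PHOTOMETRY FEEDER TABLE SO THE
        # REMAINING INGEST STEPS ACT ON THE ATLAS FP DATA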

        self.fsTableName = "fs_atlas_forced_phot"
        self.survey = "ATLAS FP"

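        # UPDATE THE ATLAS SOURCES IN THE transientBucket VIA A DATABASE
        # STORED PROCEDURE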

        sqlQuery = """CALL update_transientBucket_atlas_sources()"""
        writequery(
            log=self.log,
            sqlQuery=sqlQuery,
            dbConn=self.dbConn
        )

        self.insert_into_transientBucket(importUnmatched=False)

        # UPDATE THE ATLAS SPECIFIC FLUX SPACE LIGHTCURVES
        generate_atlas_lightcurves(
            log=self.log,
            dbConn=self.dbConn,
            settings=self.settings
        )

        # CLEAN UP TASKS TO MAKE THE TICKET UPDATE
        self.clean_up()

        self.log.debug('completed the ``ingest`` method')
        return None


    def _clean_data_pre_ingest(
            self,
            surveyName,
            withinLastDays=False):
        """*Clean up the list of dictionaries containing the ATLAS data, pre-ingest*

        **Key Arguments**

        - ``surveyName`` -- the ATLAS survey name
        - ``withinLastDays`` -- the lower limit of observations to include (within the last N days from now). Default *False*, i.e. no limit

        **Return**

        - ``dictList`` -- the cleaned list of dictionaries ready for ingest

        **Usage**

        To clean the data from the ATLAS survey:

        ```python
        dictList = ingester._clean_data_pre_ingest(surveyName="ATLAS")
        ```

        Note that you can also access the cleaned data via ``ingester.dictList``.

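        Each entry of the returned ``dictList`` carries the keys written to the feeder-survey table; the values below are purely illustrative placeholders (the keys are the ones set in the loop in this method):

        ```python
        {
            "candidateID": "ATLAS21abc",        # row["name"]
            "ra_deg": 244.1121,                 # row["ra"]
            "dec_deg": -12.0342,                # row["dec"]
            "mag": 18.3,                        # row["earliest_mag"]
            "observationMJD": 59345.5,          # row["earliest_mjd"]
            "filter": "o",                      # row["earliest_filter"]
            "discDate": "2021-05-11 00:00:00",  # earliest_mjd converted to UT
            "discMag": 18.3,                    # row["earliest_mag"]
            "suggestedType": "SN",              # row["object_classification"]
            "targetImageURL": "https://star.pst.qub.ac.uk/sne/atlas4/media/images/data/atlas4/...",
            "refImageURL": None,
            "diffImageURL": None,
            "objectURL": "https://star.pst.qub.ac.uk/sne/atlas4/candidate/..."
        }
        ```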

152 """ 

        self.log.debug('starting the ``_clean_data_pre_ingest`` method')

        self.dictList = []

        # CALC MJD LIMIT
        if withinLastDays:
            mjdLimit = now(
                log=self.log
            ).get_mjd() - float(withinLastDays)

        # CONVERTER TO CONVERT MJD TO DATE
        converter = conversions(
            log=self.log
        )

        for row in self.csvDicts:
            # IF NOT IN THE LAST N DAYS - SKIP
            flagMjd = converter.ut_datetime_to_mjd(
                utDatetime=row["followup_flag_date"])

            if withinLastDays and (float(row["earliest_mjd"]) < mjdLimit and float(flagMjd) < mjdLimit):
                continue

            # MASSAGE THE DATA IN THE INPUT FORMAT TO WHAT IS NEEDED IN THE
            # FEEDER SURVEY TABLE IN THE DATABASE
            target = row["target"]
            diff = row["diff"]
            ref = row["ref"]
            targetImageURL = None
            refImageURL = None
            diffImageURL = None
            objectURL = None

            if target:
                mjdStr = str(int(float(target.split("_")[1])))
                iid, mjdString, diffId, ippIdet, imageType = target.split('_')
                targetImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/media/images/data/atlas4/" + \
                    mjdStr + '/' + target + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            if ref:
                mjdStr = str(int(float(ref.split("_")[1])))
                iid, mjdString, diffId, ippIdet, imageType = ref.split('_')
                refImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/media/images/data/atlas4/" + \
                    mjdStr + '/' + ref + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            if diff:
                mjdStr = str(int(float(diff.split("_")[1])))
                iid, mjdString, diffId, ippIdet, imageType = diff.split('_')
                diffImageURL = "https://star.pst.qub.ac.uk/sne/atlas4/media/images/data/atlas4/" + \
                    mjdStr + '/' + diff + '.jpeg'
                objectURL = "https://star.pst.qub.ac.uk/sne/atlas4/candidate/" + iid

            discDate = converter.mjd_to_ut_datetime(
                mjd=row["earliest_mjd"], sqlDate=True)

            thisDictionary = {}
            thisDictionary["candidateID"] = row["name"]
            thisDictionary["ra_deg"] = row["ra"]
            thisDictionary["dec_deg"] = row["dec"]
            thisDictionary["mag"] = row["earliest_mag"]
            thisDictionary["observationMJD"] = row["earliest_mjd"]
            thisDictionary["filter"] = row["earliest_filter"]
            thisDictionary["discDate"] = discDate
            thisDictionary["discMag"] = row["earliest_mag"]
            thisDictionary["suggestedType"] = row["object_classification"]
            thisDictionary["targetImageURL"] = targetImageURL
            thisDictionary["refImageURL"] = refImageURL
            thisDictionary["diffImageURL"] = diffImageURL
            thisDictionary["objectURL"] = objectURL

            self.dictList.append(thisDictionary)

        self.log.debug('completed the ``_clean_data_pre_ingest`` method')
        return self.dictList
