Coverage for marshallEngine/feeders/atlas/lightcurve.py : 0%

#!/usr/local/bin/python
# encoding: utf-8
"""
*Generate the forced-photometry lightcurves for ATLAS sources found in the marshall database*

:Author:
    David Young
"""
from __future__ import print_function
from __future__ import division
from builtins import zip
from builtins import str
from past.utils import old_div
import sys
import os
# SUPPRESS MATPLOTLIB WARNINGS
import warnings
warnings.filterwarnings("ignore")
import matplotlib as mpl
import matplotlib.pyplot as plt
import math
import numpy as np
from matplotlib import dates
os.environ['TERM'] = 'vt100'
from fundamentals import tools
from datetime import datetime
from operator import itemgetter
from fundamentals.stats import rolling_window_sigma_clip
import matplotlib.ticker as mtick
from astrocalc.times import conversions
from fundamentals import fmultiprocess
from fundamentals.mysql import database, readquery, writequery


def generate_atlas_lightcurves(
        dbConn,
        log,
        settings):
    """generate all atlas FP lightcurves (clipped and stacked)

    **Key Arguments**

    - ``dbConn`` -- mysql database connection
    - ``log`` -- logger
    - ``settings`` -- settings for the marshall

    ```python
    from marshallEngine.feeders.atlas.lightcurve import generate_atlas_lightcurves
    generate_atlas_lightcurves(
        log=log,
        dbConn=dbConn,
        settings=settings
    )
    ```
    """
    log.debug('starting the ``generate_atlas_lightcurves`` function')

    # SELECT SOURCES THAT NEED THEIR ATLAS FP LIGHTCURVES CREATED/UPDATED
    sqlQuery = u"""
        SELECT
            t.transientBucketId
        FROM
            transientBucket t, pesstoObjects p
        WHERE
            p.transientBucketId = t.transientBucketId
            and t.survey = 'ATLAS FP' and t.limitingMag = 0
            and ((p.atlas_fp_lightcurve < t.dateCreated and p.atlas_fp_lightcurve != 0) or p.atlas_fp_lightcurve is null)
        GROUP BY t.transientBucketId;
    """
    rows = readquery(
        log=log,
        sqlQuery=sqlQuery,
        dbConn=dbConn
    )
    transientIds = [r["transientBucketId"] for r in rows]

    total = len(transientIds)
    if total > 1000:
        print("ATLAS lightcurves need to be generated for %(total)s sources - generating the next 1000" % locals())
        transientIds = transientIds[:1000]
        total = len(transientIds)
    else:
        print("Generating ATLAS lightcurves for %(total)s sources" % locals())

    # SETUP THE INITIAL FIGURE FOR THE PLOT (ONLY ONCE)
    fig = plt.figure(
        num=None,
        figsize=(10, 10),
        dpi=100,
        facecolor=None,
        edgecolor=None,
        frameon=True)
    mpl.rc('ytick', labelsize=18)
    mpl.rc('xtick', labelsize=18)
    mpl.rcParams.update({'font.size': 22})

    # FORMAT THE AXES
    ax = fig.add_axes(
        [0.1, 0.1, 0.8, 0.8],
        polar=False,
        frameon=True)
    ax.set_xlabel('MJD', labelpad=20)
    ax.set_yticks([2.2])

    # RHS AXIS TICKS
    plt.setp(ax.xaxis.get_majorticklabels(),
             rotation=45, horizontalalignment='right')
    ax.xaxis.set_major_formatter(mtick.FormatStrFormatter('%5.0f'))

    y_formatter = mpl.ticker.FormatStrFormatter("%2.1f")
    ax.yaxis.set_major_formatter(y_formatter)
    ax.xaxis.grid(False)

    # ADD SECOND Y-AXIS
    ax2 = ax.twinx()
    ax2.yaxis.set_major_formatter(y_formatter)
    ax2.set_ylabel(r'Flux ($\mu$Jy)', rotation=-90., labelpad=27)
    ax2.grid(False)

    # ADD SECOND X-AXIS
    ax3 = ax.twiny()
    ax3.grid(True)
    plt.setp(ax3.xaxis.get_majorticklabels(),
             rotation=45, horizontalalignment='left')

    # CONVERTER TO CONVERT MJD TO DATE
    converter = conversions(
        log=log
    )

    if len(transientIds) < 3:
        plotPaths = []
        for transientBucketId in transientIds:
            plotPaths.append(
                plot_single_result(
                    log=log,
                    transientBucketId=transientBucketId,
                    fig=fig,
                    converter=converter,
                    ax=ax,
                    settings=settings)
            )
    else:
        log.info("""starting multiprocessing""")
        plotPaths = fmultiprocess(
            log=log,
            function=plot_single_result,
            inputArray=transientIds,
            poolSize=False,
            timeout=7200,
            fig=fig,
            converter=converter,
            ax=ax,
            settings=settings
        )
        log.info("""finished multiprocessing""")

    # REMOVE MISSING PLOTS
    transientIdGood = [t for p, t in zip(plotPaths, transientIds) if p]
    transientIdBad = [t for p, t in zip(plotPaths, transientIds) if p is None]

    # UPDATE THE atlas_fp_lightcurve DATE FOR TRANSIENTS WE HAVE JUST
    # GENERATED PLOTS FOR
    if len(transientIdGood):
        transientIdGood = (",").join([str(t) for t in transientIdGood])
        sqlQuery = f"""update pesstoObjects set atlas_fp_lightcurve = NOW() where transientBucketID in ({transientIdGood})"""
        writequery(
            log=log,
            sqlQuery=sqlQuery,
            dbConn=dbConn
        )

    # RESET THE atlas_fp_lightcurve FLAG FOR TRANSIENTS WHOSE PLOTS FAILED
    # TO GENERATE
    if len(transientIdBad):
        transientIdBad = (",").join([str(t) for t in transientIdBad])
        sqlQuery = f"""update pesstoObjects set atlas_fp_lightcurve = 0 where transientBucketID in ({transientIdBad})"""
        writequery(
            log=log,
            sqlQuery=sqlQuery,
            dbConn=dbConn
        )

    log.debug('completed the ``generate_atlas_lightcurves`` function')
    return None


def plot_single_result(
        transientBucketId,
        log,
        fig,
        converter,
        ax,
        settings):
    """*plot single result*

    **Key Arguments:**

    - `transientBucketId` -- the transient ID for the source in the database
    - `log` -- logger
    - `fig` -- the matplotlib figure to use for the plot
    - `converter` -- converter to switch mjd to ut-date
    - `ax` -- plot axis
    - `settings` -- dictionary of settings (from yaml settings file)

    **Return:**

    - `filePath` -- path to the output PNG plot
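
    **Usage**

    A minimal usage sketch (assumes `fig`, `converter` and `ax` have been built as in `generate_atlas_lightcurves` above; the transientBucketId value is illustrative only):

    ```python
    filePath = plot_single_result(
        log=log,
        transientBucketId=12345,
        fig=fig,
        converter=converter,
        ax=ax,
        settings=settings)
    ```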
207 """
    log.info('starting the ``plot_single_result`` method')

    # SETUP DATABASE CONNECTION
    dbConn = database(
        log=log,
        dbSettings=settings["database settings"]
    ).connect()

    # GET THE DATA FROM THE DATABASE
    sqlQuery = u"""
        SELECT
            atlas_designation,
            mjd_obs,
            filter,
            marshall_mag as mag,
            marshall_mag_error as dm,
            fnu*1e29 as uJy,
            fnu_error*1e29 as duJy,
            snr,
            zp,
            marshall_limiting_mag as limiting_mag
        FROM
            fs_atlas_forced_phot
        WHERE
            transientBucketId = %(transientBucketId)s
            and fnu is not null;
    """ % locals()

    epochs = readquery(
        log=log,
        sqlQuery=sqlQuery,
        dbConn=dbConn
    )

    ax2 = get_twin_axis(ax, "x")
    ax3 = get_twin_axis(ax, "y")

    epochs = sigma_clip_data(log=log, fpData=epochs)

    # c = cyan, o = orange
    magnitudes = {
        'c': {'mjds': [], 'mags': [], 'magErrs': []},
        'o': {'mjds': [], 'mags': [], 'magErrs': []},
        'I': {'mjds': [], 'mags': [], 'magErrs': []},
    }

    # SPLIT BY FILTER
    for epoch in epochs:
        if epoch["filter"] in ["c", "o", "I"]:
            magnitudes[epoch["filter"]]["mjds"].append(epoch["mjd_obs"])
            magnitudes[epoch["filter"]]["mags"].append(epoch["uJy"])
            magnitudes[epoch["filter"]]["magErrs"].append(epoch["duJy"])

    # STACK PHOTOMETRY IF REQUIRED
    magnitudes = stack_photometry(log=log,
                                  magnitudes=magnitudes, binningDays=1.)

    # ADD MAGNITUDES AND LIMITS FOR EACH FILTER
    handles = []

    # SET AXIS LIMITS FOR MAGNITUDES
    upperMag = -99999999999
    lowerMag = 99999999999

    # DETERMINE THE TIME-RANGE OF DETECTION FOR THE SOURCE
    mjdList = magnitudes['o']['mjds'] + \
        magnitudes['c']['mjds'] + magnitudes['I']['mjds']
    if len(mjdList) == 0:
        log.error(f'{transientBucketId} does not have enough data to plot a lightcurve')
        return None
    lowerDetectionMjd = min(mjdList)
    upperDetectionMjd = max(mjdList)

    # DETERMINE THE MAGNITUDE RANGE
    allMags = magnitudes['o']['mags'] + magnitudes['c']['mags']
    magRange = max(allMags) - min(allMags)
    deltaMag = magRange * 0.1

    if len(magnitudes['o']['mjds']):
        orangeMag = ax.errorbar(magnitudes['o']['mjds'], magnitudes['o']['mags'], yerr=magnitudes[
            'o']['magErrs'], color='#FFA500', fmt='o', mfc='#FFA500', mec='#FFA500', zorder=1, ms=12., alpha=0.8, linewidth=1.2, label='o-band mag ', capsize=10)

        # ERRORBAR CAP THICKNESS
        orangeMag[1][0].set_markeredgewidth(0.7)
        orangeMag[1][1].set_markeredgewidth(0.7)
        handles.append(orangeMag)
        errMask = np.array(magnitudes['o']['magErrs'])
        np.putmask(errMask, errMask > 30, 30)

        if max(np.array(magnitudes['o']['mags']) + errMask) > upperMag:
            upperMag = max(
                np.array(magnitudes['o']['mags']) + errMask)
            upperMagIndex = np.argmax((
                magnitudes['o']['mags']) + errMask)

        if min(np.array(magnitudes['o']['mags']) - errMask) < lowerMag:
            lowerMag = min(
                np.array(magnitudes['o']['mags']) - errMask)
            lowerMagIndex = np.argmin((
                magnitudes['o']['mags']) - errMask)

    if len(magnitudes['c']['mjds']):
        cyanMag = ax.errorbar(magnitudes['c']['mjds'], magnitudes['c']['mags'], yerr=magnitudes[
            'c']['magErrs'], color='#2aa198', fmt='o', mfc='#2aa198', mec='#2aa198', zorder=1, ms=12., alpha=0.8, linewidth=1.2, label='c-band mag ', capsize=10)

        # ERRORBAR CAP THICKNESS
        cyanMag[1][0].set_markeredgewidth(0.7)
        cyanMag[1][1].set_markeredgewidth(0.7)
        handles.append(cyanMag)
        errMask = np.array(magnitudes['c']['magErrs'])
        np.putmask(errMask, errMask > 30, 30)

        if max(np.array(magnitudes['c']['mags']) + errMask) > upperMag:
            upperMag = max(
                np.array(magnitudes['c']['mags']) + errMask)
            upperMagIndex = np.argmax((
                magnitudes['c']['mags']) + errMask)

        if min(np.array(magnitudes['c']['mags']) - errMask) < lowerMag:
            lowerMag = min(
                np.array(magnitudes['c']['mags']) - errMask)
            lowerMagIndex = np.argmin(
                (magnitudes['c']['mags']) - errMask)

    plt.legend(handles=handles, prop={
               'size': 13.5}, bbox_to_anchor=(0.95, 1.2), loc=0, borderaxespad=0., ncol=4, scatterpoints=1)

    # SET THE TEMPORAL X-RANGE
    allMjd = magnitudes['o']['mjds'] + magnitudes['c']['mjds']
    xmin = min(allMjd) - 5.
    xmax = max(allMjd) + 5.
    mjdRange = xmax - xmin
    ax.set_xlim([xmin, xmax])
    ax.set_ylim([lowerMag - deltaMag, upperMag + deltaMag])

    # PLOT THE MAGNITUDE SCALE ON THE LHS AXIS. FLUXES ARE IN MICROJANSKY SO
    # AB MAG m = -2.5 * log10(flux) + 23.9 (E.G. 100 uJy -> m = 18.9)
    axisUpperFlux = upperMag
    axisLowerFlux = 1e-29
    axisLowerMag = -2.5 * math.log10(axisLowerFlux) + 23.9
    if axisUpperFlux > 0.:
        axisUpperMag = -2.5 * math.log10(axisUpperFlux) + 23.9
    else:
        axisUpperMag = None
    if axisUpperMag:
        ax.set_ylabel('Apparent Magnitude', labelpad=15)
        magLabels = [20., 17.0, 15.0, 14.0, 13.5, 13.0]
        if axisUpperMag < 14:
            magLabels = [20.0, 16.0, 15.0, 14.0,
                         13.0, 12.5, 12.0, 11.5, 11.0]
        elif axisUpperMag < 17:
            magLabels = [20.,
                         18.0, 17.0, 16.5, 16.0, 15.5, 15.0]
        elif axisUpperMag < 18:
            magLabels = [20., 19.5, 19.0, 18.5,
                         18.0, 17.5, 17.0, 16.5, 16.0, 15.5, 15.0]
        elif axisUpperMag < 20:
            magLabels = [20.5, 20.0, 19.5, 19.0, 18.5, 18.0]

        # INVERT THE SAME RELATION TO PLACE THE MAG LABELS AT THE CORRECT FLUXES
        magFluxes = [pow(10, old_div(-(m - 23.9), 2.5)) for m in magLabels]

        ax.yaxis.set_major_locator(mtick.FixedLocator((magFluxes)))
        ax.yaxis.set_major_formatter(mtick.FixedFormatter((magLabels)))
    else:
        ax.set_yticks([])

    # MATCH THE SECOND (FLUX) Y-AXIS TO THE SAME LIMITS
    ax2.set_ylim([lowerMag - deltaMag, upperMag + deltaMag])

    # RELATIVE TIME SINCE DISCOVERY
    lower, upper = ax.get_xlim()
    utLower = converter.mjd_to_ut_datetime(mjd=lower, datetimeObject=True)
    utUpper = converter.mjd_to_ut_datetime(mjd=upper, datetimeObject=True)
    ax3.set_xlim([utLower, utUpper])

    if mjdRange > 365:
        ax3.xaxis.set_major_formatter(dates.DateFormatter('%b %d %y'))
    else:
        ax3.xaxis.set_major_formatter(dates.DateFormatter('%b %d'))

    # FIND THE CACHE DIR FOR THE SOURCE
    cacheDirectory = settings[
        "cache-directory"] + "/transients/" + str(transientBucketId)
    if not os.path.exists(cacheDirectory):
        os.makedirs(cacheDirectory)
    filePath = cacheDirectory + "/atlas_fp_lightcurve.png"
    plt.savefig(filePath, bbox_inches='tight', transparent=False,
                pad_inches=0.1, optimize=True, progressive=True)

    # REMOVE THE DATA POINTS FROM THE SHARED FIGURE SO THE NEXT SOURCE
    # STARTS WITH A CLEAN PLOT
    try:
        cyanMag.remove()
    except:
        pass

    try:
        orangeMag.remove()
    except:
        pass

    log.debug('completed the ``plot_single_result`` method')
    return filePath


def sigma_clip_data(
        log,
        fpData,
        clippingSigma=2.2):
    """*clean up rogue data from the files by performing some basic clipping*

    **Key Arguments:**

    - `log` -- logger
    - `fpData` -- data dictionary of forced photometry
    - `clippingSigma` -- the level at which to clip flux data

    **Return:**

    - `epochs` -- sigma clipped and cleaned epoch data
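
    **Usage**

    A minimal usage sketch (`epochs` here is the list of forced-photometry dicts returned by the `fs_atlas_forced_phot` query in `plot_single_result` above):

    ```python
    epochs = sigma_clip_data(log=log, fpData=epochs)
    ```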
427 """
    log.info('starting the ``sigma_clip_data`` function')

    mjdMin = False
    mjdMax = False

    # PARSE DATA WITH SOME FIXED CLIPPING
    oepochs = []
    cepochs = []

    for row in fpData:
        # REMOVE VERY HIGH ERROR DATA POINTS
        if row["duJy"] > 4000:
            continue
        if mjdMin and mjdMax:
            if row["mjd_obs"] < mjdMin or row["mjd_obs"] > mjdMax:
                continue
        if row["filter"] == "c":
            cepochs.append(row)
        if row["filter"] == "o":
            oepochs.append(row)

    # SORT BY MJD
    cepochs = sorted(cepochs, key=itemgetter('mjd_obs'), reverse=False)
    oepochs = sorted(oepochs, key=itemgetter('mjd_obs'), reverse=False)

    # SIGMA-CLIP THE DATA WITH A ROLLING WINDOW
    cdataFlux = []
    cdataFlux[:] = [row["uJy"] for row in cepochs]
    odataFlux = []
    odataFlux[:] = [row["uJy"] for row in oepochs]

    maskList = []
    for flux in [cdataFlux, odataFlux]:
        fullMask = rolling_window_sigma_clip(
            log=log,
            array=flux,
            clippingSigma=clippingSigma,
            windowSize=11)
        maskList.append(fullMask)

    try:
        cepochs = [e for e, m in zip(
            cepochs, maskList[0]) if m == False]
    except:
        cepochs = []

    try:
        oepochs = [e for e, m in zip(
            oepochs, maskList[1]) if m == False]
    except:
        oepochs = []

    log.debug('completed the ``sigma_clip_data`` function')
    return cepochs + oepochs


def stack_photometry(
        log,
        magnitudes,
        binningDays=1.):
    """*stack the photometry for the given temporal range*

    **Key Arguments:**

    - `log` -- logger
    - `magnitudes` -- dictionary of photometry divided into filter sets
    - `binningDays` -- the binning to use (in days)

    **Return:**

    - `summedMagnitudes` -- the stacked photometry
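
    **Usage**

    A minimal usage sketch (the `magnitudes` dictionary follows the `{'c': {'mjds': [], 'mags': [], 'magErrs': []}, ...}` structure built in `plot_single_result` above):

    ```python
    magnitudes = stack_photometry(
        log=log,
        magnitudes=magnitudes,
        binningDays=1.)
    ```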
496 """
    log.debug('starting the ``stack_photometry`` method')

    # IF WE WANT TO 'STACK' THE PHOTOMETRY
    summedMagnitudes = {
        'c': {'mjds': [], 'mags': [], 'magErrs': [], 'n': []},
        'o': {'mjds': [], 'mags': [], 'magErrs': [], 'n': []},
        'I': {'mjds': [], 'mags': [], 'magErrs': [], 'n': []},
    }

    # MAGNITUDES/FLUXES ARE DIVIDED INTO UNIQUE FILTER SETS - SO ITERATE OVER
    # FILTERS
    allData = []
    for fil, data in list(magnitudes.items()):
        # WE'RE GOING TO CREATE FURTHER SUBSETS FOR EACH UNIQUE MJD (FLOORED TO AN INTEGER)
        # MAG VARIABLE == FLUX (JUST TO CONFUSE YOU)
        distinctMjds = {}
        for mjd, flx, err in zip(data["mjds"], data["mags"], data["magErrs"]):
            # DICT KEY IS THE UNIQUE INTEGER MJD
            key = str(int(math.floor(mjd / float(binningDays))))
            # FIRST DATA POINT OF THE NIGHT? CREATE A NEW DATA SET
            if key not in distinctMjds:
                distinctMjds[key] = {
                    "mjds": [mjd],
                    "mags": [flx],
                    "magErrs": [err]
                }
            # OR NOT THE FIRST? APPEND TO THE ALREADY CREATED LIST
            else:
                distinctMjds[key]["mjds"].append(mjd)
                distinctMjds[key]["mags"].append(flx)
                distinctMjds[key]["magErrs"].append(err)

        # ALL DATA NOW IN MJD SUBSETS. SO FOR EACH SUBSET (I.E. INDIVIDUAL
        # NIGHTS) ...
        for k, v in list(distinctMjds.items()):
            # GIVE ME THE MEAN MJD
            meanMjd = old_div(sum(v["mjds"]), len(v["mjds"]))
            summedMagnitudes[fil]["mjds"].append(meanMjd)
            # GIVE ME THE MEAN FLUX
            meanFlux = old_div(sum(v["mags"]), len(v["mags"]))
            summedMagnitudes[fil]["mags"].append(meanFlux)
            # GIVE ME THE COMBINED ERROR (MEAN ERROR / SQRT(N))
            combError = sum(v["magErrs"]) / len(v["magErrs"]
                                               ) / math.sqrt(len(v["magErrs"]))
            summedMagnitudes[fil]["magErrs"].append(combError)
            # GIVE ME NUMBER OF DATA POINTS COMBINED
            n = len(v["mjds"])
            summedMagnitudes[fil]["n"].append(n)
            allData.append({
                'MJD': f'{meanMjd:0.2f}',
                'uJy': f'{meanFlux:0.2f}',
                'duJy': f'{combError:0.2f}',
                'F': fil,
                'n': n
            })

    log.debug('completed the ``stack_photometry`` method')
    return summedMagnitudes


def get_twin(ax, axis):
    """*return the axis twinned with `ax` along the given axis ("x" or "y"), or None if no twin exists*"""
    assert axis in ("x", "y")
    siblings = getattr(ax, f"get_shared_{axis}_axes")().get_siblings(ax)
    for sibling in siblings:
        if sibling.bbox.bounds == ax.bbox.bounds and sibling is not ax:
            return sibling
    return None


def get_twin_axis(ax, axis):
    """*return the axis twinned with `ax` along the given axis ("x" or "y"), or None if no twin exists*"""
    assert axis in ("x", "y")
    siblings = getattr(ax, f"get_shared_{axis}_axes")().get_siblings(ax)
    for sibling in siblings:
        # TWINNED AXES OCCUPY THE SAME BOUNDING BOX BUT ARE NOT THE SAME OBJECT
        if sibling.bbox.bounds == ax.bbox.bounds and sibling is not ax:
            return sibling
    return None
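
# A MINIMAL USAGE SKETCH OF THE TWIN-AXIS LOOKUP (MIRRORS plot_single_result
# ABOVE; VARIABLE NAMES ARE ILLUSTRATIVE ONLY):
#
#     fig = plt.figure()
#     ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
#     ax2 = ax.twinx()  # SHARES THE X-AXIS WITH ax
#     ax3 = ax.twiny()  # SHARES THE Y-AXIS WITH ax
#     assert get_twin_axis(ax, "x") is ax2
#     assert get_twin_axis(ax, "y") is ax3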