Coverage for transientNamer/search.py: 50%

1#!/usr/local/bin/python
2# encoding: utf-8
3"""
4*Search the Transient Name Server with various search constraints*
6:Author:
7 David Young
8"""
9from __future__ import print_function
10from builtins import str
11from builtins import object
12import sys
13import os
14import re
15import requests
16requests.packages.urllib3.disable_warnings()
17import copy
18import collections
19from operator import itemgetter
20os.environ['TERM'] = 'vt100'
21from fundamentals import tools
22from datetime import datetime, date, time, timedelta
23import time as timesleep
24from fundamentals.files import list_of_dictionaries_to_mysql_inserts
25from fundamentals.renderer import list_of_dictionaries
26from astrocalc.coords import unit_conversion
27import time
30class search(object):
31 """
32 *Search the Transient Name Server with various search constraints*
34 **Key Arguments**
36 - ``log`` -- logger
37 - ``settings`` -- the settings dictionary
38 - ``ra`` -- RA of the location to search (sexagesimal or decimal degrees accepted)
39 - ``dec`` -- DEC of the location to search
40 - ``radiusArcsec`` -- the radius (in arcsec) of the conesearch to perform against the TNS
41 - ``name`` -- name of the object to search the TNS for
42 - ``discInLastDays`` -- search the TNS for transients reported in the last X days
43 - ``comments`` -- include the comments from the TNS in the results; note these can be long, making table outputs somewhat unreadable. Default *False*
46 **Usage**
48 To instantiate a `search` object and query the TNS by object name (either TNS or internal survey names are accepted):
50 ```python
51 from transientNamer import search
52 tns = search(
53 log=log,
54 name="Gaia16bbi"
55 )
56 ```
58 or for a conesearch use something similar to:
60 ```python
61 from transientNamer import search
62 tns = search(
63 log=log,
64 ra="06:50:36.74",
65 dec="+31:06:44.7",
66 radiusArcsec=5
67 )
68 ```
70 Note the search method can accept coordinates in sexagesimal or decimal degree formats.
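For example, the same conesearch could be run with decimal degree coordinates (a minimal sketch, reusing the logger from the examples above):

```python
from transientNamer import search
tns = search(
    log=log,
    ra=102.653,
    dec=31.112,
    radiusArcsec=5
)
```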
72 To list all new objects reported in the last three weeks, use:
74 ```python
75 from transientNamer import search
76 tns = search(
77 log=log,
78 discInLastDays=21
79 )
80 ```
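Once a search has run, the matched sources and their associated data can be accessed directly (`tns.sources`, `tns.photometry`, `tns.spectra`, `tns.files`) or rendered via the `csv`, `json`, `yaml`, `markdown`, `table` and `mysql` methods documented below; a minimal sketch:

```python
sources = tns.sources  # a list of dictionaries, one per matched transient
csvSources, csvPhot, csvSpec, csvFiles = tns.csv()
print(csvSources)
```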
82 """
83 # Initialisation
85 def __init__(
86 self,
87 log,
88 ra="",
89 dec="",
90 radiusArcsec="",
91 name="",
92 discInLastDays="",
93 settings=False,
94 comments=False
95 ):
96 self.log = log
97 log.debug("instantiating a new 'search' object")
98 self.settings = settings
99 self.ra = ra
100 self.dec = dec
101 self.radiusArcsec = radiusArcsec
102 self.comments = comments
103 self.name = name
104 self.internal_name = ""
105 self.discInLastDays = discInLastDays
106 self.page = 0
107 self.batchSize = 500
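# THE TNS RETURNS RESULTS IN PAGES OF UP TO batchSize ROWS; `page` IS
# INCREMENTED IN _query_tns() UNTIL A PAGE RETURNS FEWER ROWS THAN
# batchSize, I.E. THE FINAL PAGE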
109 # CREATE THE TIME-RANGE WINDOW TO SEARCH TNS
110 if not discInLastDays:
111 self.discInLastDays = ""
112 self.period_units = ""
113 else:
114 self.discInLastDays = int(discInLastDays)
115 self.period_units = "days"
117 # DETERMINE IF WE HAVE A TNS OR INTERNAL SURVEY NAME
118 if self.name:
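# THE REGEX STRIPS AN OPTIONAL 'SN'/'AT' PREFIX SO THAT NAMES LIKE
# 'SN 2016asf' OR 'AT2016asf' REDUCE TO THE BARE TNS DESIGNATION
# ('2016asf'); ANYTHING THAT DOES NOT MATCH IS TREATED AS AN INTERNAL
# SURVEY NAME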
119 matchObject = re.match(r'^((SN|AT) ?)?(\d{4}\w{1,6})', self.name)
120 if matchObject:
121 self.name = matchObject.group(3)
122 else:
123 self.internal_name = self.name
124 self.name = ""
126 # DO THE SEARCH OF THE TNS AND COMPILE THE RESULTS INTO SEPARATE RESULT
127 # SETS
128 self.sourceResultsList, self.photResultsList, self.specResultsList, self.relatedFilesResultsList = self._query_tns()
129 self.sourceResults = list_of_dictionaries(
130 log=log,
131 listOfDictionaries=self.sourceResultsList
132 )
133 self.photResults = list_of_dictionaries(
134 log=log,
135 listOfDictionaries=self.photResultsList
136 )
137 self.specResults = list_of_dictionaries(
138 log=log,
139 listOfDictionaries=self.specResultsList
140 )
141 self.relatedFilesResults = list_of_dictionaries(
142 log=log,
143 listOfDictionaries=self.relatedFilesResultsList
144 )
146 return None
148 @property
149 def sources(
150 self):
151 """*The results of the search returned as a python list of dictionaries*
153 **Usage**
155 ```python
156 sources = tns.sources
157 ```
159 """
160 sourceResultsList = []
161 sourceResultsList[:] = [dict(l) for l in self.sourceResultsList]
162 return sourceResultsList
164 @property
165 def spectra(
166 self):
167 """*The associated source spectral data*
169 **Usage**
171 ```python
172 sourceSpectra = tns.spectra
173 ```
175 """
176 specResultsList = []
177 specResultsList[:] = [dict(l) for l in self.specResultsList]
178 return specResultsList
180 @property
181 def files(
182 self):
183 """*The associated source files*
185 **Usage**
187 ```python
188 sourceFiles = tns.files
189 ```
191 """
192 relatedFilesResultsList = []
193 relatedFilesResultsList[:] = [dict(l)
194 for l in self.relatedFilesResultsList]
195 return relatedFilesResultsList
197 @property
198 def photometry(
199 self):
200 """*The associated source photometry*
202 **Usage**
204 ```python
205 sourcePhotometry = tns.photometry
206 ```
208 """
209 photResultsList = []
210 photResultsList[:] = [dict(l) for l in self.photResultsList]
211 return photResultsList
213 @property
214 def url(
215 self):
216 """*The generated URL used for searching of the TNS*
218 **Usage**
220 ```python
221 searchURL = tns.url
222 ```
224 """
226 return self._searchURL
228 def csv(
229 self,
230 dirPath=None):
231 """*Render the results in csv format*
233 **Key Arguments**
235 - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
238 **Return**
240 - `csvSources` -- the top-level transient data
241 - `csvPhot` -- all photometry associated with the transients
242 - `csvSpec` -- all spectral data associated with the transients
243 - `csvFiles` -- all files associated with the matched transients found on the TNS
246 **Usage**
248 To render the results in csv format:
250 ```python
251 csvSources, csvPhot, csvSpec, csvFiles = tns.csv()
252 print(csvSources)
253 ```
255 ```text
256 TNSId,TNSName,discoveryName,discSurvey,raSex,decSex,raDeg,decDeg,transRedshift,specType,discMag,discMagFilter,discDate,objectUrl,hostName,hostRedshift,separationArcsec,separationNorthArcsec,separationEastArcsec
257 2016asf,SN2016asf,ASASSN-16cs,ASAS-SN,06:50:36.73,+31:06:45.36,102.6530,31.1126,0.021,SN Ia,17.1,V-Johnson,2016-03-06 08:09:36,https://www.wis-tns.org/object/2016asf,KUG 0647+311,,0.66,0.65,-0.13
258 ```
260 You can save the results to file by passing in a directory path to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files, but all data can be associated with its transient source using the transient's unique `TNSId`.
262 ```python
263 tns.csv("~/tns")
264 ```
266 .. image:: https://i.imgur.com/BwwqMBg.png
267 :width: 800px
268 :alt: csv output
270 """
272 if dirPath:
273 p = self._file_prefix()
274 csvSources = self.sourceResults.csv(
275 filepath=dirPath + "/" + p + "sources.csv")
276 csvPhot = self.photResults.csv(
277 filepath=dirPath + "/" + p + "phot.csv")
278 csvSpec = self.specResults.csv(
279 filepath=dirPath + "/" + p + "spec.csv")
280 csvFiles = self.relatedFilesResults.csv(
281 filepath=dirPath + "/" + p + "relatedFiles.csv")
282 else:
283 csvSources = self.sourceResults.csv()
284 csvPhot = self.photResults.csv()
285 csvSpec = self.specResults.csv()
286 csvFiles = self.relatedFilesResults.csv()
287 return csvSources, csvPhot, csvSpec, csvFiles
289 def json(
290 self,
291 dirPath=None):
292 """*Render the results in json format*
294 **Key Arguments**
296 - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
299 **Return**
301 - `jsonSources` -- the top-level transient data
302 - `jsonPhot` -- all photometry associated with the transients
303 - `jsonSpec` -- all spectral data associated with the transients
304 - `jsonFiles` -- all files associated with the matched transients found on the TNS
307 **Usage**
309 To render the results in json format:
311 ```python
312 jsonSources, jsonPhot, jsonSpec, jsonFiles = tns.json()
313 print(jsonSources)
314 ```
316 ```text
317 [
318 {
319 "TNSId": "2016asf",
320 "TNSName": "SN2016asf",
321 "decDeg": 31.1126,
322 "decSex": "+31:06:45.36",
323 "discDate": "2016-03-06 08:09:36",
324 "discMag": "17.1",
325 "discMagFilter": "V-Johnson",
326 "discSurvey": "ASAS-SN",
327 "discoveryName": "ASASSN-16cs",
328 "hostName": "KUG 0647+311",
329 "hostRedshift": null,
330 "objectUrl": "https://www.wis-tns.org/object/2016asf",
331 "raDeg": 102.65304166666667,
332 "raSex": "06:50:36.73",
333 "separationArcsec": "0.66",
334 "separationEastArcsec": "-0.13",
335 "separationNorthArcsec": "0.65",
336 "specType": "SN Ia",
337 "transRedshift": "0.021"
338 }
339 ]
340 ```
342 You can save the results to file by passing in a directory path to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files, but all data can be associated with its transient source using the transient's unique `TNSId`.
344 ```python
345 tns.json("~/tns")
346 ```
348 .. image:: https://i.imgur.com/wAHqARI.png
349 :width: 800px
350 :alt: json output
352 """
354 if dirPath:
355 p = self._file_prefix()
356 jsonSources = self.sourceResults.json(
357 filepath=dirPath + "/" + p + "sources.json")
358 jsonPhot = self.photResults.json(
359 filepath=dirPath + "/" + p + "phot.json")
360 jsonSpec = self.specResults.json(
361 filepath=dirPath + "/" + p + "spec.json")
362 jsonFiles = self.relatedFilesResults.json(
363 filepath=dirPath + "/" + p + "relatedFiles.json")
364 else:
365 jsonSources = self.sourceResults.json()
366 jsonPhot = self.photResults.json()
367 jsonSpec = self.specResults.json()
368 jsonFiles = self.relatedFilesResults.json()
369 return jsonSources, jsonPhot, jsonSpec, jsonFiles
371 def yaml(
372 self,
373 dirPath=None):
374 """*Render the results in yaml format*
376 **Key Arguments**
378 - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
381 **Return**
383 - `yamlSources` -- the top-level transient data
384 - `yamlPhot` -- all photometry associated with the transients
385 - `yamlSpec` -- all spectral data associated with the transients
386 - `yamlFiles` -- all files associated with the matched transients found on the TNS
389 **Usage**
391 To render the results in yaml format:
393 ```python
394 yamlSources, yamlPhot, yamlSpec, yamlFiles = tns.yaml()
395 print(yamlSources)
396 ```
398 ```text
399 - TNSId: 2016asf
400 TNSName: SN2016asf
401 decDeg: 31.1126
402 decSex: '+31:06:45.36'
403 discDate: '2016-03-06 08:09:36'
404 discMag: '17.1'
405 discMagFilter: V-Johnson
406 discSurvey: ASAS-SN
407 discoveryName: ASASSN-16cs
408 hostName: KUG 0647+311
409 hostRedshift: null
410 objectUrl: https://www.wis-tns.org/object/2016asf
411 raDeg: 102.65304166666667
412 raSex: '06:50:36.73'
413 separationArcsec: '0.66'
414 separationEastArcsec: '-0.13'
415 separationNorthArcsec: '0.65'
416 specType: SN Ia
417 transRedshift: '0.021'
418 ```
420 You can save the results to file by passing in a directory path to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files, but all data can be associated with its transient source using the transient's unique `TNSId`.
422 ```python
423 tns.yaml("~/tns")
424 ```
426 .. image:: https://i.imgur.com/ZpJIC6p.png
427 :width: 800px
428 :alt: yaml output
430 """
432 if dirPath:
433 p = self._file_prefix()
434 yamlSources = self.sourceResults.yaml(
435 filepath=dirPath + "/" + p + "sources.yaml")
436 yamlPhot = self.photResults.yaml(
437 filepath=dirPath + "/" + p + "phot.yaml")
438 yamlSpec = self.specResults.yaml(
439 filepath=dirPath + "/" + p + "spec.yaml")
440 yamlFiles = self.relatedFilesResults.yaml(
441 filepath=dirPath + "/" + p + "relatedFiles.yaml")
442 else:
443 yamlSources = self.sourceResults.yaml()
444 yamlPhot = self.photResults.yaml()
445 yamlSpec = self.specResults.yaml()
446 yamlFiles = self.relatedFilesResults.yaml()
447 return yamlSources, yamlPhot, yamlSpec, yamlFiles
449 def markdown(
450 self,
451 dirPath=None):
452 """*Render the results in markdown format*
454 **Key Arguments**
456 - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
459 **Return**
461 - `markdownSources` -- the top-level transient data
462 - `markdownPhot` -- all photometry associated with the transients
463 - `markdownSpec` -- all spectral data associated with the transients
464 - `markdownFiles` -- all files associated with the matched transients found on the TNS
467 **Usage**
469 To render the results in markdown table format:
471 ```python
472 markdownSources, markdownPhot, markdownSpec, markdownFiles = tns.markdown()
473 print(markdownSources)
474 ```
476 ```text
477 | TNSId | TNSName | discoveryName | discSurvey | raSex | decSex | raDeg | decDeg | transRedshift | specType | discMag | discMagFilter | discDate | objectUrl | hostName | hostRedshift | separationArcsec | separationNorthArcsec | separationEastArcsec |
478 |:---------|:-----------|:---------------|:------------|:-------------|:--------------|:----------|:---------|:---------------|:----------|:---------|:---------------|:---------------------|:----------------------------------------------|:--------------|:--------------|:------------------|:-----------------------|:----------------------|
479 | 2016asf | SN2016asf | ASASSN-16cs | ASAS-SN | 06:50:36.73 | +31:06:45.36 | 102.6530 | 31.1126 | 0.021 | SN Ia | 17.1 | V-Johnson | 2016-03-06 08:09:36 | https://www.wis-tns.org/object/2016asf | KUG 0647+311 | | 0.66 | 0.65 | -0.13 |
480 ```
482 You can save the results to file by passing in a directory path to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files, but all data can be associated with its transient source using the transient's unique `TNSId`.
484 ```python
485 tns.markdown("~/tns")
486 ```
488 .. image:: https://i.imgur.com/AYLBQoJ.png
489 :width: 800px
490 :alt: markdown output
492 """
494 if dirPath:
495 p = self._file_prefix()
496 markdownSources = self.sourceResults.markdown(
497 filepath=dirPath + "/" + p + "sources.md")
498 markdownPhot = self.photResults.markdown(
499 filepath=dirPath + "/" + p + "phot.md")
500 markdownSpec = self.specResults.markdown(
501 filepath=dirPath + "/" + p + "spec.md")
502 markdownFiles = self.relatedFilesResults.markdown(
503 filepath=dirPath + "/" + p + "relatedFiles.md")
504 else:
505 markdownSources = self.sourceResults.markdown()
506 markdownPhot = self.photResults.markdown()
507 markdownSpec = self.specResults.markdown()
508 markdownFiles = self.relatedFilesResults.markdown()
509 return markdownSources, markdownPhot, markdownSpec, markdownFiles
511 def table(
512 self,
513 dirPath=None):
514 """*Render the results as an ascii table*
516 **Key Arguments**
518 - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
521 **Return**
523 - `tableSources` -- the top-level transient data
524 - `tablePhot` -- all photometry associated with the transients
525 - `tableSpec` -- all spectral data associated with the transients
526 - `tableFiles` -- all files associated with the matched transients found on the TNS
529 **Usage**
531 To render the results in ascii table format:
533 ```python
534 tableSources, tablePhot, tableSpec, tableFiles = tns.table()
535 print(tableSources)
536 ```
538 ```text
539 +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+
540 | TNSId | TNSName | discoveryName | discSurvey | raSex | decSex | raDeg | decDeg | transRedshift | specType | discMag | discMagFilter | discDate | objectUrl | hostName | hostRedshift | separationArcsec | separationNorthArcsec | separationEastArcsec |
541 +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+
542 | 2016asf | SN2016asf | ASASSN-16cs | ASAS-SN | 06:50:36.73 | +31:06:45.36 | 102.6530 | 31.1126 | 0.021 | SN Ia | 17.1 | V-Johnson | 2016-03-06 08:09:36 | https://www.wis-tns.org/object/2016asf | KUG 0647+311 | | 0.66 | 0.65 | -0.13 |
543 +----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+
544 ```
546 You can save the results to file by passing in a directory path to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files, but all data can be associated with its transient source using the transient's unique `TNSId`.
548 ```python
549 tns.table("~/tns")
550 ```
552 .. image:: https://i.imgur.com/m09M0ho.png
553 :width: 800px
554 :alt: ascii files
556 """
558 if dirPath:
559 p = self._file_prefix()
560 tableSources = self.sourceResults.table(
561 filepath=dirPath + "/" + p + "sources.ascii")
562 tablePhot = self.photResults.table(
563 filepath=dirPath + "/" + p + "phot.ascii")
564 tableSpec = self.specResults.table(
565 filepath=dirPath + "/" + p + "spec.ascii")
566 tableFiles = self.relatedFilesResults.table(
567 filepath=dirPath + "/" + p + "relatedFiles.ascii")
568 else:
569 tableSources = self.sourceResults.table()
570 tablePhot = self.photResults.table()
571 tableSpec = self.specResults.table()
572 tableFiles = self.relatedFilesResults.table()
573 return tableSources, tablePhot, tableSpec, tableFiles
575 def mysql(
576 self,
577 tableNamePrefix="TNS",
578 dirPath=None):
579 """*Render the results as MySQL Insert statements*
581 **Key Arguments**
583 - ``tableNamePrefix`` -- the prefix for the database table names to assign the insert statements to. Default *TNS*.
584 - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
587 **Return**
589 - `mysqlSources` -- the top-level transient data
590 - `mysqlPhot` -- all photometry associated with the transients
591 - `mysqlSpec` -- all spectral data associated with the transients
592 - `mysqlFiles` -- all files associated with the matched transients found on the TNS
595 **Usage**
597 To render the results in mysql insert format:
599 ```python
600 mysqlSources, mysqlPhot, mysqlSpec, mysqlFiles = tns.mysql("TNS")
601 print(mysqlSources)
602 ```
604 ```text
605 INSERT INTO `TNS_sources` (TNSId,TNSName,dateCreated,decDeg,decSex,discDate,discMag,discMagFilter,discSurvey,discoveryName,hostName,hostRedshift,objectUrl,raDeg,raSex,separationArcsec,separationEastArcsec,separationNorthArcsec,specType,transRedshift) VALUES ("2016asf" ,"SN2016asf" ,"2016-09-20T11:22:13" ,"31.1126" ,"+31:06:45.36" ,"2016-03-06 08:09:36" ,"17.1" ,"V-Johnson" ,"ASAS-SN" ,"ASASSN-16cs" ,"KUG 0647+311" ,null ,"https://www.wis-tns.org/object/2016asf" ,"102.653041667" ,"06:50:36.73" ,"0.66" ,"-0.13" ,"0.65" ,"SN Ia" ,"0.021") ON DUPLICATE KEY UPDATE TNSId="2016asf", TNSName="SN2016asf", dateCreated="2016-09-20T11:22:13", decDeg="31.1126", decSex="+31:06:45.36", discDate="2016-03-06 08:09:36", discMag="17.1", discMagFilter="V-Johnson", discSurvey="ASAS-SN", discoveryName="ASASSN-16cs", hostName="KUG 0647+311", hostRedshift=null, objectUrl="https://www.wis-tns.org/object/2016asf", raDeg="102.653041667", raSex="06:50:36.73", separationArcsec="0.66", separationEastArcsec="-0.13", separationNorthArcsec="0.65", specType="SN Ia", transRedshift="0.021", updated=1, dateLastModified=NOW() ;
606 ```
608 You can save the results to file by passing in a directory path to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files, but all data can be associated with its transient source using the transient's unique `TNSId`.
610 ```python
611 tns.mysql("TNS", "~/tns")
612 ```
614 .. image:: https://i.imgur.com/CozySPW.png
615 :width: 800px
616 :alt: mysql output
618 """
619 if dirPath:
620 p = self._file_prefix()
622 createStatement = """
623CREATE TABLE `%(tableNamePrefix)s_sources` (
624 `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
625 `TNSId` varchar(20) NOT NULL,
626 `TNSName` varchar(20) DEFAULT NULL,
627 `dateCreated` datetime DEFAULT NULL,
628 `decDeg` double DEFAULT NULL,
629 `decSex` varchar(45) DEFAULT NULL,
630 `discDate` datetime DEFAULT NULL,
631 `discMag` double DEFAULT NULL,
632 `discMagFilter` varchar(45) DEFAULT NULL,
633 `discSurvey` varchar(100) DEFAULT NULL,
634 `discoveryName` varchar(100) DEFAULT NULL,
635 `objectUrl` varchar(200) DEFAULT NULL,
636 `raDeg` double DEFAULT NULL,
637 `raSex` varchar(45) DEFAULT NULL,
638 `specType` varchar(100) DEFAULT NULL,
639 `transRedshift` double DEFAULT NULL,
640 `updated` tinyint(4) DEFAULT '0',
641 `dateLastModified` datetime DEFAULT NULL,
642 `hostName` VARCHAR(100) NULL DEFAULT NULL,
643 `hostRedshift` DOUBLE NULL DEFAULT NULL,
644 `survey` VARCHAR(100) NULL DEFAULT NULL,
645 PRIMARY KEY (`primaryId`),
646 UNIQUE KEY `tnsid` (`TNSId`)
647) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
648 """ % locals()
650 mysqlSources = self.sourceResults.mysql(
651 tableNamePrefix + "_sources", filepath=dirPath + "/" + p + "sources.sql", createStatement=createStatement)
653 createStatement = """
654CREATE TABLE `%(tableNamePrefix)s_photometry` (
655 `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
656 `TNSId` varchar(20) NOT NULL,
657 `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
658 `exptime` double DEFAULT NULL,
659 `filter` varchar(100) DEFAULT NULL,
660 `limitingMag` tinyint(4) DEFAULT NULL,
661 `mag` double DEFAULT NULL,
662 `magErr` double DEFAULT NULL,
663 `magUnit` varchar(100) DEFAULT NULL,
664 `objectName` varchar(100) DEFAULT NULL,
665 `obsdate` datetime DEFAULT NULL,
666 `reportAddedDate` datetime DEFAULT NULL,
667 `suggestedType` varchar(100) DEFAULT NULL,
668 `survey` varchar(100) DEFAULT NULL,
669 `telescope` varchar(100) DEFAULT NULL,
670 `updated` tinyint(4) DEFAULT '0',
671 `dateLastModified` datetime DEFAULT NULL,
672 `remarks` VARCHAR(800) NULL DEFAULT NULL,
673 `sourceComment` VARCHAR(800) NULL DEFAULT NULL,
674 PRIMARY KEY (`primaryId`),
675 UNIQUE KEY `tnsid_survey_obsdate` (`TNSId`,`survey`,`obsdate`),
676 UNIQUE INDEX `u_tnsid_survey_obsdate` (`TNSId` ASC, `survey` ASC, `obsdate` ASC),
677 UNIQUE INDEX `u_tnsid_obsdate_objname` (`TNSId` ASC, `obsdate` ASC, `objectName` ASC)
678) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
679 """ % locals()
681 mysqlPhot = self.photResults.mysql(
682 tableNamePrefix + "_photometry", filepath=dirPath + "/" + p + "phot.sql", createStatement=createStatement)
684 createStatement = """
685CREATE TABLE `%(tableNamePrefix)s_spectra` (
686 `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
687 `TNSId` varchar(45) NOT NULL,
688 `TNSuser` varchar(45) DEFAULT NULL,
689 `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
690 `exptime` double DEFAULT NULL,
691 `obsdate` datetime DEFAULT NULL,
692 `reportAddedDate` datetime DEFAULT NULL,
693 `specType` varchar(100) DEFAULT NULL,
694 `survey` varchar(100) DEFAULT NULL,
695 `telescope` varchar(100) DEFAULT NULL,
696 `transRedshift` double DEFAULT NULL,
697 `updated` tinyint(4) DEFAULT '0',
698 `dateLastModified` datetime DEFAULT NULL,
699 `remarks` VARCHAR(800) NULL DEFAULT NULL,
700 `sourceComment` VARCHAR(800) NULL DEFAULT NULL,
701 PRIMARY KEY (`primaryId`),
702 UNIQUE KEY `u_tnsid_survey_obsdate` (`TNSId`,`survey`,`obsdate`),
703 UNIQUE KEY `u_id_user_obsdate` (`TNSId`,`TNSuser`,`obsdate`)
704) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
705 """ % locals()
707 mysqlSpec = self.specResults.mysql(
708 tableNamePrefix + "_spectra", filepath=dirPath + "/" + p + "spec.sql", createStatement=createStatement)
710 createStatement = """
711CREATE TABLE `%(tableNamePrefix)s_files` (
712 `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
713 `TNSId` varchar(100) NOT NULL,
714 `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
715 `dateObs` datetime DEFAULT NULL,
716 `filename` varchar(200) DEFAULT NULL,
717 `spec1phot2` tinyint(4) DEFAULT NULL,
718 `url` varchar(800) DEFAULT NULL,
719 `updated` tinyint(4) DEFAULT '0',
720 `dateLastModified` datetime DEFAULT NULL,
721 `comment` VARCHAR(800) NULL DEFAULT NULL,
722 PRIMARY KEY (`primaryId`),
723 UNIQUE KEY `tnsid_url` (`TNSId`,`url`)
724) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
725 """ % locals()
727 mysqlFiles = self.relatedFilesResults.mysql(
728 tableNamePrefix + "_files", filepath=dirPath + "/" + p + "relatedFiles.sql", createStatement=createStatement)
729 else:
730 mysqlSources = self.sourceResults.mysql(
731 tableNamePrefix + "_sources")
732 mysqlPhot = self.photResults.mysql(tableNamePrefix + "_photometry")
733 mysqlSpec = self.specResults.mysql(tableNamePrefix + "_spectra")
734 mysqlFiles = self.relatedFilesResults.mysql(
735 tableNamePrefix + "_files")
736 return mysqlSources, mysqlPhot, mysqlSpec, mysqlFiles
738 def _query_tns(self):
739 """
740 *determine how to query the TNS, send the query and parse the results*
742 **Return**
744 - ``sourceTable``, ``photoTable``, ``specTable``, ``relatedFilesTable`` -- lists of dictionaries, one list for each result set returned from the TNS (sources, photometry, spectra, related files)
746 """
747 self.log.debug('starting the ``_query_tns`` method')
749 sourceTable = []
750 photoTable = []
751 specTable = []
752 relatedFilesTable = []
754 # THE stop FLAG KEEPS TRACK OF THE TNS PAGINATION WHEN MANY RESULT PAGES
755 # ARE RETURNED
756 stop = False
758 sourceCount = 0
759 failedCount = 0
760 while not stop:
762 status_code, content, self._searchURL = self._get_tns_search_results()
764 if status_code != 200:
765 # IF WE HAVE FAILED TOO MANY TIMES, GIVE UP AND RAISE A CONNECTION ERROR
766 if failedCount > 2:
767 self.log.error(f'could not get the search results from the TNS, HTTP status code {status_code}. Search URL was {self._searchURL}')
768 raise ConnectionError(
769 'could not get the search results from the TNS, HTTP status code %(status_code)s ' % locals())
770 return sourceTable, photoTable, specTable, relatedFilesTable
771 failedCount += 1
773 time.sleep(2)
774 continue
776 if "No results found" in content:
777 print("No results found")
778 return sourceTable, photoTable, specTable, relatedFilesTable
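# IF FEWER ROWS THAN batchSize WERE RETURNED, THIS IS THE LAST PAGE OF
# RESULTS; OTHERWISE REQUEST THE NEXT PAGE ON THE FOLLOWING ITERATION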
780 if self._parse_transient_rows(content, True) < self.batchSize:
781 stop = True
782 else:
783 self.page += 1
784 thisPage = self.page
785 print(
786 "Downloaded %(thisPage)s page(s) from the TNS. %(sourceCount)s transients parsed so far." % locals())
787 sourceCount += self.batchSize
788 # print "\t" + self._searchURL
789 timesleep.sleep(1)
791 # PARSE ALL ROWS RETURNED
792 for transientRow in self._parse_transient_rows(content):
794 # TOP LEVEL DISCOVERY CONTENT
795 sourceContent = transientRow.group()
796 discInfo, TNSId = self._parse_discovery_information(
797 sourceContent)
798 sourceTable.append(discInfo)
800 # PHOTOMETRY
801 phot, relatedFiles = self._parse_photometry_data(
802 sourceContent, TNSId)
803 photoTable += phot
804 relatedFilesTable += relatedFiles
806 # SPECTRA
807 spec, relatedFiles = self._parse_spectral_data(
808 sourceContent, TNSId)
809 specTable += spec
810 relatedFilesTable += relatedFiles
812 # SORT BY SEPARATION FROM THE SEARCH COORDINATES
813 try:
814 sourceTable = sorted(sourceTable, key=itemgetter(
815 'separationArcsec'), reverse=False)
816 except:
817 pass
819 self.log.debug('completed the ``_query_tns`` method')
820 return sourceTable, photoTable, specTable, relatedFilesTable
822 def _get_tns_search_results(
823 self):
824 """
825 *query the TNS and return the response*
826 """
827 self.log.debug('starting the ``_get_tns_search_results`` method')
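# NOTE: THE REQUEST SENDS THE 'user-agent' VALUE FROM THE SETTINGS
# DICTIONARY, SO A SETTINGS DICT CONTAINING THAT KEY MUST BE SUPPLIED
# WHEN RUNNING LIVE QUERIES AGAINST THE TNS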
829 try:
830 response = requests.get(
831 url="https://www.wis-tns.org/search",
832 params={
833 "page": self.page,
834 "ra": self.ra,
835 "decl": self.dec,
836 "radius": self.radiusArcsec,
837 "name": self.name,
838 "internal_name": self.internal_name,
839 "discovered_period_units": self.period_units,
840 "discovered_period_value": self.discInLastDays,
841 "num_page": self.batchSize,
842 "display[redshift]": "1",
843 "display[hostname]": "1",
844 "display[host_redshift]": "1",
845 "display[source_group_name]": "1",
846 "display[internal_name]": "1",
847 "display[spectra_count]": "1",
848 "display[discoverymag]": "1",
849 "display[discmagfilter]": "1",
850 "display[discoverydate]": "1",
851 "display[discoverer]": "1",
852 "display[sources]": "1",
853 "display[bibcode]": "1",
854 },
855 headers={
856 'User-Agent': self.settings["user-agent"]
857 }
858 )
860 except requests.exceptions.RequestException:
861 print('HTTP Request failed')
863 self.log.debug('completed the ``_get_tns_search_results`` method')
864 try:
865 # PYTHON 3
866 return response.status_code, str(response.content, 'utf-8'), response.url
867 except:
868 # PYTHON 2
869 return response.status_code, response.content, response.url
871 def _file_prefix(
872 self):
873 """*Generate a file prefix based on the type of search for saving files to disk*
875 **Return**
877 - ``prefix`` -- the file prefix
879 """
880 self.log.debug('starting the ``_file_prefix`` method')
882 if self.ra:
883 now = datetime.now()
884 prefix = now.strftime("%Y%m%dt%H%M%S%f_tns_conesearch_")
885 elif self.name:
886 prefix = self.name + "_tns_conesearch_"
887 elif self.internal_name:
888 prefix = self.internal_name + "_tns_conesearch_"
889 elif self.discInLastDays:
890 discInLastDays = str(self.discInLastDays)
891 now = datetime.now()
892 prefix = now.strftime(
893 discInLastDays + "d_since_%Y%m%d_tns_conesearch_")
895 self.log.debug('completed the ``_file_prefix`` method')
896 return prefix
898 def _parse_transient_rows(
899 self,
900 content,
901 count=False):
902 """* parse transient rows from the TNS result page content*
904 **Key Arguments**
906 - ``content`` -- the content from the TNS results page.
907 - ``count`` -- return only the number of rows
910 **Return**
912 - ``transientRows``
914 """
915 self.log.debug('starting the ``_parse_transient_rows`` method')
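# EACH SOURCE ROW IN THE TNS RESULTS PAGE BEGINS WITH AN <a href="/object/...>
# ANCHOR; THE REGEX CAPTURES EVERYTHING FROM ONE SUCH ANCHOR UP TO THE
# NEXT (OR TO THE END-OF-CONTENT MARKER) AS A SINGLE SOURCE BLOCK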
917 regexForRow = r"""\n([^\n]*?<a href="/object/.*?)(?=\n[^\n]*?<a href="/object/|<\!\-\- /\.section, /#content \-\->)"""
919 if count:
920 # A SINGLE SOURCE BLOCK
921 matchedSources = re.findall(
922 regexForRow,
923 content,
924 flags=re.S # re.S
925 )
926 return len(matchedSources)
928 # A SINGLE SOURCE BLOCK
929 matchedSources = re.finditer(
930 regexForRow,
931 content,
932 flags=re.S # re.S
933 )
935 self.log.debug('completed the ``_parse_transient_rows`` method')
936 return matchedSources
938 def _parse_discovery_information(
939 self,
940 content):
941 """* parse discovery information from one row on the TNS results page*
943 **Key Arguments**
945 - ``content`` -- a table row from the TNS results page.
948 **Return**
950 - ``discoveryData`` -- dictionary of results
951 - ``TNSId`` -- the unique TNS id for the transient
953 """
954 self.log.debug('starting the ``_parse_discovery_information`` method')
956 # ASTROCALC UNIT CONVERTER OBJECT
957 converter = unit_conversion(
958 log=self.log
959 )
961 matches = re.finditer(
962 r"""<tr class="row-.*?"><td class="cell-id">(?P<tnsId>\d*?)</td><td class="cell-name"><a href="(?P<objectUrl>.*?)">(?P<TNSName>.*?)</a></td><td class="cell-.*?<td class="cell-ra">(?P<raSex>.*?)</td><td class="cell-decl">(?P<decSex>.*?)</td><td class="cell-ot_name">(?P<specType>.*?)</td><td class="cell-redshift">(?P<transRedshift>.*?)</td><td class="cell-hostname">(?P<hostName>.*?)</td><td class="cell-host_redshift">(?P<hostRedshift>.*?)</td><td class="cell-reporting_group_name">(?P<reportingSurvey>.*?)</td><td class="cell-source_group_name">(?P<discSurvey>.*?)</td>.*?<td class="cell-internal_name">(<a.*?>)?(?P<discoveryName>.*?)(</a>)?</td>.*?<td class="cell-discoverymag">(?P<discMag>.*?)</td><td class="cell-disc_filter_name">(?P<discMagFilter>.*?)</td><td class="cell-discoverydate">(?P<discDate>.*?)</td><td class="cell-discoverer">(?P<sender>.*?)</td>.*?</tr>""",
963 content,
964 flags=0 # re.S
965 )
966 discoveryData = []
967 for match in matches:
968 row = match.groupdict()
969 for k, v in list(row.items()):
970 row[k] = v.strip()
971 if len(v) == 0:
972 row[k] = None
973 if row["transRedshift"] == 0:
974 row["transRedshift"] = None
975 if row["TNSName"][0] in ["1", "2"]:
976 row["TNSName"] = "SN" + row["TNSName"]
977 row["objectUrl"] = "https://www.wis-tns.org" + \
978 row["objectUrl"]
980 # CONVERT COORDINATES TO DECIMAL DEGREES
981 row["raDeg"] = converter.ra_sexegesimal_to_decimal(
982 ra=row["raSex"]
983 )
984 row["decDeg"] = converter.dec_sexegesimal_to_decimal(
985 dec=row["decSex"]
986 )
988 # IF THIS IS A COORDINATE SEARCH, ADD SEPARATION FROM
989 # ORIGINAL QUERY COORDINATES
990 if self.ra:
991 # CALCULATE SEPARATION IN ARCSEC
992 from astrocalc.coords import separations
993 calculator = separations(
994 log=self.log,
995 ra1=self.ra,
996 dec1=self.dec,
997 ra2=row["raDeg"],
998 dec2=row["decDeg"],
999 )
1000 angularSeparation, north, east = calculator.get()
1001 row["separationArcsec"] = angularSeparation
1002 row["separationNorthArcsec"] = north
1003 row["separationEastArcsec"] = east
1005 if not row["discSurvey"]:
1006 row["survey"] = row["sender"]
1008 del row["sender"]
1009 del row["tnsId"]
1010 row["TNSName"] = row["TNSName"].replace(" ", "")
1011 row["TNSId"] = row["TNSName"].replace(
1012 "SN", "").replace("AT", "")
1013 TNSId = row["TNSId"]
1015 # ORDER THE DICTIONARY FOR THIS ROW OF RESULTS
1016 orow = collections.OrderedDict()
1017 keyOrder = ["TNSId", "TNSName", "discoveryName", "discSurvey", "raSex", "decSex", "raDeg", "decDeg",
1018 "transRedshift", "specType", "discMag", "discMagFilter", "discDate", "objectUrl", "hostName", "hostRedshift", "separationArcsec", "separationNorthArcsec", "separationEastArcsec"]
1019 for k, v in list(row.items()):
1020 if k not in keyOrder:
1021 keyOrder.append(k)
1022 for k in keyOrder:
1023 try:
1024 orow[k] = row[k]
1025 except:
1026 self.log.info(
1027 "`%(k)s` not found in the source data for %(TNSId)s" % locals())
1028 pass
1029 discoveryData.append(orow)
1031 self.log.debug('completed the ``_parse_discovery_information`` method')
1032 return discoveryData[0], TNSId
1034 def _parse_photometry_data(
1035 self,
1036 content,
1037 TNSId):
1038 """*parse photometry data from a row in the tns results content*
1040 **Key Arguments**
1042 - ``content`` -- a table row from the TNS results page
1043 - ``TNSId`` -- the tns id of the transient
1046 **Return**
1048 - ``photData`` -- a list of dictionaries of the photometry data
1049 - ``relatedFilesTable`` -- a list of dictionaries of transient photometry related files
1051 """
1052 self.log.debug('starting the ``_parse_photometry_data`` method')
1054 photData = []
1055 relatedFilesTable = []
1057 # AT REPORT BLOCK
1058 ATBlock = re.search(
1059 r"""<tr class=[^\n]*?AT reports.*?(?=<tr class=[^\n]*?Classification reports|$)""",
1060 content,
1061 flags=re.S # re.S
1062 )
1064 if ATBlock:
1065 ATBlock = ATBlock.group()
1066 reports = re.finditer(
1067 r"""<tr class="row-[^"]*"><td class="cell-id">.*?</table>""",
1068 ATBlock,
1069 flags=re.S # re.S
1070 )
1072 relatedFiles = self._parse_related_files(ATBlock)
1074 for r in reports:
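# EACH AT REPORT CONTAINS A HEADER ROW OF REPORT-LEVEL METADATA FOLLOWED
# BY ONE OR MORE PHOTOMETRY ROWS; PARSE THE HEADER FIRST, THEN MERGE IT
# INTO EVERY PHOTOMETRY ROW BELOW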
1075 header = re.search(
1076 r"""<tr class="row[^"]*".*?time_received">(?P<reportAddedDate>[^<]*).*?user_name">(?P<sender>[^<]*).*?reporter_name">(?P<reporters>[^<]*).*?reporting_group_name">(?P<reportingGroup>[^<]*).*?source_group_name">(?P<surveyGroup>[^<]*).*?ra">(?P<ra>[^<]*).*?decl">(?P<dec>[^<]*).*?discovery_date">(?P<obsDate>[^<]*).*?flux">(?P<mag>[^<]*).*?filter_name">(?P<magFilter>[^<]*).*?related_files">(?P<relatedFiles>[^<]*).*?type_name">(?P<suggestedType>[^<]*).*?hostname">(?P<hostName>[^<]*).*?host_redshift">(?P<hostRedshift>[^<]*).*?internal_name">(?P<objectName>[^<]*).*?groups">(?P<survey>[^<]*).*?remarks">(?P<sourceComment>[^<]*)""",
1077 r.group(),
1078 flags=0 # re.S
1079 )
1080 if not header:
1081 print(r.group())
1082 continue
1083 header = header.groupdict()
1084 header["TNSId"] = TNSId
1086 del header["reporters"]
1087 del header["surveyGroup"]
1088 del header["hostName"]
1089 del header["hostRedshift"]
1090 del header["mag"]
1091 del header["magFilter"]
1092 del header["obsDate"]
1093 del header["ra"]
1094 del header["dec"]
1096 if not self.comments:
1097 del header['sourceComment']
1098 else:
1099 theseComments = header[
1100 "sourceComment"].split("\n")
1101 header["sourceComment"] = ""
1102 for c in theseComments:
1103 header["sourceComment"] += " " + c.strip()
1104 header["sourceComment"] = header[
1105 "sourceComment"].strip()[0:750]
1107 phot = re.finditer(
1108 r"""<tr class="row\-[^"]*".*?obsdate">(?P<obsdate>[^<]*).*?flux">(?P<mag>[^<]*).*?fluxerr">(?P<magErr>[^<]*).*?limflux">(?P<limitingMag>[^<]*).*?unit_name">(?P<magUnit>[^<]*).*?filter_name">(?P<filter>[^<]*).*?tel_inst">(?P<telescope>[^<]*).*?exptime">(?P<exptime>[^<]*).*?observer">(?P<observer>[^<]*).*?-remarks">(?P<remarks>[^<]*)""",
1109 r.group(),
1110 flags=0 # re.S
1111 )
1112 filesAppended = False
1113 for p in phot:
1114 p = p.groupdict()
1115 del p["observer"]
1117 if p["limitingMag"] and not p["mag"]:
1118 p["mag"] = p["limitingMag"]
1119 p["limitingMag"] = 1
1120 p["remarks"] = p["remarks"].replace(
1121 "[Last non detection]", "")
1122 else:
1123 p["limitingMag"] = 0
1125 if not self.comments:
1126 del p["remarks"]
1128 p.update(header)
1130 if p["relatedFiles"] and filesAppended == False:
1131 filesAppended = True
1132 for f in relatedFiles:
1133 # ORDER THE DICTIONARY FOR THIS ROW OF
1134 # RESULTS
1135 thisFile = collections.OrderedDict()
1136 thisFile["TNSId"] = TNSId
1137 thisFile["filename"] = f[
1138 "filepath"].split("/")[-1]
1139 thisFile["url"] = f["filepath"]
1140 if self.comments:
1141 thisFile["comment"] = f[
1142 "fileComment"].replace("\n", " ")[0:750]
1143 thisFile["dateObs"] = p["obsdate"]
1144 thisFile["spec1phot2"] = 2
1145 relatedFilesTable.append(thisFile)
1147 if not p["survey"] and not p["objectName"]:
1148 p["survey"] = p["sender"]
1150 del p["relatedFiles"]
1151 del p["sender"]
1153 # ORDER THE DICTIONARY FOR THIS ROW OF RESULTS
1154 orow = collections.OrderedDict()
1155 keyOrder = ["TNSId", "survey", "obsdate", "filter", "limitingMag", "mag", "magErr",
1156 "magUnit", "suggestedType", "telescope", "exptime", "reportAddedDate"]
1157 for k, v in list(p.items()):
1158 if k not in keyOrder:
1159 keyOrder.append(k)
1160 for k in keyOrder:
1161 try:
1162 orow[k] = p[k]
1163 except:
1164 self.log.info(
1165 "`%(k)s` not found in the source data for %(TNSId)s" % locals())
1166 pass
1168 photData.append(orow)
1170 self.log.debug('completed the ``_parse_photometry_data`` method')
1171 return photData, relatedFilesTable
1173 def _parse_related_files(
1174 self,
1175 content):
1176 """*parse the contents for related files URLs and comments*
1178 **Key Arguments**
1180 - ``content`` -- the content to parse.
1183 **Return**
1185 - ``relatedFiles`` -- a list of dictionaries of transient related files
1187 """
1188 self.log.debug('starting the ``_parse_related_files`` method')
1190 relatedFilesList = re.finditer(
1191 r"""<td class="cell-filename">.*?href="(?P<filepath>[^"]*).*?remarks">(?P<fileComment>[^<]*)""",
1192 content,
1193 flags=0 # re.S
1194 )
1196 relatedFiles = []
1197 for f in relatedFilesList:
1198 f = f.groupdict()
1199 relatedFiles.append(f)
1201 self.log.debug('completed the ``_parse_related_files`` method')
1202 return relatedFiles
1204 def _parse_spectral_data(
1205 self,
1206 content,
1207 TNSId):
1208 """*parse spectra data from a row in the tns results content*
1210 **Key Arguments**
1212 - ``content`` -- a table row from the TNS results page
1213 - ``TNSId`` -- the tns id of the transient
1216 **Return**
1218 - ``specData`` -- a list of dictionaries of the spectral data
1219 - ``relatedFilesTable`` -- a list of dictionaries of transient spectrum related files
1221 """
1222 self.log.debug('starting the ``_parse_spectral_data`` method')
1224 specData = []
1225 relatedFilesTable = []
1227 # CLASSIFICATION BLOCK
1228 classBlock = re.search(
1229 r"""<tr class=[^\n]*?Classification reports.*$""",
1230 content,
1231 flags=re.S # re.S
1232 )
1234 if classBlock:
1235 classBlock = classBlock.group()
1237 reports = re.finditer(
1238 r"""<tr class="row-[^"]*"><td class="cell-id">.*?</tbody>\s*</table>\s*</div></td> </tr>\s*</tbody>\s*</table>\s*</div></td> </tr>""",
1239 classBlock,
1240 flags=re.S #
1241 )
1243 relatedFiles = self._parse_related_files(classBlock)
1245 for r in reports:
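# AS WITH THE PHOTOMETRY, EACH CLASSIFICATION REPORT HAS A HEADER ROW
# (CLASSIFIER, TYPE, REDSHIFT, ETC.) FOLLOWED BY INDIVIDUAL SPECTRUM ROWS;
# THE HEADER IS MERGED INTO EACH SPECTRUM ROW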
1247 header = re.search(
1248 r"""<tr class="row.*?time_received">(?P<reportAddedDate>[^<]*).*?user_name">(?P<TNSuser>[^<]*).*?classifier_name">(?P<reporters>[^<]*).*?source_group_name">(?P<survey>[^<]*).*?-type">(?P<specType>[^<]*).*?-redshift">(?P<transRedshift>[^<]*).*?-related_files">(?P<relatedFiles>[^<]*).*?-groups">(?P<surveyGroup>[^<]*).*?-remarks">(?P<sourceComment>[^<]*)</td>""",
1249 r.group(),
1250 flags=re.S # re.S
1251 )
1252 if not header:
1253 continue
1255 header = header.groupdict()
1256 header["TNSId"] = TNSId
1258 del header["reporters"]
1259 del header["surveyGroup"]
1260 del header["survey"]
1262 if not self.comments:
1263 del header['sourceComment']
1264 else:
1265 theseComments = header[
1266 "sourceComment"].split("\n")
1267 header["sourceComment"] = ""
1268 for c in theseComments:
1269 header["sourceComment"] += " " + c.strip()
1270 header["sourceComment"] = header[
1271 "sourceComment"]
1273 spec = re.finditer(
1274 r"""<tr class="row-.*?-obsdate">(?P<obsdate>[^<]*).*?-tel_inst">(?P<telescope>[^<]*).*?-exptime">(?P<exptime>[^<]*).*?-observer">(?P<sender>[^<]*).*?-reducer">(?P<reducer>[^<]*).*?-source_group_name">(?P<survey>[^<]*).*?-asciifile">(.*?<a href="(?P<filepath>[^"]*)".*?</a>)?.*?-fitsfile">(.*?<a href="(?P<fitsFilepath>[^"]*)".*?</a>)?.*?-groups">(?P<surveyGroup>[^<]*).*?-remarks">(?P<remarks>[^<]*)""",
1275 r.group(),
1276 flags=0 # re.S
1277 )
1278 filesAppended = False
1279 for s in spec:
1281 s = s.groupdict()
1282 del s["sender"]
1283 del s["surveyGroup"]
1284 del s["reducer"]
1286 if not self.comments:
1287 del s["remarks"]
1288 else:
1289 s["remarks"] = s["remarks"].replace('"', "'")[0:750]
1291 s.update(header)
1293 if s["relatedFiles"] and filesAppended == False:
1294 filesAppended = True
1295 for f in relatedFiles:
1296 # ORDER THE DICTIONARY FOR THIS ROW OF
1297 # RESULTS
1298 thisFile = collections.OrderedDict()
1299 thisFile["TNSId"] = TNSId
1300 thisFile["filename"] = f[
1301 "filepath"].split("/")[-1]
1302 thisFile["url"] = f["filepath"]
1303 if self.comments:
1304 thisFile["comment"] = f[
1305 "fileComment"].replace("\n", " ").strip()
1306 thisFile["dateObs"] = s["obsdate"]
1307 thisFile["spec1phot2"] = 1
1308 relatedFilesTable.append(thisFile)
1310 for ffile in [s["filepath"], s["fitsFilepath"]]:
1311 if ffile:
1312 # ORDER THE DICTIONARY FOR THIS ROW OF
1313 # RESULTS
1314 thisFile = collections.OrderedDict()
1315 thisFile["TNSId"] = TNSId
1316 thisFile["filename"] = ffile.split(
1317 "/")[-1]
1318 thisFile["url"] = ffile
1319 if self.comments:
1320 thisFile["comment"] = ""
1321 thisFile["dateObs"] = s["obsdate"]
1322 thisFile["spec1phot2"] = 1
1323 relatedFilesTable.append(thisFile)
1325 del s["filepath"]
1326 del s["fitsFilepath"]
1327 del s["relatedFiles"]
1329 # ORDER THE DICTIONARY FOR THIS ROW OF RESULTS
1330 orow = collections.OrderedDict()
1331 keyOrder = ["TNSId", "survey", "obsdate", "specType", "transRedshift",
1332 "telescope", "exptime", "reportAddedDate", "TNSuser"]
1333 for k, v in list(s.items()):
1334 if k not in keyOrder:
1335 keyOrder.append(k)
1336 for k in keyOrder:
1337 try:
1338 orow[k] = s[k]
1339 except:
1340 self.log.info(
1341 "`%(k)s` not found in the source data for %(TNSId)s" % locals())
1342 pass
1344 specData.append(orow)
1346 self.log.debug('completed the ``_parse_spectral_data`` method')
1347 return specData, relatedFilesTable