Commits
Takahiro Tsutsumi authored and Ville Suoranta committed 6f83e42ed2a Merge
1 + | ''' |
2 + | main module for retrieving JPL-Horizons ephemeris data and |
3 + | converting it to a CASA-readable format (i.e. a CASA table) |
4 + | ''' |
5 + | import os |
6 + | import time |
7 + | from math import sqrt, sin, cos, log |
8 + | import numpy as np |
9 + | |
10 + | from casatools import table, quanta, measures |
11 + | from casatasks import casalog |
12 + | |
13 + | _tb = table() |
14 + | _qa = quanta() |
15 + | _me = measures() |
16 + | |
17 + | # debug |
18 + | _debug = False |
19 + | |
20 + | def gethorizonsephem(objectname, starttime, stoptime, incr, outtable, asis=False, rawdatafile=''): |
21 + | """ |
22 + | Main driver function for ephemeris data query from JPL-Horizons |
23 + | |
24 + | arguments: |
25 + | objectname: ephemeris object name (case insensitive). A common name is |
26 + | converted to an object ID when asis = False. |
27 + | starttime: start time of the ephemeris data (expects the YYYY/MM/DD/HH:MM |
28 + | format) |
29 + | stoptime: stop (end) time of the ephemeris data in the same format as |
30 + | starttime |
31 + | incr: time increment (interval) of the ephemeris data |
32 + | outtable: output CASA table name |
33 + | asis: when True, skip the additional check to resolve the object name |
34 + | rawdatafile: raw query result file name (optional) |
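+ | |
+ | example (illustrative values only; the object, time range and table name are arbitrary): |
+ | gethorizonsephem('Titan', '2022/01/01/00:00', '2022/01/31/00:00', |
+ | '1 h', 'Titan_ephem.tab') |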
35 + | """ |
36 + | |
37 + | # commented ones are not currently supported in setjy |
38 + | # Juno's table exists in the data repo but is not supported in setjy |
39 + | asteroids = {'ceres': '1;', |
40 + | 'pallas': '2;', |
41 + | 'juno': '3;', # large crater and temperature varies |
42 + | 'vesta': '4;', |
43 + | 'lutetia': '21;' |
44 + | } |
45 + | planets_and_moons = {'sun': '10', |
46 + | 'mercury': '199', |
47 + | 'venus': '299', |
48 + | 'moon': '301', |
49 + | 'mars': '499', |
50 + | 'jupiter': '599', |
51 + | 'io': '501', |
52 + | 'europa': '502', |
53 + | 'ganymede': '503', |
54 + | 'callisto': '504', |
55 + | 'saturn': '699', |
56 + | 'titan': '606', |
57 + | 'uranus': '799', |
58 + | 'neptune': '899', |
59 + | 'pluto': '999'} |
60 + | known_objects = planets_and_moons |
61 + | known_objects.update(asteroids) |
62 + | # default quantities (required by setjy) for CASA |
63 + | quantities = '1,14,15,17,19,20,24' |
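+ | # The quantity codes above correspond (per the JPL-Horizons manual, roughly) to: |
+ | # 1: astrometric RA & Dec, 14: observer sub-lon & sub-lat, 15: solar sub-lon & sub-lat, |
+ | # 17: north pole position angle & distance, 19: heliocentric range & range-rate, |
+ | # 20: observer range & range-rate, 24: Sun-Target-Observer phase angle |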
64 + | ang_format = 'DEG' |
65 + | |
66 + | # Check if objectname matches the known object list. When asis=True, |
67 + | # the check is skipped. |
68 + | # For small bodies such as comets, objectname must include 'DES=' followed by the designated |
69 + | # object name or id (SPK ID). Based on the JPL-Horizons documentation, for an IAU ID, 'DES=' |
70 + | # can be omitted. https://ssd.jpl.nasa.gov/horizons/app.html#command |
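+ | # (e.g. a comet could be requested with objectname="DES=C/2013 A1;" and asis=True; |
+ | # the designation here is only illustrative) |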
71 + | start_time = None |
72 + | stop_time = None |
73 + | step_size = None |
74 + | if not asis: |
75 + | if not objectname.lower() in known_objects: |
76 + | raise ValueError( |
77 + | f"{objectname} is not in the known object list for CASA. To skip this check set asis=True") |
78 + | else: |
79 + | target = known_objects[objectname.lower()] |
80 + | else: |
81 + | target = objectname |
82 + | try: |
83 + | if starttime.startswith('JD') or starttime.startswith('MJD'): |
84 + | start_time = starttime |
85 + | stop_time = stoptime |
86 + | else: |
87 + | start_time = _qa.time(starttime, form='ymd') |
88 + | stop_time = _qa.time(stoptime, form='ymd') |
89 + | except ValueError as e: |
90 + | casalog.post(str(e)) |
91 + | |
92 + | try: |
93 + | step_size = incr.replace(' ', '') |
94 + | except ValueError as e: |
95 + | casalog.post(str(e)) |
96 + | |
97 + | if _debug: |
98 + | print("target=",target) |
99 + | print(f"start_time={start_time}, stop_time={stop_time}, step_size={step_size}") |
100 + | print(f"quantities={quantities}, ang_format={ang_format}") |
101 + | ephemdata = queryhorizons(target, start_time, stop_time, step_size, quantities, ang_format, rawdatafile) |
102 + | # return ephemdata |
103 + | if ephemdata and 'result' in ephemdata: |
104 + | casalog.post('converting ephemeris data to a CASA table') |
105 + | tocasatb(ephemdata, outtable) |
106 + | if not os.path.exists(outtable): |
107 + | casalog.post('Failed to produce the CASA ephemeris table', 'WARN') |
108 + | if rawdatafile=='': |
109 + | casalog.post('You may want to re-run the task with the rawdatafile'+\ |
110 + | ' parameter specified and check the content of the output raw data file for further information on the error.','WARN') |
111 + | else: |
112 + | casalog.post(f'Please check the content of {rawdatafile} for further information on the error','WARN') |
113 + | |
114 + | |
115 + | def queryhorizons(target, starttime, stoptime, stepsize, quantities, ang_format, rawdatafile=''): |
116 + | """ |
117 + | Submit a query to the JPL-Horizons API |
118 + | |
119 + | arguments: |
120 + | target: a target solar system object name/id (str) |
121 + | starttime: ephemeris start time (e.g. '2021/12/01/00:00') |
122 + | stoptime: ephemeris stop time |
123 + | stepsize: ephemeris data time increment |
124 + | quantities: output data quantities given as a comma-separated list of integers which represent |
125 + | the specific data as defined in https://ssd.jpl.nasa.gov/horizons/manual.html |
126 + | ang_format: RA, Dec output format ('DEG' or 'HMS') |
127 + | rawdatafile: a file name to save raw query results (Optional) |
128 + | |
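+ | example (illustrative values): |
+ | data = queryhorizons('606', '2021/12/01/00:00', '2021/12/02/00:00', '1h', |
+ | '1,14,15,17,19,20,24', 'DEG') |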
129 + | """ |
130 + | import ast |
131 + | import certifi |
132 + | import ssl |
133 + | from urllib.request import Request, urlopen |
134 + | from urllib.parse import urlencode |
135 + | from urllib.error import URLError |
136 + | import socket |
137 + | |
138 + | context = ssl.create_default_context(cafile=certifi.where()) |
139 + | urlbase = 'https://ssd.jpl.nasa.gov/api/horizons.api' |
140 + | data = None |
141 + | |
142 + | values = {'format': 'json', |
143 + | 'EPHEM_TYPE': 'OBSERVER', |
144 + | 'OBJ_DATA': 'YES', |
145 + | 'COMMAND': f"'{target}'", |
146 + | 'START_TIME': starttime, |
147 + | 'STOP_TIME': stoptime, |
148 + | 'STEP_SIZE': stepsize, |
149 + | 'CENTER': '500@399', |
150 + | 'QUANTITIES': f"'{quantities}'", |
151 + | 'ANG_FORMAT': ang_format} |
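+ | # Note (for reference): CENTER='500@399' requests a geocentric observer (Earth body |
+ | # center) in Horizons notation; the values above are urlencoded and sent as the POST body. |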
152 + | |
153 + | pardata = urlencode(values, doseq=True, encoding='utf-8') |
154 + | params = pardata.encode('ascii') |
155 + | # for debugging |
156 + | #print("params=", params) |
157 + | |
158 + | req = Request(urlbase, params) |
159 + | datastr = None |
160 + | try: |
161 + | with urlopen(req, context=context, timeout=10) as response: |
162 + | datastr = response.read().decode() |
163 + | except URLError as e: |
164 + | if hasattr(e, 'reason'): |
165 + | msg = f'URLError: Failed to reach URL={urlbase} (reason: {e.reason})' |
166 + | casalog.post(msg, 'WARN') |
167 + | if hasattr(e, 'code'): |
168 + | msg = f'URLError: Could not fulfill the request to {urlbase} (code: {e.code})' |
169 + | casalog.post(msg, 'WARN') |
170 + | except socket.timeout as e: |
171 + | msg = f'Failed to reach URL={urlbase}. Socket timeout {e}' |
172 + | casalog.post(msg, 'WARN') |
173 + | |
174 + | status = response.getcode() if datastr else None  # response may be unbound if the request failed |
175 + | if datastr: |
176 + | try: |
177 + | data = ast.literal_eval(datastr) |
178 + | except (ValueError, SyntaxError) as e:  # ast.literal_eval raises these on malformed input |
179 + | casalog.post(str(e), 'SEVERE') |
180 + | if status == 200: |
181 + | # |
182 + | # If the ephemeris data file was generated, write it to the output file: |
183 + | if rawdatafile != "": |
184 + | if os.path.exists(rawdatafile): |
185 + | os.remove(rawdatafile) |
186 + | if "result" in data: |
187 + | with open(rawdatafile, "w") as f: |
188 + | f.write(data["result"]) |
189 + | # Otherwise, the ephemeris file was not generated so output an error |
190 + | else: |
191 + | casalog.post("ERROR: No data found. Ephemeris data file not generated", 'WARN') |
192 + | else: |
193 + | raise RuntimeError(f'Could not retrieve the data. Error code:{status}:{response.msg}') |
194 + | else: |
195 + | data = None |
196 + | return data |
197 + | |
198 + | |
199 + | def tocasatb(indata, outtable): |
200 + | """ |
201 + | convert a JPL-Horizons query result to a CASA table |
202 + | indata: either query data ('results') or file name that contains the result data |
203 + | outtable: output CASA table name |
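+ | example (illustrative): tocasatb(ephemdata, 'titan_ephem.tab') or |
+ | tocasatb('titan_raw.txt', 'titan_ephem.tab') |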
204 + | """ |
205 + | # input data columns |
206 + | # Date__(UT)__HR:MN R.A.___(ICRF)___DEC ObsSub-LON ObsSub-LAT SunSub-LON SunSub-LAT \ |
207 + | # NP.ang NP.dist r rdot delta deldot S-T-O |
208 + | import re |
209 + | |
210 + | # output CASA table columns |
211 + | # These are required columns for SetJy |
212 + | cols = { |
213 + | 'MJD': {'header': r'Date__\(UT\)', |
214 + | 'comment': 'date in MJD', |
215 + | 'unit': 'd'}, |
216 + | 'RA': {'header': 'R.A.', |
217 + | 'comment': 'astrometric Right Ascension (ICRF/J2000)', |
218 + | 'unit': 'deg'}, |
219 + | 'DEC': {'header': 'DEC', |
220 + | 'comment': 'astrometric Declination (ICRF/J2000)', |
221 + | 'unit': 'deg'}, |
222 + | 'Rho': {'header': 'delta', |
223 + | 'comment': 'geocentric distance', |
224 + | 'unit': 'AU'}, |
225 + | 'RadVel': {'header': 'deldot', |
226 + | 'comment': 'geocentric distance rate', |
227 + | 'unit': 'AU/d'}, |
228 + | 'NP_ang': {'header': 'NP.ang', |
229 + | 'comment': 'North-Pole pos. angle', |
230 + | 'unit': 'deg'}, |
231 + | 'NP_dist': {'header': 'NP.dist', |
232 + | 'comment': 'North-Pole ang. distance', |
233 + | 'unit': 'arcsec'}, |
234 + | 'DiskLong': {'header': 'ObsSub-LON', |
235 + | 'comment': 'Sub-observer longitude', |
236 + | 'unit': 'deg'}, |
237 + | 'DiskLat': {'header': r'ObsSub-LAT', |
238 + | 'comment': 'Sub-observer latitude', |
239 + | 'unit': 'deg'}, |
240 + | 'Sl_lon': {'header': 'SunSub-LON', |
241 + | 'comment': 'Sub-Solar longitude', |
242 + | 'unit': 'deg'}, |
243 + | 'Sl_lat': {'header': r'SunSub-LAT', |
244 + | 'comment': 'Sub-Solar latitude', |
245 + | 'unit': 'deg'}, |
246 + | 'r': {'header': 'r', |
247 + | 'comment': 'heliocentric distance', |
248 + | 'unit': 'AU'}, |
249 + | 'rdot': {'header': 'rdot', |
250 + | 'comment': 'heliocentric distance rate', |
251 + | 'unit': 'km/s'}, |
252 + | 'phang': {'header': 'S-T-O', |
253 + | 'comment': 'phase angle', |
254 + | 'unit': 'deg'} |
255 + | } |
256 + | |
257 + | # do this in two steps: |
258 + | # read the original query result dictionary containing ephemeris data and save the data part |
259 + | # to a temporary text file. Then re-read the temporary file into a casa table |
260 + | # with the appropriate data format that setjy and measure expect. |
261 + | tempfname = 'temp_ephem_'+str(os.getpid())+'.dat' |
262 + | tempconvfname = 'temp_ephem_conv_'+str(os.getpid())+'.dat' |
263 + | try: |
264 + | exceedthelinelimit = None |
265 + | ambiguousname = None |
266 + | queryerrmsg = '' |
267 + | # Scan the original data |
268 + | if isinstance(indata, dict) and 'result' in indata: |
269 + | #print("ephem data dict") |
270 + | ephemdata = indata['result'] |
271 + | elif isinstance(indata, str): |
272 + | if os.path.exists(indata): |
273 + | with open(indata, 'r') as infile: |
274 + | ephemdata = infile.read()  # read as a single string so the split('\n') below works |
275 + | # the relevant information in the main header section is extracted as |
276 + | # it is read line by line. The ephemeris data is stored in a separate text file |
277 + | # to be further processed in subsequent steps. |
278 + | with open(tempfname, 'w') as outfile: |
279 + | # Some initialization |
280 + | headerdict = {} |
281 + | # jplhorizonsdataIdFound = False |
282 + | datalines = 0 |
283 + | readthedata = False |
284 + | startmjd = None |
285 + | endmjd = None |
286 + | incolnames = None |
287 + | # multiple entries (in different units) may exist for orb. per. |
288 + | foundorbper = False |
289 + | ### |
290 + | lcnt = 0 |
291 + | # for lnum, line in enumerate(infile): |
292 + | for lnum, line in enumerate(ephemdata.split('\n')): |
293 + | # JPL-Horizons data should contain this line at the beginning |
294 + | if re.search(r'JPL/HORIZONS', line): |
295 + | # jplhorizondataIdFound = True |
296 + | casalog.post("Looks like JPL-Horizons data","INFO2") |
297 + | elif re.search(r'^\s*Ephemeris\s+', line): # date the data file was retrieved and created |
298 + | #m = re.search(r'(API_USER\S+\s+(\S+)\s+([0-9]+)\s+(\S+)\s+(\S+)') |
299 + | (_, _, _, wday, mon, day, tm, year, _) = re.split(' +', line, 8) |
300 + | # date info in 3-7th items |
301 + | |
302 + | try: |
303 + | float(mon) |
304 + | nmon = mon |
305 + | except ValueError: |
306 + | tonummon=time.strptime(mon,"%b") |
307 + | nmon = f"{tonummon.tm_mon:02d}" |
308 + | day2 = f"{int(day):02d}" |
309 + | headerdict['VS_CREATE'] = year + '/' + nmon + '/' + day2 + '/' + tm[0:5] |
310 + | # VS_DATE - use the current time to indicate the time CASA table is created |
311 + | headerdict['VS_DATE'] = time.strftime('%Y/%m/%d/%H:%M',time.gmtime() ) |
312 + | headerdict['VS_TYPE'] = 'Table of comet/planetary positions' |
313 + | # VERSION stored in the output table may be incremented in the future. |
314 + | # For now, it is fixed; it was incremented from 0003 to 0004 to indicate |
315 + | # that this new code is used to convert the JPL-Horizons data to a table. |
316 + | headerdict['VS_VERSION'] = '0004.000' |
317 + | # target object name |
318 + | elif re.match(r'^[>\s]*Target body name', line): |
319 + | m = re.match(r'^[>\s]*Target body name:\s+\d*\s*(\w+)', line) |
320 + | if m: |
321 + | headerdict['NAME'] = m[1] |
322 + | # start time (of the requested time range) |
323 + | elif re.search(r'Start time', line): |
324 + | m = re.match(r'^[>\s]*Start time\s+\S+\s+\S+\s+(\S+)\s+(\S+)\s+(\w+)', line) |
325 + | if m: |
326 + | startmjd = _qa.totime(m[1] + '/' + m[2]) |
327 + | #--This info will not be used but is left here since it might be useful for debugging. |
328 + | # # end time (of the requested time range) |
329 + | elif re.search(r'Stop time', line): |
330 + | m = re.match(r'^[>\s]*Stop time\s+\S+\s+\S+\s+(\S+)\s+(\S+)\s+(\w+)', line) |
331 + | if m: |
332 + | endmjd = _qa.totime(m[1] + '/' + m[2]) |
333 + | # date increment |
334 + | elif re.search(r'Step-size', line): |
335 + | m = re.match(r'^[>\s]*Step-size\s+\S+\s+(\S+)\s+(\w+)', line) |
336 + | if m: |
337 + | unit = m[2] |
338 + | if unit == 'minutes': |
339 + | # this is the default unit returned by JPL-Horizons |
340 + | theunit = 'min' |
341 + | elif unit == 'hours': |
342 + | theunit = 'h' |
343 + | elif unit == 'days': |
344 + | theunit = 'd' |
345 + | elif unit == 'steps': |
346 + | theunit = 'steps' |
347 + | elif unit == 'calendar': |
348 + | raise RuntimeError('Unit of Step-size in calendar month or year is not supported.') |
349 + | else: |
350 + | raise RuntimeError(f'Unit of Step-size, {unit} is not recognized.') |
351 + | if theunit == 'd': |
352 + | dmjd = float(m[1])  # keep dMJD numeric so MJD0 below can be computed |
353 + | elif theunit == 'steps': |
354 + | # print('endtime=',endmjd['value']) |
355 + | dmjd = (endmjd['value'] - startmjd['value'])/int(m[1]) |
356 + | else: |
357 + | dmjd = _qa.convert(_qa.totime(m[1] + theunit), 'd')['value'] |
358 + | headerdict['dMJD'] = dmjd |
359 + | if startmjd is not None: # start mjd should be available before step-size line |
360 + | # MJD0 = firstMJD - dMJD (as defined in the casacore MeasComet documentation) |
361 + | headerdict['MJD0'] = startmjd['value'] - dmjd |
362 + | elif re.search(r'Center geodetic', line): |
363 + | m = re.match(r'^[>\s]*Center geodetic\s*: ([-+0-9.]+,\s*[-+0-9.]+,\s*[-+0-9.]+)', line) |
364 + | if m: |
365 + | long_lat_alt = m[1].split(',') |
366 + | headerdict['GeoLong'] = float(long_lat_alt[0]) |
367 + | headerdict['GeoLat'] = float(long_lat_alt[1]) |
368 + | headerdict['GeoDist'] = float(long_lat_alt[2]) |
369 + | # obs location |
370 + | elif re.search(r'Center-site name', line): |
371 + | m = re.match(r'^[>\s]*Center-site name:\s+(\w+)', line) |
372 + | if m: |
373 + | headerdict['obsloc'] = m[1] |
374 + | headerdict['posrefsys'] = 'ICRF/J2000.0' |
375 + | elif re.search(r'Target radii', line): |
376 + | m = re.match(r'^[>\s]*Target radii\s*:\s*([0-9.]+\s*x\s*[0-9.]+\s*x\s*[0-9.]+)\s*km.*|' |
377 + | r'^[>\s]*Target radii\s*:\s*([0-9.]+)\s*km', line) |
378 + | if m: |
379 + | matchlist = m.groups() |
380 + | radiiarr = np.zeros(3) |
381 + | if len(matchlist)==2: |
382 + | if m[2] is None: |
383 + | radiiarr = np.asarray(np.array(m[1].split(' x ')), dtype=np.float64) |
384 + | headerdict['radii'] = {'unit': 'km', 'value': radiiarr} |
385 + | elif m[1] is None: |
386 + | radiiarr = np.array([m[2],m[2],m[2]], dtype=np.float64) |
387 + | headerdict['radii'] = {'unit': 'km', 'value': radiiarr} |
388 + | else: |
389 + | casalog.post(f"Unexpected number of matches for Target radii:{m.groups()} (expected 2)", 'WARN') |
390 + | # rotational period (a few patterns seem to exist) |
391 + | elif re.search(r'rot. period|Rotational period', line): |
392 + | m = re.search(r'rot. period\s+\S*=\s*([0-9.]+h\s*[0-9.]+m\s*[0-9.]+\s*s)|' |
393 + | r'rot. period\s+\S*=\s*([0-9.]+)(?:\s*\+-[0-9.]+)?\s*([dh])|' |
394 + | r'Rotational period\s*=\s+Synchronous', line) |
395 + | if m: |
396 + | if m[0].find('Synchronous') != -1: |
397 + | headerdict['rot_per'] = 'Synchronous' |
398 + | else: |
399 + | if len(m.groups()) == 3: |
400 + | if m[1] is None: |
401 + | headerdict['rot_per'] = _qa.quantity(m[2] + m[3]) |
402 + | elif m[2] is None and m[3] is None: |
403 + | #subm = re.search(r'([0-9]+)h\s*([0-9]+)m\s*([0-9.]+)\s*s',m[1]) |
404 + | headerdict['rot_per'] = _qa.convert(re.sub(r'\s+','',m[1]),'h') |
405 + | # another variation of rotational period entry |
406 + | elif re.search(r'rot.\s+per.', line): |
407 + | m = re.search(r'rot.\s+per.\s*=\s*([0-9.]+)\s*([dh])',line) |
408 + | if m: |
409 + | headerdict['rot_per'] = _qa.quantity(m[1]+m[2]) |
410 + | # rotational period for asteroids |
411 + | elif re.search(r'ROTPER', line): |
412 + | m = re.search(r'ROTPER=\s*([0-9.]+)\s*', line) |
413 + | if m: |
414 + | headerdict['rot_per'] = {'unit': 'h', 'value': float(m[1])} |
415 + | elif re.search(r'orbit period|orb per|orb. per.|orbital period', line.lower()) \ |
416 + | and not foundorbper: |
417 + | m = re.search(r'Orbital period\s*[=~]\s*([-0-9.]+)\s*(\w+)\s*|' |
418 + | r'orb. per., (\w)\s+=\s+([0-9.]+)\s+|' |
419 + | r'orb per\s+=\s+([0-9.]+)\s+(\w+)\s+|' |
420 + | r'orbit period\s+=\s+([0-9.]+)\s+(\w+)\s+', line) |
421 + | if m: |
422 + | if m[0].find('Orbital period') != -1: |
423 + | headerdict['orb_per'] = {'unit': m[2], 'value': float(m[1])} |
424 + | elif m[0].find('orb. per.') != -1: |
425 + | headerdict['orb_per'] = {'unit': m[3], 'value': float(m[4])} |
426 + | elif m[0].find('orb per') != -1: |
427 + | headerdict['orb_per'] = {'unit': m[6], 'value': float(m[5])} |
428 + | elif m[0].find('orbit period') != -1: |
429 + | headerdict['orb_per'] = {'unit': m[8], 'value': float(m[7])} |
430 + | if 'orb_per' in headerdict: |
431 + | foundorbper = True |
432 + | elif re.search(r'Mean [Tt]emperature', line): |
433 + | m = re.search(r'Mean [Tt]emperature\s+\(K\)\s+=\s+([0-9.]+)\s+',line) |
434 + | if m: |
435 + | headerdict['T_mean'] = {'unit':'K', 'value':float(m[1])} |
436 + | # start reading data |
437 + | elif re.search(r'\s*Date__\(UT\)', line): |
438 + | incolnames = line.split() |
439 + | elif re.search(r'\$\$SOE', line): |
440 + | readthedata = True |
441 + | elif re.search(r'\$\$EOE', line): |
442 + | readthedata = False |
443 + | elif readthedata: |
444 + | datalines += 1 |
445 + | outfile.write(line + '\n') |
446 + | elif re.search(r'Projected output length', line): |
447 + | # this message occurs when requested data is too large |
448 + | exceedthelinelimit = line |
449 + | elif re.search(r'Multiple major-bodies match', line): |
450 + | ambiguousname = line |
451 + | else: |
452 + | pass |
453 + | lcnt += 1 |
454 + | if 'radii' in headerdict: |
455 + | radiival = headerdict['radii']['value'] |
456 + | meanrad = _mean_radius(radiival[0], radiival[1], radiival[2]) |
457 + | headerdict['meanrad'] = {'unit': 'km', 'value': meanrad} |
458 + | if datalines == 0: |
459 + | casalog.post("No ephemeris data was found", "WARN") |
460 + | queryerrmsg = exceedthelinelimit if exceedthelinelimit is not None else ambiguousname |
461 + | |
462 + | if queryerrmsg: |
463 + | raise RuntimeError(f'Error occurred during the query: {queryerrmsg}') |
464 + | else: |
465 + | raise RuntimeError('Error occurred during the query') |
466 + | else: |
467 + | casalog.post(f'Number of data lines={datalines}') |
468 + | casalog.post(f'Number of all lines in the file={lcnt}') |
469 + | #print("headerdict=", headerdict) |
470 + | # output to a casa table |
471 + | |
472 + | # check the input data columns and store the order as indices |
473 + | foundncols = 0 |
474 + | indexoffset = 0 |
475 + | colkeys = {} |
476 + | #print('incolnames=',incolnames) |
477 + | if incolnames is not None: |
478 + | for outcolname in cols: |
479 + | # all colnames in cols should have a unit defined. |
480 + | if 'unit' in cols[outcolname]: |
481 + | colkeys[outcolname] = np.array([cols[outcolname]['unit']]) |
482 + | else: |
483 + | raise KeyError(f'Missing unit for {outcolname}.') |
484 + | inheadername = cols[outcolname]['header'] |
485 + | # expect the date in the first column (calendar date and hh:mm separated by spaces) |
486 + | if outcolname == 'MJD': |
487 + | for incol in incolnames: |
488 + | if re.search(inheadername + '.+', incol): |
489 + | cols[outcolname]['index'] = incolnames.index(incol) |
490 + | foundncols += 1 |
491 + | indexoffset = 1 |
492 + | if 'index' not in cols[outcolname]: |
493 + | casalog.post("Cannot find the Date column", "WARN") |
494 + | elif outcolname == 'RA': |
495 + | for incol in incolnames: |
496 + | if re.search(inheadername + '.+(ICRF).+', incol): |
497 + | cols[outcolname]['index'] = incolnames.index(incol) + indexoffset |
498 + | foundncols += 1 |
499 + | |
500 + | if 'index' not in cols[outcolname]: |
501 + | casalog.post("Cannot find the astrometric RA and Dec column", "WARN") |
502 + | elif outcolname == 'DEC': |
503 + | if 'index' in cols['RA']: |
504 + | # Dec data col is next to RA data col |
505 + | cols[outcolname]['index'] = cols['RA']['index'] + 1 |
506 + | # add additional offset for index (4 data columns at this point) |
507 + | indexoffset +=1 |
508 + | foundncols += 1 |
509 + | else: |
510 + | if inheadername in incolnames: |
511 + | cols[outcolname]['index'] = incolnames.index(inheadername) + indexoffset |
512 + | foundncols += 1 |
513 + | else: |
514 + | casalog.post(f"Cannot find {inheadername}", "WARN") |
515 + | |
516 + | #print(cols) |
517 + | casalog.post(f"expected n cols = {len(cols)} ") |
518 + | casalog.post(f"foundncols = {foundncols}") |
519 + | if foundncols == len(cols): |
520 + | # Format the data to comply with measure/setjy |
521 + | casalog.post("Found all the required columns") |
522 + | with open(tempconvfname, 'w') as outf, open(tempfname, 'r') as inf: |
523 + | ndata = 0 |
524 + | earliestmjd = None |
525 + | mjd = None |
526 + | prevmjd = None |
527 + | for line in inf: |
528 + | outline = '' |
529 + | sep = ' ' |
530 + | rawtempdata = line.split() |
531 + | tempdata = ['-999.0' if x == 'n.a.' else x for x in rawtempdata] |
532 + | # construct mjd from col 1 (calendar date) + col 2 (time) |
533 + | caldatestr = tempdata[0] + ' ' + tempdata[1] |
534 + | mjd = _qa.totime(caldatestr) |
535 + | if mjd == prevmjd: |
536 + | raise RuntimeError(f'Duplicated timestamp, {mjd}, is detected. This may occur when '+ |
537 + | 'a time range is specified in MJD with a short time interval. If that is the case, '+ |
538 + | 'try calendar date+time string for the time range.') |
539 + | outline += str(mjd['value']) + sep |
540 + | # position |
541 + | rad = tempdata[cols['RA']['index']] |
542 + | decd = tempdata[cols['DEC']['index']] |
543 + | outline += rad + sep + decd + sep |
544 + | # geocentric dist. (Rho) |
545 + | delta = tempdata[cols['Rho']['index']] |
546 + | outline += delta + sep |
547 + | # geocentric range rate (RadVel) |
548 + | valinkmps = tempdata[cols['RadVel']['index']] |
549 + | deldot = _qa.convert(_qa.quantity(valinkmps+'km/s'), 'AU/d' )['value'] |
550 + | outline += str(deldot) + sep |
551 + | # NP_ang & NP_dist |
552 + | npang = tempdata[cols['NP_ang']['index']] |
553 + | npdist = tempdata[cols['NP_dist']['index']] |
554 + | outline += npang + sep + npdist + sep |
555 + | # DiskLong & DiskLat |
556 + | disklong = tempdata[cols['DiskLong']['index']] |
557 + | disklat = tempdata[cols['DiskLat']['index']] |
558 + | outline += disklong + sep + disklat + sep |
559 + | # sub-long & sub-lat |
560 + | sllon = tempdata[cols['Sl_lon']['index']] |
561 + | sllat = tempdata[cols['Sl_lat']['index']] |
562 + | outline += sllon + sep + sllat + sep |
563 + | # r, rot |
564 + | r = tempdata[cols['r']['index']] |
565 + | rdot = tempdata[cols['rdot']['index']] |
566 + | outline += r + sep + rdot + sep |
567 + | # S-T-O |
568 + | phang = tempdata[cols['phang']['index']] |
569 + | outline += phang |
570 + | outf.write(outline + '\n') |
571 + | |
572 + | ndata += 1 |
573 + | if ndata == 1: |
574 + | earliestmjd = mjd |
575 + | prevmjd = mjd |
576 + | # record first and last mjd in the data |
577 + | headerdict['earliest'] = _me.epoch('UTC',earliestmjd) |
578 + | headerdict['latest'] = _me.epoch('UTC', mjd) |
579 + | |
580 + | else: |
581 + | missingcols = len(cols) - foundncols |
582 + | casalog.post(f"Missing {missingcols} of the required columns") |
583 + | |
584 + | # final step: convert to a CASA table |
585 + | dtypes = np.array(['D' for _ in range(len(cols))]) |
586 + | _tb.fromascii(outtable, tempconvfname, sep=' ', columnnames=list(cols.keys()), |
587 + | datatypes=dtypes.tolist()) |
588 + | _tb.done() |
589 + | |
590 + | # fill keyword values in the ephem table |
591 + | if os.path.exists(outtable): |
592 + | _fill_keywords_from_dict(headerdict, colkeys, outtable) |
593 + | casalog.post(f"Output is written to a CASA table, {outtable}") |
594 + | else: |
595 + | raise RuntimeError("Error occurred. The output table, " + outtable + ", is not generated") |
596 + | else: #incolnames is None |
597 + | raise RuntimeError("No data header was found.") |
598 + | |
599 + | except RuntimeError as e: |
600 + | casalog.post(str(e),"SEVERE") |
601 + | |
602 + | finally: |
603 + | tempfiles = [tempfname, tempconvfname] |
604 + | _clean_up(tempfiles) |
605 + | |
606 + | # This needs to change to read the ephem data from the generated casa table. |
607 + | # Will be called in fill_keywords_from_dict() |
608 + | |
609 + | # Copied from JPLephem_reader2.py for mean_radius calc (when no NP-ang, NP_dist) |
610 + | # Note: this is not fully verified for correctness but has been used |
611 + | # through JPLephem_reader2 for processing the ephemeris data to a CASA table. |
612 + | # For setjy, solar_system_setjy does its own mean radius calculation and |
613 + | # meanrad in the table is not used for that. 2022.01.12 TT |
614 + | def _mean_radius(a, b, c): |
615 + | """ |
616 + | Return the average apparent mean radius of an ellipsoid with semiaxes |
617 + | a >= b >= c. |
618 + | "average" means average over time naively assuming the pole orientation |
619 + | is uniformly distributed over the whole sphere, and "apparent mean radius" |
620 + | means a radius that would give the same area as the apparent disk. |
621 + | """ |
622 + | # This is an approximation, but it's not bad. |
623 + | # The exact equations for going from a, b, c, and the Euler angles to the |
624 + | # apparent ellipse are given in Drummond et al., Icarus, 1985a. |
625 + | # It's the integral over the spin phase that I have approximated, so the |
626 + | # approximation is exact for b == a and appears to hold well for b << a. |
627 + | R = 0.5 * c ** 2 * (1.0 / b ** 2 + 1.0 / a ** 2) # The magic ratio. |
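+ | # Sanity check: for a sphere (a == b == c) this gives R == 1, the series branch below |
+ | # yields Rterm == 1, and the returned radius reduces to a. |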
628 + | if R < 0.95: |
629 + | sqrt1mR = sqrt(1.0 - R) |
630 + | # There is a fake singularity (RlnR) at R = 0, but it is unlikely to |
631 + | # be a problem. |
632 + | try: |
633 + | Rterm = 0.5 * R * log((1.0 + sqrt1mR) / (1.0 - sqrt1mR)) / sqrt1mR |
634 + | except RuntimeError: |
635 + | Rterm = 0.0 |
636 + | else: |
637 + | # Use a (rapidly converging) series expansion to avoid a fake |
638 + | # singularity at R = 1. |
639 + | Rterm = 1.0 # 0th order |
640 + | onemR = 1.0 - R |
641 + | onemRtothei = 1.0 |
642 + | for i in range(1, 5): # Start series at 1st order. |
643 + | onemRtothei *= onemR |
644 + | Rterm -= onemRtothei / (0.5 + 2.0 * i ** 2) |
645 + | avalfabeta = 0.5 * a * b * (1.0 + Rterm) |
646 + | return sqrt(avalfabeta) |
647 + | |
648 + | |
649 + | def _mean_radius_with_known_theta(disklat, radii): |
650 + | """ |
651 + | Mean radius calculation extracted from solar_system_setjy |
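+ | (e.g. for a spherical body with radii[0] == radii[1] == radii[2], the result reduces |
+ | to that radius in km, independent of the disk latitude values) |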
652 + | """ |
653 + | Req = 1000.0 * (radii[0] + radii[1]) / 2 |
654 + | Rp = 1000.0 * radii[2] |
655 + | |
656 + | Rmean = 0.0 |
657 + | index = 0 |
658 + | for index, lat in enumerate(disklat): |
659 + | if lat == -999.0: |
660 + | lat = 0.0 |
661 + | rlat = lat * (np.pi / 180.)  # convert the latitude from degrees to radians for sin/cos |
662 + | Rpap = sqrt(Req * Req * sin(rlat) ** 2.0 + |
663 + | Rp * Rp * cos(rlat) ** 2.0) |
664 + | # Rmean += ( sqrt (Rpap * Req) - Rmean )/ (index + 1.0) |
665 + | Rmean += sqrt(Rpap * Req) |
666 + | |
667 + | return (Rmean / (index + 1.0)) / 1000.0 |
668 + | |
669 + | |
670 + | def _fill_keywords_from_dict(keydict, colkeys, tablename): |
671 + | # open tb |
672 + | # get ra,dec,np_ra, np_dec, and radii in the keyword |
673 + | # call mod version of mean_radius_with_known_theta |
674 + | orderedmainkeys = ['VS_CREATE','VS_DATE','VS_TYPE','VS_VERSION','NAME', |
675 + | 'MJD0','dMJD','GeoDist','GeoLat','GeoLong','obsloc', |
676 + | 'posrefsys','earliest','latest','radii', |
677 + | 'meanrad','orb_per','rot_per','T_mean'] |
678 + | try: |
679 + | _tb.open(tablename, nomodify=False) |
680 + | disklat = _tb.getcol('DiskLat') |
681 + | if 'radii' in keydict: |
682 + | radiival = keydict['radii']['value'] |
683 + | calc_meanrad = _mean_radius_with_known_theta(disklat, radiival) |
684 + | if calc_meanrad and ('meanrad' in keydict): |
685 + | keydict['meanrad']['value'] = calc_meanrad |
686 + | if 'rot_per' in keydict and 'orb_per' in keydict and keydict['rot_per'] == 'Synchronous': |
687 + | keydict['rot_per'] = keydict['orb_per'] |
688 + | sortedkeydict = {k: keydict[k] for k in orderedmainkeys if k in keydict} |
689 + | #print('sorteddict=',sortedkeydict) |
690 + | for k in sortedkeydict: |
691 + | _tb.putkeyword(k, sortedkeydict[k]) |
692 + | datacolnames = _tb.colnames() |
693 + | for col in datacolnames: |
694 + | if col in colkeys: |
695 + | _tb.putcolkeyword(col, 'QuantumUnits', colkeys[col]) |
696 + | # add table info required by measComet |
697 + | maintbinfo = {'readme': 'Derived by jplhorizons-query from JPL-Horizons API ' |
698 + | '(https://ssd.jpl.nasa.gov/api/horizons.api)', |
699 + | 'subType':'Comet', |
700 + | 'type': 'IERS'} |
701 + | _tb.putinfo(maintbinfo) |
702 + | _tb.flush() |
703 + | _tb.done() |
704 + | except RuntimeError: |
705 + | casalog.post('Internal error: Cannot add the data to the table keywords', "WARN") |
706 + | |
707 + | def _clean_up(filelist): |
708 + | """ |
709 + | Clean up the temporary files |
710 + | """ |
711 + | for f in filelist: |
712 + | if os.path.exists(f): |
713 + | os.remove(f) |