Akeem Wells authored 89eddf0ec3f Merge
1 - | """ |
2 - | Updated version to support the current JPL-Horizons query results as of June 2013. |
3 - | (When S-T-O is requested, the current query result returns phi, PAB-LON, and PAB-LAT |
4 - | along with the S-T-O value.) |
5 - | |
6 - | casapy functions for converting ASCII ephemerides from JPL-Horizons into |
7 - | CASA tables and installing them where casapy can find them. |
8 - | |
9 - | jplfiles_to_repository() puts it all together, so it is most likely the |
10 - | function you want. |
11 - | |
12 - | There are various utilities like convert_radec, datestr*, get_num_from_str, |
13 - | mean_radius*, and construct_tablepath defined in here as well. |
14 - | """ |
15 - | |
16 - | from __future__ import absolute_import |
17 - | from glob import glob |
18 - | import os |
19 - | import re |
20 - | import shutil, sys, tempfile  # shutil and tempfile are used by dict_to_table() |
21 - | import scipy.special |
22 - | import time # We can always use more time. |
23 - | import numpy |
24 - | |
25 - | # get is_python3 and is_CASA6 |
26 - | from casatasks.private.casa_transition import * |
27 - | if is_CASA6: |
28 - | from casatools import quanta, measures |
29 - | from casatools import table as tbtool |
30 - | from casatasks import casalog |
31 - | |
32 - | _qa = quanta( ) |
33 - | _me = measures( ) |
34 - | else: |
35 - | from taskinit import gentools, tbtool, qa, casalog |
36 - | |
37 - | _me = gentools(['me'])[0] |
38 - | # not really a local tool |
39 - | _qa = qa |
40 - | |
41 - | # Possible columns, as announced by their column titles. |
42 - | # The data is scooped up by 'pat'. Either use ONE group named by the column |
43 - | # key, or mark it as unwanted. '-' is not valid in group names. |
44 - | # Leading and trailing whitespace will be taken care of later. |
45 - | # Sample lines: |
46 - | # Date__(UT)__HR:MN R.A.___(ICRF/J2000.0)___DEC Ob-lon Ob-lat Sl-lon Sl-lat NP.ang NP.dist r rdot delta deldot S-T-O L_s |
47 - | # 2010-May-01 00:00 09 01 43.1966 +19 04 28.673 286.52 18.22 246.99 25.34 358.6230 3.44 1.661167637023 -0.5303431 1.28664311447968 15.7195833 37.3033 84.50 |
48 - | # |
49 - | # Some labels and comments below are modified so that they correspond to |
50 - | # the JPL-Horizons naming convention. |
51 - | cols = { |
52 - | 'MJD': {'header': r'Date__\(UT\)__HR:MN(:)?(\w+)?(\.)?(\w+)?', |
53 - | 'comment': 'date', |
54 - | 'pat': r'(?P<MJD>\d+-\w+-\d+ \d+:\d+)'}, |
55 - | 'RA': {'header': r'R.A._+\([^)]+', |
56 - | 'comment': 'Right Ascension', |
57 - | 'pat': r'(?P<RA>(\d+ \d+ )?\d+\.\d+)'}, # require a . for safety |
58 - | 'DEC': {'header': r'\)_+DEC.', |
59 - | 'comment': 'Declination', |
60 - | 'pat': r'(?P<DEC>([-+]?\d+ \d+ )?[-+]?\d+\.\d+)'}, |
61 - | 'NP_ang': {'header': r'NP\.ang', |
62 - | 'comment': 'North-Pole pos. angle', |
63 - | 'pat': r'(?P<NP_ang>[0-9.]+|n\.a\.)', |
64 - | 'unit': 'deg'}, |
65 - | 'NP_dist': {'header': r'NP\.dist', |
66 - | 'comment': 'North-Pole ang. distance', |
67 - | 'pat': r'(?P<NP_dist>[-+0-9.]+|n\.a\.)', |
68 - | 'unit': 'arcsec'}, |
69 - | 'illu': {'header': r'Illu%', |
70 - | #'comment': 'Illumination', |
71 - | 'comment': 'Illuminated fraction', |
72 - | 'pat': r'(?P<illu>[0-9.]+)', |
73 - | 'unit': r'%'}, |
74 - | # put back to original heading... |
75 - | # TT: query result header name change (sometime in 2018) |
76 - | # Ob_lon -> Obsrv_lon, Ob_lat->Obsrv_lat, |
77 - | # Sl_lon -> Solar-lon, Sl_lat ->Solar-lat |
78 - | #'DiskLong': {'header': r'Ob-lon', |
79 - | 'DiskLong': {'header': r'Obsrv-lon', |
80 - | 'comment': 'Sub-observer longitude', |
81 - | # 'pat': r'(?P<Obs_Long>[0-9.]+|n\.a\.)', |
82 - | 'pat': r'(?P<DiskLong>[0-9.]+|n\.a\.)', |
83 - | 'unit': 'deg'}, |
84 - | #'DiskLat': {'header': r'Ob-lat', |
85 - | 'DiskLat': {'header': r'Obsrv-lat', |
86 - | 'comment': 'Sub-observer latitude', |
87 - | # 'pat': r'(?P<Obs_Lat>[-+0-9.]+|n\.a\.)', |
88 - | 'pat': r'(?P<DiskLat>[-+0-9.]+|n\.a\.)', |
89 - | 'unit': 'deg'}, |
90 - | #'Sl_lon': {'header': r'Sl-lon', |
91 - | 'Sl_lon': {'header': r'Solar-lon', |
92 - | 'comment': 'Sub-Solar longitude', |
93 - | 'pat': r'(?P<Sl_lon>[0-9.]+|n\.a\.)', |
94 - | 'unit': 'deg'}, |
95 - | #'Sl_lat': {'header': r'Sl-lat', |
96 - | 'Sl_lat': {'header': r'Solar-lat', |
97 - | 'comment': 'Sub-Solar latitude', |
98 - | 'pat': r'(?P<Sl_lat>[-+0-9.]+|n\.a\.)', |
99 - | 'unit': 'deg'}, |
100 - | |
101 - | # These are J2000 whether or not ra and dec are apparent directions. |
102 - | 'NP_RA': {'header': r'N\.Pole-RA', |
103 - | 'comment': 'North Pole right ascension', |
104 - | 'pat': r'(?P<NP_RA>(\d+ \d+ )?\d+\.\d+)'}, # require a . for safety |
105 - | 'NP_DEC': {'header': r'N\.Pole-DC', |
106 - | 'comment': 'North Pole declination', |
107 - | 'pat': r'(?P<NP_DEC>([-+]?\d+ \d+ )?[-+]?\d+\.\d+)'}, |
108 - | |
109 - | 'r': {'header': 'r', |
110 - | 'comment': 'heliocentric distance', |
111 - | 'unit': 'AU', |
112 - | 'pat': r'(?P<r>[0-9.]+)'}, |
113 - | 'rdot': {'header': 'rdot', |
114 - | #'comment': 'heliocentric velocity', |
115 - | 'comment': 'heliocentric distance rate', |
116 - | 'unit': 'km/s', |
117 - | 'pat': r'(?P<rdot>[-+0-9.]+)'}, |
118 - | # 'unwanted': True}, |
119 - | 'Rho': {'header': 'delta', |
120 - | 'comment': 'geocentric distance', |
121 - | 'unit': 'AU', |
122 - | 'pat': r'(?P<Rho>[0-9.]+)'}, |
123 - | 'RadVel': {'header': 'deldot', |
124 - | #'comment': 'Radial velocity relative to the observer', |
125 - | 'comment': 'geocentric distance rate', |
126 - | 'pat': r'(?P<RadVel>[-+0-9.]+)', |
127 - | 'unit': 'km/s'}, |
128 - | 'phang': {'header': 'S-T-O', |
129 - | 'comment': 'phase angle', |
130 - | 'unit': 'deg', |
131 - | 'pat': r'(?P<phang>[0-9.]+)'}, |
132 - | # added columns to match the current (as of June 2013) query result |
133 - | 'phi': {'header': 'phi', |
134 - | 'comment': 'phase angle', |
135 - | 'unit': 'deg', |
136 - | 'pat': r'(?P<phi>[0-9.]+)', |
137 - | 'unwanted': True}, |
138 - | 'PAB_LON': {'header': 'PAB-LON', |
139 - | 'comment': 'ecliptic longitude', |
140 - | 'unit': 'deg', |
141 - | 'pat': r'(?P<PAB_LON>[0-9.]+)', |
142 - | 'unwanted': True}, |
143 - | 'PAB_LAT': {'header': 'PAB-LAT', |
144 - | 'comment': 'ecliptic latitude', |
145 - | 'unit': 'deg', |
146 - | 'pat': r'(?P<PAB_LAT>[-+0-9.]+)', |
147 - | 'unwanted': True}, |
148 - | # ----------------------------------------------------------- |
149 - | 'ang_sep': {'header': 'ang-sep/v', |
150 - | 'comment': 'Angular separation from primary', |
151 - | 'pat': r'(?P<ang_sep>[0-9.]+/.)'}, # arcsec, "visibility code". |
152 - | # t: transiting primary |
153 - | # O: occulted by primary |
154 - | # p: partial umbral eclipse |
155 - | # P: occulted partial umbral eclipse |
156 - | # u: total umbral eclipse |
157 - | # U: occulted total umbral eclipse |
158 - | # *: none of the above |
159 - | # -: target is primary |
160 - | 'L_s': {'header': 'L_s', # 08/2010: JPL does not supply this and |
161 - | 'unit': 'deg', # says they cannot. Ask Bryan Butler. |
162 - | 'comment': 'Season angle', |
163 - | 'pat': r'(?P<L_s>[-+0-9.]+)'} |
164 - | |
165 - | } |
166 - | |
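# Illustrative sketch (never called in this module) of how the 'pat' entries
# above combine into a row-matching regex; the helper name and the column
# subset are hypothetical, and the sample line is the one quoted in the
# comment above the cols dict.
def _example_parse_sample_row():
    sample = '2010-May-01 00:00 09 01 43.1966 +19 04 28.673'
    datapat = r'^\s*' + r'\s+'.join(cols[c]['pat'] for c in ['MJD', 'RA', 'DEC'])
    m = re.search(datapat, sample)
    # m.groupdict() -> {'MJD': '2010-May-01 00:00',
    #                   'RA': '09 01 43.1966',
    #                   'DEC': '+19 04 28.673'}
    return m.groupdict()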
167 - | def readJPLephem(fmfile,version=''): |
168 - | """ |
169 - | Reads a JPL Horizons text file (see |
170 - | http://ssd.jpl.nasa.gov/horizons.cgi#top ) for a solar system object and |
171 - | returns various quantities in a dictionary. The dict will be blank ({}) if |
172 - | there is a failure. |
173 - | """ |
174 - | retdict = {} |
175 - | casalog.origin('readJPLephem') |
176 - | |
177 - | # Try opening fmfile now, because otherwise there's no point continuing. |
178 - | try: |
179 - | ephem = open(fmfile, 'rb') |
180 - | casalog.post("opened the file=%s" % fmfile) |
181 - | lines=ephem.readlines() |
182 - | # skip this, handle by rstrip later |
183 - | #crCount=0 |
184 - | #newlines='' |
185 - | #newln='' |
186 - | #for ln in lines: |
187 - | # n = ln.count('\r') |
188 - | # if n > 0: |
189 - | # newln=ln.replace('\r','\n') |
190 - | # crCount+=n |
191 - | # newlines += newln |
192 - | #if crCount > 0: |
193 - | # casalog.post("The input file appears to contain the carriage return code, \'^M\', will replace it with \'\\n\'...") |
194 - | # raw_input('pause0') |
195 - | # ephem.close() |
196 - | # ephem = open('temp_ephem_data.txt','w+') |
197 - | # ephem.write(newlines) |
198 - | ephem.seek(0) |
199 - | except IOError: |
200 - | casalog.post("Could not open ephemeris file " + fmfile, |
201 - | priority="SEVERE") |
202 - | return {} |
203 - | |
204 - | # reset to default search pattern for MJD |
205 - | cols['MJD']['pat']=r'(?P<MJD>\d+-\w+-\d+ \d+:\d+)' |
206 - | # Setup the regexps. |
207 - | |
208 - | # Headers (one time only things) |
209 - | |
210 - | # Dictionary of quantity label: regexp pattern pairs that will be searched |
211 - | # for once. The matching quantity will go in retdict[label]. Only a |
212 - | # single quantity (group) will be retrieved per line. |
213 - | headers = { |
214 - | 'NAME': {'pat': r'^[>\s]*Target body name:\s+\d*\s*(\w+)'}, # object name, w.o. number |
215 - | 'ephtype': {'pat': r'\?s_type=1#top>\]\s*:\s+\*(\w+)'}, # e.g. OBSERVER |
216 - | 'obsloc': {'pat': r'^[>\s]*Center-site name:\s+(\w+)'}, # e.g. GEOCENTRIC |
217 - | # Catch either an explicit mean radius or a solitary target radius. |
218 - | 'meanrad': {'pat': r'(?:Mean radius \(km\)\s*=|^Target radii\s*:)\s*([0-9.]+)(?:\s*km)?\s*$', |
219 - | 'unit': 'km'}, |
220 - | # Triaxial target radii |
221 - | #'radii': {'pat': r'Target radii\s*:\s*([0-9.]+\s*x\s*[0-9.]+\s*x\s*[0-9.]+)\s*km.*Equator, meridian, pole', |
222 - | 'radii': {'pat': r'Target radii\s*:\s*([0-9.]+\s*x\s*[0-9.]+\s*x\s*[0-9.]+)\s*km.*Equator, meridian, pole|Target radii\s*:\s*([0-9.]+)\s*km\s*', |
223 - | 'unit': 'km'}, |
224 - | 'T_mean': {'pat': r'Mean Temperature \(K\)\s*=\s*([0-9.]+)', |
225 - | 'unit': 'K'}, |
226 - | |
227 - | # Figure out the units later. |
228 - | 'rot_per': {'pat': r'(?i)(?<!Inferred )\b(rot(ation(al)?|\.)?\s*per.*=\s*([-0-9.]+\s*[dhr]*|Synchronous))'}, |
229 - | 'orb_per': {'pat': r'Orbital period((, days)?\s*=\s*[-0-9.]+\s*[dhr](\s*\(?R\)?)?)'}, |
230 - | |
231 - | # MeasComet does not read units for these! E-lon(deg), Lat(deg), Alt(km) |
232 - | 'GeoLong': {'pat': r'^[>\s]*Center geodetic\s*: ([-+0-9.]+,\s*[-+0-9.]+,\s*[-+0-9.]+)'}, |
233 - | 'dMJD': {'pat': r'^[>\s]*Step-size\s*:\s*(.+)'}, |
234 - | |
235 - | # request method v wday mth mday hh mm ss yyyy |
236 - | 'VS_CREATE': {'pat': r'^[>\s]*Ephemeris / \w+ \w+ (\w+\s+\d+\s+\d+:\d+:\d+\s+\d+)'} |
237 - | } |
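    # For instance, a header line such as
    #   Center-site name: GEOCENTRIC
    # is matched by headers['obsloc']['pat'], and its group(1), 'GEOCENTRIC',
    # is stored as retdict['obsloc'] by the header-scanning loop further below.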
238 - | for hk in headers: |
239 - | headers[hk]['pat'] = re.compile(headers[hk]['pat']) |
240 - | |
241 - | # Data ("the rows of the table") |
242 - | |
243 - | # need date, r (heliocentric distance), delta (geocentric distance), and phang (phase angle). |
244 - | # (Could use the "dot" time derivatives for Doppler shifting, but it's |
245 - | # likely unnecessary.) |
246 - | #datapat = r'^[>\s]*' |
247 - | datapat = r'^\s*' |
248 - | |
249 - | stoppat = r'[>\s]*\$\$EOE$' # Signifies the end of data. |
250 - | |
251 - | # Read fmfile into retdict. |
252 - | num_cols = 0 |
253 - | in_data = False |
254 - | comp_mismatches = [] |
255 - | print_datapat = False |
256 - | # define interpretation of invalid values ('n.a.') |
257 - | invalid=-999. |
258 - | for origline in ephem: |
259 - | if is_python3: |
260 - | #line = origline.decode("utf-8").rstrip('\r\n') |
261 - | line = origline.decode(sys.getdefaultencoding( ),"strict").rstrip('\r\n') |
262 - | else: |
263 - | line = origline.rstrip('\r\n') |
264 - | if in_data: |
265 - | if re.match(stoppat, line): |
266 - | break |
267 - | matchobj = re.search(datapat, line) |
268 - | if matchobj: |
269 - | #casalog.post("matchobj!") |
270 - | gdict = matchobj.groupdict() |
271 - | #casalog.post("gdict=",gdict) |
272 - | for col in gdict: |
273 - | if gdict[col]=='n.a.': |
274 - | gdict[col]=invalid |
275 - | # casalog.post("cols.key=",cols.keys()) |
276 - | |
277 - | if not cols[col].get('unwanted'): |
278 - | retdict['data'][col]['data'].append(gdict[col]) |
279 - | if len(gdict) < num_cols: |
280 - | casalog.post("Partially mismatching line: {}".format(line)) |
281 - | casalog.post("Found: {}".format(gdict)) |
282 - | print_datapat = True |
283 - | #raw_input("pause0")  # debug-only pause; raw_input is not defined in Python 3 |
284 - | else: |
285 - | print_datapat = True |
286 - | # Chomp trailing whitespace. |
287 - | comp_mismatches.append(re.sub(r'\s*$', '', line)) |
288 - | elif re.match(r'^[>\s]*' + cols['MJD']['header'] + r'\s+' |
289 - | + cols['RA']['header'], line): |
290 - | # need to modify the regex if the MJD header name includes a seconds field |
291 - | if re.match(r'^[>\s]*Date__\(UT\)__HR:MN:SC.fff', line): |
292 - | cols['MJD']['pat'] = r'(?P<MJD>\d+-\w+-\d+ \d+:\d+:\d+.\d+)' |
293 - | # See what columns are present, and finish setting up datapat and |
294 - | # retdict. |
295 - | havecols = [] |
296 - | # extract coordinate ref info |
297 - | |
298 - | m=re.match(r'(^[>\s]*)(\S+)(\s+)('+cols['RA']['header']+')', line) |
299 - | coordref=m.group(4).split('(')[-1] |
300 - | cols['RA']['comment']+='('+coordref+')' |
301 - | cols['DEC']['comment']+='('+coordref+')' |
302 - | # casalog.post( "cols['RA']['comment']=" + cols['RA']['comment']) |
303 - | # Chomp trailing whitespace. |
304 - | myline = re.sub(r'\s*$', '', line) |
305 - | titleline = myline |
306 - | remaining_cols = list(cols.keys()) |
307 - | found_col = True |
308 - | # This loop will terminate one way or another. |
309 - | while myline and remaining_cols and found_col: |
310 - | found_col = False |
311 - | #casalog.post("myline = '%s'" % myline) |
312 - | #casalog.post("remaining_cols =" + ', '.join(remaining_cols) |
313 - | for col in remaining_cols: |
314 - | if re.match(r'^[>\s]*' + cols[col]['header'], myline): |
315 - | # casalog.post("Found" + col) |
316 - | havecols.append(col) |
317 - | remaining_cols.remove(col) |
318 - | myline = re.sub(r'^[>\s]*' + cols[col]['header'], |
319 - | '', myline) |
320 - | found_col = True |
321 - | break |
322 - | datapat += r'\s+'.join([cols[col]['pat'] for col in havecols]) |
323 - | sdatapat = datapat |
324 - | casalog.post("Found columns: " + ', '.join(havecols)) |
325 - | datapat = re.compile(datapat) |
326 - | retdict['data'] = {} |
327 - | for col in havecols: |
328 - | if not cols[col].get('unwanted'): |
329 - | retdict['data'][col] = {'comment': cols[col]['comment'], |
330 - | 'data': []} |
331 - | num_cols = len(retdict['data']) |
332 - | #elif re.match(r'^\$\$SOE\s*$', line): # Start of ephemeris |
333 - | elif re.match(r'^[>\s]*\$\$SOE\s*$', line): # Start of ephemeris |
334 - | casalog.post("Starting to read data.", priority='INFO2') |
335 - | in_data = True |
336 - | else: |
337 - | #casalog.post("line =" + line) |
338 - | #casalog.post("looking for") |
339 - | for hk in headers: |
340 - | #casalog.post("hk=" + hk) |
341 - | |
342 - | if not hk in retdict: |
343 - | matchobj = re.search(headers[hk]['pat'], line) |
344 - | if matchobj: |
345 - | if hk=='radii': |
346 - | mobjs=matchobj.groups() |
347 - | for gp in mobjs: |
348 - | if gp!=None: |
349 - | retdict[hk] = gp |
350 - | break |
351 - | break |
352 - | else: |
353 - | retdict[hk] = matchobj.group(1) # 0 is the whole line |
354 - | break |
355 - | ephem.close() |
356 - | # clean up the temp file if it exists |
357 - | #if os.path.exists('temp_ephem_data.txt'): |
358 - | # os.remove('temp_ephem_data.txt') |
359 - | |
360 - | # If there were errors, provide debugging info. |
361 - | if comp_mismatches: |
362 - | casalog.post("Completely mismatching lines:") |
363 - | #casalog.post("\n".join(comp_mismatches)) |
364 - | if print_datapat: |
365 - | casalog.post("The apparent title line is:") |
366 - | casalog.post(titleline) |
367 - | casalog.post("datapat = r'%s'" % sdatapat) |
368 - | |
369 - | # Convert numerical strings into actual numbers. |
370 - | try: |
371 - | retdict['earliest'] = datestr_to_epoch(retdict['data']['MJD']['data'][0]) |
372 - | retdict['latest'] = datestr_to_epoch(retdict['data']['MJD']['data'][-1]) |
373 - | except Exception: |
374 - | casalog.post("Error!", 'ERROR') |
375 - | if 'data' in retdict: |
376 - | if 'MJD' in retdict['data']: |
377 - | if 'data' in retdict['data']['MJD']: |
378 - | #casalog.post( "retdict['data']['MJD']['data'] =" + retdict['data']['MJD']['data']) |
379 - | casalog.post("retdict['data'] = %s" % retdict['data']) |
380 - | else: |
381 - | casalog.post("retdict['data']['MJD'] has no 'data' key.") |
382 - | casalog.post("retdict['data']['MJD'].keys() = %s" % retdict['data']['MJD'].keys()) |
383 - | else: |
384 - | casalog.post("retdict['data'] has no 'MJD' key.") |
385 - | casalog.post("retdict['data'].keys() = %s" % retdict['data'].keys()) |
386 - | else: |
387 - | casalog.post("retdict has no 'data' key.") |
388 - | raise |
389 - | |
390 - | for hk in headers: |
391 - | if hk in retdict: |
392 - | if 'unit' in headers[hk]: |
393 - | if hk == 'radii': |
394 - | radii = retdict[hk].split('x') |
395 - | if len(radii)==1: |
396 - | a = float(radii[0]) |
397 - | retdict[hk] = {'unit': headers[hk]['unit'], 'value': (a,a,a)} |
398 - | retdict['meanrad'] = {'unit': headers[hk]['unit'], |
399 - | 'value': a} |
400 - | else: |
401 - | a, b, c = [float(r) for r in radii] |
402 - | retdict[hk] = {'unit': headers[hk]['unit'], |
403 - | 'value': (a, b, c)} |
404 - | retdict['meanrad'] = {'unit': headers[hk]['unit'], |
405 - | 'value': mean_radius(a, b, c)} |
406 - | else: |
407 - | try: |
408 - | # meanrad might already have been converted. |
409 - | if type(retdict[hk]) != dict: |
410 - | retdict[hk] = {'unit': headers[hk]['unit'], |
411 - | 'value': float(retdict[hk])} |
412 - | except Exception: |
413 - | casalog.post("Error converting header %s to a Quantity." % hk) |
414 - | casalog.post("retdict[hk] = %s" % retdict[hk]) |
415 - | raise |
416 - | elif hk == 'GeoLong': |
417 - | long_lat_alt = retdict[hk].split(',') |
418 - | retdict['GeoLong'] = float(long_lat_alt[0]) |
419 - | retdict['GeoLat'] = float(long_lat_alt[1]) |
420 - | retdict['GeoDist'] = float(long_lat_alt[2]) |
421 - | elif hk == 'dMJD': |
422 - | retdict[hk] = _qa.convert(_qa.totime(retdict[hk].replace('minutes', 'min')), |
423 - | 'd')['value'] |
424 - | elif hk == 'orb_per': |
425 - | unit = 'h' |
426 - | retrograde = False |
427 - | if 'd' in retdict[hk].lower(): |
428 - | unit = 'd' # Actually this is most common. |
429 - | if 'r' in retdict[hk].lower(): |
430 - | retrograde = True |
431 - | value = get_num_from_str(retdict[hk], 'orbital period') |
432 - | if value != False: |
433 - | if retrograde and value > 0.0: |
434 - | value = -value |
435 - | retdict[hk] = {'unit': unit, 'value': value} |
436 - | else: |
437 - | del retdict[hk] |
438 - | |
439 - | # The rotation period might depend on the orbital period ("Synchronous"), |
440 - | # so handle it after all the other headers have been done. |
441 - | if 'rot_per' in retdict: |
442 - | rpstr = retdict['rot_per'] |
443 - | if 'ROTPER' in rpstr: # Asteroid |
444 - | retdict['rot_per'] = {'unit': 'h', # Always seems to be for asteroids. |
445 - | 'value': get_num_from_str(rpstr, 'rotation period')} |
446 - | elif 'Synchronous' in rpstr: |
447 - | retdict['rot_per'] = retdict['orb_per'] |
448 - | else: # Most likely a planet. |
449 - | match = re.search(r'(\d+)h\s*(\d+)m\s*([0-9.]+)s', rpstr) |
450 - | if match: |
451 - | hms = [float(match.group(i)) for i in range(1, 4)] |
452 - | retdict['rot_per'] = {'unit': 'h', |
453 - | 'value': hms[0] + (hms[1] + hms[2] / 60.0) / 60.0} |
454 - | else: |
455 - | # DON'T include the optional r in hr! qa.totime can't handle it. |
456 - | try: |
457 - | match = re.search(r'([-0-9.]+)(?:\s*\+-[0-9.]+)?\s*([dh])', rpstr) |
458 - | if match: |
459 - | retdict['rot_per'] = {'unit': match.group(2), |
460 - | 'value': float(match.group(1))} |
461 - | except: |
462 - | casalog.post("Error parsing the rotation period from: {}".format(rpstr)) |
463 - | |
464 - | if 'ang_sep' in retdict['data']: |
465 - | retdict['data']['obs_code'] = {'comment': 'Obscuration code'} |
466 - | for dk in retdict['data']: |
467 - | if dk == 'obs_code': |
468 - | continue |
469 - | if 'unit' in cols[dk]: |
470 - | retdict['data'][dk]['data'] = {'unit': cols[dk]['unit'], |
471 - | 'value': scipy.array([float(s) for s in retdict['data'][dk]['data']])} |
472 - | if dk == 'RadVel': |
473 - | # Convert from km/s to AU/d. Blame MeasComet, not me. |
474 - | retdict['data'][dk]['data']['unit'] = 'AU/d' |
475 - | kmps_to_AUpd = _qa.convert('1km/s', 'AU/d')['value'] |
476 - | retdict['data'][dk]['data']['value'] *= kmps_to_AUpd |
477 - | |
478 - | if re.match(r'.*(RA|DEC)$', dk): |
479 - | retdict['data'][dk] = convert_radec(retdict['data'][dk]) |
480 - | elif dk == 'MJD': |
481 - | retdict['data']['MJD'] = datestrs_to_MJDs(retdict['data']['MJD']) |
482 - | elif dk == 'ang_sep': |
483 - | angseps = [] |
484 - | obscodes = [] |
485 - | for asoc in retdict['data'][dk]['data']: |
486 - | angsep, obscode = asoc.split('/') |
487 - | angseps.append(float(angsep)) |
488 - | obscodes.append(obscode) |
489 - | retdict['data'][dk]['data'] = {'unit': 'arcseconds', |
490 - | 'value': angseps} |
491 - | retdict['data']['obs_code']['data'] = obscodes |
492 - | |
493 - | if len(retdict.get('radii', {'value': []})['value']) == 3 \ |
494 - | and 'NP_RA' in retdict['data'] and 'NP_DEC' in retdict['data']: |
495 - | # Do a better mean radius estimate using the actual theta. |
496 - | retdict['meanrad']['value'] = mean_radius_with_known_theta(retdict) |
497 - | |
498 - | # To be eventually usable as a MeasComet table, a few more keywords are needed. |
499 - | retdict['VS_TYPE'] = 'Table of comet/planetary positions' |
500 - | if version=='': |
501 - | version='0003.0001' |
502 - | #retdict['VS_VERSION'] = '0003.0001' |
503 - | retdict['VS_VERSION'] = version |
504 - | if 'VS_CREATE' in retdict: |
505 - | dt = time.strptime(retdict['VS_CREATE'], "%b %d %H:%M:%S %Y") |
506 - | else: |
507 - | casalog.post("The ephemeris creation date was not found. Using the current time.", |
508 - | priority="WARN") |
509 - | dt = time.gmtime() |
510 - | retdict['VS_CREATE'] = time.strftime('%Y/%m/%d/%H:%M', dt) |
511 - | |
512 - | # VS_DATE is required by MeasComet, but it doesn't seem to be actually used. |
513 - | retdict['VS_DATE'] = time.strftime('%Y/%m/%d/%H:%M', time.gmtime()) |
514 - | |
515 - | if 'MJD' in retdict['data']: |
516 - | #casalog.post("retdict.keys=%s" % retdict.keys()) |
517 - | retdict['MJD0'] = retdict['data']['MJD']['value'][0] - retdict['dMJD'] |
518 - | else: |
519 - | casalog.post("The table will not be usable with me.framecomet because it lacks MJD.") |
520 - | |
521 - | # adding posrefsys keyword |
522 - | if cols['RA']['comment'].count('J2000'): |
523 - | retdict['posrefsys']='ICRF/J2000.0' |
524 - | if cols['RA']['comment'].count('B1950'): |
525 - | retdict['posrefsys']='FK4/B1950.0' |
526 - | |
527 - | return retdict |
528 - | |
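# End-to-end usage sketch (hypothetical file and table names; it needs a
# JPL-Horizons ASCII ephemeris on disk and a CASA session to run):
#
#   fmdict = readJPLephem('mars_jpl.ephem')
#   if fmdict:
#       ephem_dict_to_table(fmdict, 'Mars_ephem.tab')
#
# jplfiles_to_repository() below wraps this same sequence for a whole
# directory of ephemeris files.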
529 - | def convert_radec(radec_col): |
530 - | """ |
531 - | Converts a column of RA or declination strings, radec_col, into a |
532 - | quantity column. (Unfortunately MeasComet assumes the columns are |
533 - | Quantities instead of Measures, and uses GeoDist == 0.0 to toggle between |
534 - | APP and TOPO.) |
535 - | """ |
536 - | angstrlist = radec_col['data'] |
537 - | angq = {} |
538 - | nrows = len(angstrlist) |
539 - | |
540 - | if len(angstrlist[0].split()) > 1: |
541 - | # Prep angstrlist for qa.toangle() |
542 - | if radec_col['comment'][:len("declination")].lower() == 'declination': |
543 - | for i in range(nrows): |
544 - | dms = angstrlist[i].replace(' ', 'd', 1) |
545 - | angstrlist[i] = dms.replace(' ', 'm') + 's' |
546 - | else: # R.A. |
547 - | for i in range(nrows): |
548 - | angstrlist[i] = angstrlist[i].replace(' ', ':') |
549 - | |
550 - | # Do first conversion to get unit. |
551 - | try: |
552 - | firstang = _qa.toangle(angstrlist[0]) |
553 - | except Exception: |
554 - | casalog.post("Error: Could not convert %s to an angle." % angstrlist[0], 'ERROR') |
555 - | raise |
556 - | angq['unit'] = firstang['unit'] |
557 - | angq['value'] = [firstang['value']] |
558 - | |
559 - | for angstr in angstrlist[1:]: |
560 - | angq['value'].append(_qa.toangle(angstr)['value']) |
561 - | else: |
562 - | angq['unit'] = 'deg' # This is an assumption! |
563 - | angq['value'] = [float(a) for a in angstrlist] |
564 - | |
565 - | return {'comment': radec_col['comment'], |
566 - | 'data': {'unit': angq['unit'], |
567 - | 'value': scipy.array(angq['value'])}} |
568 - | |
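# Minimal usage sketch for convert_radec() (the helper below is hypothetical
# and never called; running it requires the CASA quanta tool, i.e. a
# casapy/casatools session):
def _example_convert_radec():
    ra_col = {'comment': 'Right Ascension(ICRF/J2000.0)',
              'data': ['09 01 43.1966', '09 01 55.0000']}
    # Returns {'comment': ..., 'data': {'unit': ..., 'value': array([...])}},
    # i.e. a quantity column of the form dict_to_table() expects.
    return convert_radec(ra_col)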
569 - | def get_num_from_str(fstr, wanted="float"): |
570 - | """ |
571 - | Like float(fstr) on steroids, in that it ignores things in fstr that aren't |
572 - | numbers. Returns False on failure. |
573 - | |
574 - | wanted: an optional label for the type of number you wanted. |
575 - | Only used for distinguishing error messages. |
576 - | |
577 - | Example: |
578 - | >>> from JPLephem_reader import get_num_from_str |
579 - | >>> get_num_from_str(' Sidereal rot. period = 58.6462 d ') |
580 - | 58.6462 |
581 - | >>> get_num_from_str('Rotation period = 16.11+-0.01 hr', wanted='rotation period') |
582 - | 16.109999999999999 |
583 - | >>> get_num_from_str('Rotation period = Synchronous', wanted='rotation period') |
584 - | Could not convert "Rotation period = Synchronous" to a rotation period. |
585 - | False |
586 - | """ |
587 - | match = re.search(r'([-+]?(\d+(\.\d*)?|\d*\.\d+)([eEdD][-+]?\d+)?)', fstr) |
588 - | if match: |
589 - | value = float(match.group(1)) |
590 - | else: |
591 - | casalog.post("Could not convert \"%s\" to a %s." % (fstr, wanted)) |
592 - | value = False |
593 - | return value |
594 - | |
595 - | def mean_radius(a, b, c): |
596 - | """ |
597 - | Return the average apparent mean radius of an ellipsoid with semiaxes |
598 - | a >= b >= c. |
599 - | "average" means average over time naively assuming the pole orientation |
600 - | is uniformly distributed over the whole sphere, and "apparent mean radius" |
601 - | means a radius that would give the same area as the apparent disk. |
602 - | """ |
603 - | # This is an approximation, but it's not bad. |
604 - | # The exact equations for going from a, b, c, and the Euler angles to the |
605 - | # apparent ellipse are given in Drummond et al, Icarus, 1985a. |
606 - | # It's the integral over the spin phase that I have approximated, so the |
607 - | # approximation is exact for b == a, and appears to hold well for b << a. |
608 - | R = 0.5 * c**2 * (1.0 / b**2 + 1.0 / a**2) # The magic ratio. |
609 - | if R < 0.95: |
610 - | sqrt1mR = scipy.sqrt(1.0 - R) |
611 - | # There is fake singularity (RlnR) at R = 0, but it is unlikely to |
612 - | # be a problem. |
613 - | try: |
614 - | Rterm = 0.5 * R * scipy.log((1.0 + sqrt1mR) / (1.0 - sqrt1mR)) / sqrt1mR |
615 - | except: |
616 - | Rterm = 0.0 |
617 - | else: |
618 - | # Use a (rapidly converging) series expansion to avoid a fake |
619 - | # singularity at R = 1. |
620 - | Rterm = 1.0 # 0th order |
621 - | onemR = 1.0 - R |
622 - | onemRtothei = 1.0 |
623 - | for i in range(1, 5): # Start series at 1st order. |
624 - | onemRtothei *= onemR |
625 - | Rterm -= onemRtothei / (0.5 + 2.0 * i**2) |
626 - | avalfabeta = 0.5 * a * b * (1.0 + Rterm) |
627 - | return scipy.sqrt(avalfabeta) |
628 - | |
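# Quick sketch of mean_radius() on made-up, slightly oblate semiaxes
# (illustrative values only; the helper is never called):
def _example_mean_radius():
    # a = b > c, so the result comes out just under a.
    return mean_radius(2440.5, 2440.5, 2438.3)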
629 - | def mean_radius_with_known_theta(retdict): |
630 - | """ |
631 - | Return the average apparent mean radius of an ellipsoid with semiaxes |
632 - | a >= b >= c (= retdict['radii']['value']). |
633 - | "average" means average over a rotation period, and "apparent mean radius" |
634 - | means the radius of a circle with the same area as the apparent disk. |
635 - | """ |
636 - | a = retdict['radii']['value'][0] |
637 - | b2 = retdict['radii']['value'][1]**2 |
638 - | c2 = retdict['radii']['value'][2]**2 |
639 - | onemboa2 = 1.0 - b2 / a**2 |
640 - | units = {} |
641 - | values = {} |
642 - | for c in ['RA', 'DEC', 'NP_RA', 'NP_DEC']: |
643 - | units[c] = retdict['data'][c]['data']['unit'] |
644 - | values[c] = retdict['data'][c]['data']['value'] |
645 - | av = 0.0 |
646 - | nrows = len(retdict['data']['RA']['data']['value']) |
647 - | for i in range(nrows): |
648 - | radec = _me.direction('app', {'unit': units['RA'], 'value': values['RA'][i]}, |
649 - | {'unit': units['DEC'], 'value': values['DEC'][i]}) |
650 - | np = _me.direction('j2000', {'unit': units['NP_RA'], 'value': values['NP_RA'][i]}, |
651 - | {'unit': units['NP_DEC'], 'value': values['NP_DEC'][i]}) |
652 - | szeta2 = scipy.sin(_qa.convert(_me.separation(radec, np), 'rad')['value'])**2 |
653 - | csinz2 = c2 * szeta2 |
654 - | bcosz2 = b2 * (1.0 - szeta2) |
655 - | bcz2pcsz2 = bcosz2 + csinz2 |
656 - | m = csinz2 * onemboa2 / bcz2pcsz2 |
657 - | av += (scipy.sqrt(bcz2pcsz2) * scipy.special.ellipe(m) - av) / (i + 1.0) |
658 - | return scipy.sqrt(2.0 * a * av / scipy.pi) |
659 - | |
660 - | def datestr_to_epoch(datestr): |
661 - | """ |
662 - | Given a UT date like "2010-May-01 00:00", returns an epoch measure. |
663 - | """ |
664 - | return _me.epoch(rf='UTC', v0=_qa.totime(datestr)) |
665 - | |
666 - | def datestrs_to_MJDs(cdsdict): |
667 - | """ |
668 - | All of the date strings must have the same reference frame (i.e. UT). |
669 - | """ |
670 - | datestrlist = cdsdict['data'] |
671 - | |
672 - | # Convert to FITS format, otherwise qa.totime() will silently drop the hours. |
673 - | datestrlist = [d.replace(' ', 'T') for d in datestrlist] |
674 - | |
675 - | timeq = {} |
676 - | # Do first conversion to get unit. |
677 - | firsttime = _qa.totime(datestrlist[0]) |
678 - | timeq['unit'] = firsttime['unit'] |
679 - | timeq['value'] = [firsttime['value']] |
680 - | |
681 - | for datestr in datestrlist[1:]: |
682 - | timeq['value'].append(_qa.totime(datestr)['value']) |
683 - | |
684 - | return {'unit': timeq['unit'], |
685 - | 'value': scipy.array(timeq['value'])} |
686 - | |
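# For example (illustrative; needs the CASA quanta tool at run time),
#   datestrs_to_MJDs({'data': ['2010-May-01 00:00', '2010-May-02 00:00']})
# would return {'unit': 'd', 'value': array([55317., 55318.])}.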
687 - | def construct_tablepath(fmdict, prefix=''): |
688 - | """ |
689 - | Construct a suitable pathname for a CASA table made from fmdict, |
690 - | starting with prefix. prefix can contain a /. |
691 - | |
692 - | If prefix is not given, it will be set to |
693 - | "ephem_JPL-Horizons_%s" % fmdict['NAME'] |
694 - | """ |
695 - | if not prefix: |
696 - | prefix = "ephem_JPL-Horizons_%s" % fmdict['NAME'] |
697 - | return prefix + "_%.0f-%.0f%s%s.tab" % (fmdict['earliest']['m0']['value'], |
698 - | fmdict['latest']['m0']['value'], |
699 - | fmdict['latest']['m0']['unit'], |
700 - | fmdict['latest']['refer']) |
701 - | |
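# Illustrative sketch of construct_tablepath() with a minimal, hand-built
# fmdict (hypothetical values; the helper is never called):
def _example_construct_tablepath():
    fmdict = {'NAME': 'Mars',
              'earliest': {'m0': {'value': 55317.0, 'unit': 'd'}, 'refer': 'UTC'},
              'latest': {'m0': {'value': 55319.0, 'unit': 'd'}, 'refer': 'UTC'}}
    # -> 'ephem_JPL-Horizons_Mars_55317-55319dUTC.tab'
    return construct_tablepath(fmdict)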
702 - | def dict_to_table(indict, tablepath, kwkeys=[], colkeys=[], info=None, keepcolorder=False): |
703 - | """ |
704 - | Converts a dictionary to a CASA table, and attempts to |
705 - | save it to tablepath. Returns whether or not it was successful. |
706 - | |
707 - | kwkeys is a list of keys in dict that should be treated as table keywords, |
708 - | and colkeys is a list of keys to be treated as table columns. If a key in |
709 - | indict is not in either kwkeys or colkeys, it will be appended to colkeys |
710 - | if it refers to a list, array, or specially formed dict with the right |
711 - | number of rows, or kwkeys otherwise. |
712 - | |
713 - | "Specially formed dict" means a python dictionary with the right keys to |
714 - | provide a comment and/or keywords to specify a (measure) frame or |
715 - | (quantity) unit for the column. |
716 - | |
717 - | The number of rows is set by the first column. The order of the columns is |
718 - | the order of colkeys, followed by the remaining columns in alphabetical |
719 - | order. |
720 - | |
721 - | Example: |
722 - | mydict = {'delta': [1.2866, 1.2957, 1.3047], |
723 - | 'obs_code': ['*', 'U', 't'], |
724 - | 'date': {'m0': {'unit': 'd', |
725 - | 'value': [55317.0, 55318.0, 55319.0]}, |
726 - | 'refer': 'UT1', |
727 - | 'type': 'epoch'}, |
728 - | 'phang': {'comment': 'phase angle', |
729 - | 'data': {'unit': 'deg', |
730 - | 'value': array([37.30, 37.33, 37.36])}}} |
731 - | |
732 - | # Produces a table with, in order, a measure column (date), two bare |
733 - | # columns (delta and obs_code), and a commented quantity column (phang). |
734 - | # The comment goes in the 'comment' field of the column description. |
735 - | # Measure and straight array columns can also be described by using a |
736 - | # {'comment': (description), 'data': (measure, quantity, numpy.array or |
737 - | # list)} dict. |
738 - | dict_to_table(mydict, 'd_vs_phang.tab') |
739 - | |
740 - | TODO: detect non-float data types, including array cells. |
741 - | """ |
742 - | nrows = 0 |
743 - | dkeys = list(indict.keys()) |
744 - | keywords = [] |
745 - | cols = [] |
746 - | |
747 - | def get_bare_col(col): |
748 - | """ |
749 - | Given a col that could be a bare column (list or array), or measure or |
750 - | quantity containing a bare column, return the bare column. |
751 - | """ |
752 - | barecol = col |
753 - | if isinstance(barecol,dict): |
754 - | if 'comment' in barecol: |
755 - | barecol = barecol.get('data') |
756 - | if type(barecol)==dict and _me.ismeasure(barecol): |
757 - | barecol = barecol['m0'] |
758 - | # if qa.isquantity(data) can't be trusted. |
759 - | if isinstance(barecol,dict) and 'unit' in barecol and 'value' in barecol: |
760 - | barecol = barecol['value'] |
761 - | return barecol |
762 - | |
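    # e.g. get_bare_col({'comment': 'phase angle',
    #                    'data': {'unit': 'deg', 'value': [37.30, 37.33]}})
    # peels off the comment and the unit and returns just [37.30, 37.33].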
763 - | # Divvy up the known keywords and columns, if present, preserving the |
764 - | # requested order. |
765 - | for kw in kwkeys: |
766 - | if kw in dkeys: |
767 - | # Take kw out of dkeys and put it in keywords. |
768 - | keywords.append(dkeys.pop(dkeys.index(kw))) |
769 - | for c in colkeys: |
770 - | if c in dkeys: |
771 - | cols.append(dkeys.pop(dkeys.index(c))) |
772 - | if nrows == 0: |
773 - | nrows = len(get_bare_col(indict[c])) |
774 - | casalog.post("Got nrows = %s from %s" % (nrows,c)) |
775 - | |
776 - | # Go through what's left of dkeys and assign them to either keywords or |
777 - | # cols. |
778 - | dkeys.sort() |
779 - | for d in dkeys: |
780 - | used_as_col = False |
781 - | colcand = get_bare_col(indict[d]) |
782 - | # Treat it as a column if it has the right number of rows. |
783 - | if type(colcand) in (list, numpy.ndarray): |
784 - | if nrows == 0: |
785 - | nrows = len(colcand) |
786 - | if len(colcand) == nrows: |
787 - | cols.append(d) |
788 - | used_as_col = True |
789 - | if not used_as_col: |
790 - | keywords.append(d) |
791 - | |
792 - | # Make the table's description. |
793 - | tabdesc = {} |
794 - | # Initialize the column descriptor with defaults (these come from |
795 - | # data/ephemerides/DE200, but I replaced IncrementalStMan with StandardStMan). |
796 - | coldesc = {'comment': '', |
797 - | 'dataManagerGroup': '', |
798 - | 'dataManagerType': 'StandardStMan', |
799 - | 'maxlen': 0, |
800 - | 'option': 0, |
801 - | 'valueType': 'double'} # Use double (not float!) for columns |
802 - | # that will be read by MeasIERS. |
803 - | for c in cols: |
804 - | # casalog.post("Setting coldesc for" + c) |
805 - | data = indict[c] # Place to find the valueType. |
806 - | |
807 - | if isinstance(data,dict): |
808 - | # casalog.post("comment =" + data.get('comment', '')) |
809 - | coldesc['comment'] = data.get('comment', '') |
810 - | |
811 - | data = get_bare_col(data) |
812 - | datatype = type(data[0]) |
813 - | if datatype == float or datatype == numpy.float_: |
814 - | valtype = "float" |
815 - | elif datatype == numpy.float64: |
816 - | valtype = "double" |
817 - | elif datatype == int or datatype == numpy.int32 or datatype == numpy.int16 or datatype == numpy.int_: |
818 - | valtype = 'integer' |
819 - | elif datatype == str: |
820 - | valtype = 'string' |
821 - | |
822 - | # Use double (not float!) for columns that will be read by MeasIERS. |
823 - | if valtype == 'float': |
824 - | valtype = 'double' |
825 - | |
826 - | coldesc['valueType'] = valtype |
827 - | |
828 - | tabdesc[c] = coldesc.copy() |
829 - | |
830 - | # Since tables are directories, it saves a lot of grief if we first check |
831 - | # whether the table exists and is under svn control. |
832 - | svndir = None |
833 - | if os.path.isdir(tablepath): |
834 - | if os.path.isdir(tablepath + '/.svn'): |
835 - | # tempfile is liable to use /tmp, which can be too small and/or slow. |
836 - | # Use the directory that tablepath is in, since we know the user |
837 - | # approves of writing to it. |
838 - | workingdir = os.path.abspath(os.path.dirname(tablepath.rstrip('/'))) |
839 - | |
840 - | svndir = tempfile.mkdtemp(dir=workingdir) |
841 - | shutil.move(tablepath + '/.svn', svndir) |
842 - | casalog.post("Removing existing table directory %s" % tablepath) |
843 - | shutil.rmtree(tablepath) |
844 - | |
845 - | # Create and fill the table. |
846 - | retval = True |
847 - | try: |
848 - | mytb = tbtool() |
849 - | tmpfname='_tmp_fake.dat' |
850 - | if keepcolorder: |
851 - | # try to keep order of cols |
852 - | # Ugly, but since tb.create() cannot accept an ordered dictionary |
853 - | # for tabledesc, I cannot find any other way to keep column order. |
854 - | # * comment for each column will not be filled |
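            # The throwaway ASCII file written below looks like, for example:
            #   MJD RA DEC ...
            #   D D D ...
            #   0.0 0.0 0.0 ...
            # i.e. one line of column names, one line of type codes, and one
            # dummy data row (which is removed again right after the table is
            # created from it).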
855 - | f = open(tmpfname,'w') |
856 - | zarr=numpy.zeros(len(cols)) |
857 - | szarr=str(zarr.tolist()) |
858 - | szarr=szarr.replace('[','') |
859 - | szarr=szarr.replace(']','') |
860 - | szarr=szarr.replace(',','') |
861 - | scollist='' |
862 - | sdtypes='' |
863 - | for c in cols: |
864 - | scollist+=c+' ' |
865 - | vt=tabdesc[c]['valueType'] |
866 - | if vt=='string': |
867 - | sdtypes+='A ' |
868 - | elif vt=='integer': |
869 - | sdtypes+='I ' |
870 - | elif vt=='double': |
871 - | sdtypes+='D ' |
872 - | elif vt=='float': |
873 - | sdtypes+='R ' |
874 - | |
875 - | f.write(scollist+'\n') |
876 - | f.write(sdtypes+'\n') |
877 - | f.write(szarr) |
878 - | |
879 - | f.close() |
880 - | mytb.fromascii(tablepath,tmpfname,sep=' ') |
881 - | # close and re-open since tb.fromascii(nomodify=False) has not |
882 - | # been implemented yet |
883 - | mytb.close() |
884 - | os.remove(tmpfname) |
885 - | mytb.open(tablepath, nomodify=False) |
886 - | mytb.removerows(0) |
887 - | else: |
888 - | mytb.create(tablepath, tabdesc) |
889 - | if type(info) == dict: |
890 - | mytb.putinfo(info) |
891 - | mytb.addrows(nrows) # Must be done before putting the columns. |
892 - | |
893 - | except Exception as e: |
894 - | casalog.post("Error %s trying to create %s" % (e,tablepath), 'ERROR') |
895 - | retval = False |
896 - | |
897 - | for c in cols: |
898 - | try: |
899 - | #casalog.post("tabdesc[%s] =" % c + tabdesc[c]) |
900 - | data = indict[c] # Note the trickle-down nature below. |
901 - | if isinstance(indict[c],dict) and 'comment' in indict[c]: |
902 - | data = data['data'] |
903 - | if type(data)==dict and _me.ismeasure(data): |
904 - | mytb.putcolkeyword(c, 'MEASINFO', {'Ref': data['refer'], |
905 - | 'type': data['type']}) |
906 - | data = data['m0'] # = quantity |
907 - | # if qa.isquantity(data) can't be trusted. |
908 - | if isinstance(data,dict) and 'unit' in data and 'value' in data: |
909 - | mytb.putcolkeyword(c, 'QuantumUnits', |
910 - | numpy.array([data['unit']])) |
911 - | data = data['value'] |
912 - | mytb.putcol(c, data) |
913 - | except Exception as e: |
914 - | casalog.post("Error %s trying to put column %s in %s" % (e,c,tablepath), 'ERROR') |
915 - | casalog.post("data[0] = %s" % data[0]) |
916 - | casalog.post("tabdesc[c] = %s" % tabdesc[c]) |
917 - | retval = False |
918 - | |
919 - | for k in keywords: |
920 - | try: |
921 - | mytb.putkeyword(k, indict[k]) |
922 - | except Exception as e: |
923 - | casalog.post("Error %s trying to put keyword %s in %s" % (e,k,tablepath), 'ERROR') |
924 - | retval = False |
925 - | mytb.close() |
926 - | |
927 - | if svndir: |
928 - | shutil.move(svndir, tablepath + '/.svn') |
929 - | |
930 - | return retval |
931 - | |
932 - | def ephem_dict_to_table(fmdict, tablepath='', prefix=''): |
933 - | """ |
934 - | Converts a dictionary from readJPLephem() to a CASA table, and attempts to |
935 - | save it either to tablepath or to a constructed directory name. |
936 - | Returns whether or not it was successful. |
937 - | |
938 - | If tablepath is blank and prefix is not given, the table will go to |
939 - | something like ephem_JPL-Horizons_NAME_EARLIEST-LATESTdUTC.tab. |
940 - | |
941 - | If tablepath is blank and prefix is given, the table will go to |
942 - | something like prefix_EARLIEST-LATESTdUTC.tab. prefix can contain a /. |
943 - | """ |
944 - | if not tablepath: |
945 - | tablepath = construct_tablepath(fmdict, prefix) |
946 - | casalog.post("Writing to %s" % tablepath) |
947 - | |
948 - | # safeguard against dict_to_table() zapping the current directory |
949 - | if tablepath=='.' or tablepath=='./' or tablepath.isspace(): |
950 - | raise Exception("Invalid tablepath: %s" % tablepath) |
951 - | retval = True |
952 - | # keepcolorder=T preserves column ordering in collist below |
953 - | #keepcolorder=False |
954 - | keepcolorder=True |
955 - | try: |
956 - | outdict = fmdict.copy() # Yes, I want a shallow copy. |
957 - | #kws = fmdict.keys() |
958 - | # reorder the keywords |
959 - | okws=['VS_CREATE','VS_DATE','VS_TYPE', 'VS_VERSION', 'NAME', 'MJD0', 'dMJD', |
960 - | 'GeoDist', 'GeoLat', 'GeoLong', 'obsloc', 'posrefsys','earliest','latest', |
961 - | 'radii','meanrad','orb_per','data'] |
962 - | oldkws = list(fmdict.keys()) |
963 - | kws=[] |
964 - | for ik in okws: |
965 - | if oldkws.count(ik): |
966 - | kws.append(ik) |
967 - | oldkws.remove(ik) |
968 - | kws+=oldkws |
969 - | kws.remove('data') |
970 - | collist = list(outdict['data'].keys()) |
971 - | |
972 - | # For cosmetic reasons, encourage a certain order to the columns, i.e. |
973 - | # start with alphabetical order, |
974 - | collist.sort() |
975 - | # but put these ones first, in the listed order, if they are present |
976 - | # (the reverse() and insert(0, ...) below are just how that is done). |
977 - | #put_these_first = ['MJD', 'RA', 'DEC', 'Rho', 'RadVel', 'NP_RA', 'NP_DEC', |
978 - | # 'DiskLong', 'DiskLat', 'sl_lon', 'sl_lat', 'r', |
979 - | # 'ang_sep', 'obs_code'] |
980 - | put_these_first = ['MJD', 'RA', 'DEC', 'Rho', 'RadVel', 'NP_ang', 'NP_dist', |
981 - | # 'Obs_Long', 'Obs_Lat', 'Sl_lon', 'Sl_lat', 'r','rdot'] |
982 - | 'DiskLong', 'DiskLat', 'Sl_lon', 'Sl_lat', 'r','rdot'] |
983 - | # Like l.sort(), reverse() acts on its instance instead of returning a value. |
984 - | put_these_first.reverse() |
985 - | for c in put_these_first: |
986 - | if c in collist: |
987 - | collist.remove(c) |
988 - | collist.insert(0, c) |
989 - | |
990 - | clashing_cols = [c for c in collist if c in kws] |
991 - | if clashing_cols: |
992 - | raise ValueError('The input dictionary lists %s as both keyword(s) and column(s)' % ', '.join(clashing_cols)) |
993 - | |
994 - | # This promotes the keys in outdict['data'] up one level, and removes |
995 - | # 'data' as a key of outdict. |
996 - | outdict.update(outdict.pop('data')) |
997 - | |
998 - | # This is primarily because MeasComet insists on it, not because it |
999 - | # ever gets used. Maybe subType should be changed to 'Asteroid', |
1000 - | # 'Moon', or 'Planet', but I'm leaving it at 'Comet' for now. |
1001 - | info = {'readme': 'Derived by JPLephem_reader.py from a JPL-Horizons ephemeris (http://ssd.jpl.nasa.gov/horizons.cgi#top)', |
1002 - | 'subType': 'Comet', 'type': 'IERS'} |
1003 - | |
1004 - | retval = dict_to_table(outdict, tablepath, kws, collist, info, keepcolorder) |
1005 - | except Exception as e: |
1006 - | casalog.post('Error %s trying to export an ephemeris dict to %s' % (e,tablepath)) |
1007 - | retval = False |
1008 - | |
1009 - | return retval |
1010 - | |
1011 - | |
1012 - | def jplfiles_to_repository(objs, jpldir='.', jplext='.ephem', |
1013 - | log='null'): |
1014 - | """ |
1015 - | For each Solar System object obj in the list objs, |
1016 - | look for matching JPL-Horizons ASCII files with jplext in jpldir, |
1017 - | read them into python dictionaries, |
1018 - | write the dicts to CASA tables in $CASAROOT/data/ephemerides/JPL-Horizons/, |
1019 - | and check that they can be read by me.framecomet(). |
1020 - | Returns the number of ephemerides processed + readable by me.framecomet. |
1021 - | |
1022 - | jpldir and jplext can be glob patterns. |
1023 - | |
1024 - | $CASAROOT is derived from $CASAPATH. |
1025 - | |
1026 - | Log messages will be directed to log for the duration of this function. |
1027 - | Note that 'null' makes a NullLogSink, so it might be better than /dev/null. |
1028 - | |
1029 - | Example: |
1030 - | import recipes.ephemerides.request as jplreq |
1031 - | objs = list(jplreq.asteroids) + list(jplreq.planets_and_moons) |
1032 - | jplfiles_to_repository(objs, os.getenv('CASAPATH').split()[0]) |
1033 - | """ |
1034 - | neph = 0 |
1035 - | casapath = os.getenv('CASAPATH') |
1036 - | if not casapath: |
1037 - | casalog.post("CASAPATH is not set.", 'ERROR') |
1038 - | return 0 |
1039 - | datadir = casapath.split()[0] + '/data/ephemerides/JPL-Horizons' |
1040 - | if not os.path.isdir(datadir): |
1041 - | try: |
1042 - | os.mkdir(datadir) |
1043 - | casalog.post("Created %s" % datadir) |
1044 - | casalog.post("You should probably svn add it.") |
1045 - | except Exception as e: |
1046 - | casalog.post("Error %s creating %s" % (e,datadir), 'ERROR') |
1047 - | return 0 |
1048 - | datadir += '/' |
1049 - | |
1050 - | #oldlog = casalog.logfile() |
1051 - | # This is needed to stop WARN and above from printing to the console, |
1052 - | # but it permanently severs the logger window. |
1053 - | #casalog.setglobal(True) |
1054 - | #casalog.setlogfile(log) |
1055 - | |
1056 - | if jpldir[-1] != '/': |
1057 - | jpldir += '/' |
1058 - | for sob in objs: |
1059 - | capob = sob.capitalize() |
1060 - | lob = sob.lower() |
1061 - | jplfiles = glob(jpldir + lob + jplext) + glob(jpldir + capob + jplext) |
1062 - | for jplfile in jplfiles: |
1063 - | casalog.post('Reading ' + jplfile) |
1064 - | fmdict = readJPLephem(jplfile) |
1065 - | tabpath = construct_tablepath(fmdict, datadir + capob) |
1066 - | ephem_dict_to_table(fmdict, tabpath) |
1067 - | |
1068 - | # Check if it is readable by me.framecomet. |
1069 - | epoch = fmdict['earliest'] |
1070 - | epoch['m0']['value'] += 0.5 * (fmdict['latest']['m0']['value'] - |
1071 - | epoch['m0']['value']) |
1072 - | _me.doframe(epoch) |
1073 - | if _me.framecomet(tabpath): |
1074 - | neph += 1 |
1075 - | else: |
1076 - | casalog.post(tabpath + " was not readable by me.framecomet.", |
1077 - | 'WARN') |
1078 - | |
1079 - | #casalog.setlogfile(oldlog) |
1080 - | |
1081 - | return neph |