1
+ #!/usr/bin/env python
2
+
3
+ # import modules
4
+ import json
5
+ from optparse import OptionParser
6
+
7
# helper functions
def findVarIdx(var_dic, line_list):
    """Map each DSSAT variable name in var_dic to its data-column index.

    Header rows in a .SOL file begin with an '@' token, so a variable
    found at token position p in the header line corresponds to column
    p - 1 in the data rows that follow (hence the ``- 1``).

    var_dic:   dict whose keys are DSSAT variable names to look for
    line_list: tokenized header line (list of strings)

    Returns {variable_name: column_index}; variables absent from
    line_list are simply omitted from the result.
    """
    idx_dic = {}
    # Iterate keys directly: the original `var_dic.keys()[i]` indexing
    # breaks on Python 3 (dict views are not indexable) and is O(n^2).
    for key in var_dic:
        if key in line_list:
            idx_dic[key] = line_list.index(key) - 1
    return idx_dic
def makeJSONDic(var_dic, idx_dic, line_list, fill_value):
    """Build a dict of JSON variable names to values from one data line.

    var_dic:    dict {dssat_name: json_name} (name translation table)
    idx_dic:    dict {dssat_name: column_index}, as built by findVarIdx
    line_list:  tokenized data line (list of strings)
    fill_value: sentinel string marking missing data; columns holding it
                are skipped rather than copied into the output

    Returns {json_name: value} for every indexed column whose value is
    not the fill sentinel.
    """
    out_dic = {}
    # Iterate items directly: the original `idx_dic.keys()[i]` indexing
    # breaks on Python 3 (dict views are not indexable) and is O(n^2).
    for in_var, idx in idx_dic.items():
        value = line_list[idx]
        if value != fill_value:  # only keep variables that actually exist
            out_dic[var_dic[in_var]] = value
    return out_dic
# parse inputs
parser = OptionParser()
parser.add_option("-i", "--input", dest="inputfile", default="Generic.SOL", type="string",
                  help="SOL file to parse", metavar="FILE")
parser.add_option("-o", "--output", dest="outputfile", default="Generic.json", type="string",
                  help="JSON file to create", metavar="FILE")
(options, args) = parser.parse_args()

# mapping between DSSAT variables and json variables
header_var_map = {'SCOM': 'sscol', 'SALB': 'salb', 'SLU1': 'slu1', 'SLDR': 'sldr',
                  'SLRO': 'slro', 'SLNF': 'slnf', 'SLPF': 'slpf', 'SMHB': 'smhb',
                  'SMPX': 'smpx', 'SMKE': 'smke'}
row1_var_map = {'SLB': 'sllb', 'SLMH': 'slmh', 'SLLL': 'slll', 'SDUL': 'sldul',
                'SSAT': 'slsat', 'SRGF': 'slrgf', 'SSKS': 'sksat', 'SBDM': 'slbdm',
                'SLOC': 'sloc', 'SLCL': 'slcly', 'SLSI': 'slsil', 'SLCF': 'slcf',
                'SLNI': 'slni', 'SLHW': 'slphw', 'SLHB': 'slphb', 'SCEC': 'slcec',
                'SADC': 'sladc'}
row2_var_map = {'SLPX': 'slpx', 'SLPT': 'slpt', 'SLPO': 'slpo', 'CACO3': 'caco3',
                'SLAL': 'slal', 'SLFE': 'slfe', 'SLMN': 'slmn', 'SLBS': 'slbs',
                'SLPA': 'slpa', 'SLPB': 'slpb', 'SLKE': 'slke', 'SLMG': 'slmg',
                'SLNA': 'slna', 'SLSU': 'slsu', 'SLEC': 'slec', 'SLCA': 'slca'}

# value to fill with if datum is missing
fill_value = '-99.0'

# open SOL file; the with-block closes the handle (the original
# `tuple(open(...))` left the file open)
with open(options.inputfile, 'r') as sol_file:
    lines = [l.split() for l in sol_file]

header_var_idx = findVarIdx(header_var_map, lines[3])  # hardcoded for 4th line
row1_var_idx = findVarIdx(row1_var_map, lines[5])      # hardcoded for 6th line

# calculate number of layers and whether data are stacked: a second '@'
# header row after the first layer table means every layer has a second
# row of variables (row2_var_map) in a following table
num_layers = 0
stacked = False
for i in range(len(lines[6:])):
    l = lines[6 + i]
    if not l[0] == '@':  # key off first column
        num_layers += 1
    else:
        stacked = True
        row2_var_idx = findVarIdx(row2_var_map, l)
        break

# make dictionary to store data; write preliminary metadata
data_dic = {}
# read 1st line
data_dic['soil_id'] = lines[0][0].replace('*', '')  # remove asterisk from soil id
data_dic['sl_source'] = lines[0][1]
data_dic['sltx'] = lines[0][2]
data_dic['sldp'] = lines[0][3]
data_dic['soil_name'] = ' '.join(lines[0][4:])
# read 3rd line
data_dic['sl_loc_3'] = lines[2][0]
data_dic['sl_loc_1'] = lines[2][1]
data_dic['soil_lat'] = lines[2][2]
data_dic['soil_long'] = lines[2][3]
data_dic['classification'] = ' '.join(lines[2][4:])

# write header variables
header_dic = makeJSONDic(header_var_map, header_var_idx, lines[4], fill_value)
data_dic.update(header_dic)

# write specific soil parameters for each layer
data_dic['soilLayer'] = [0] * num_layers
for i in range(num_layers):
    line = lines[6 + i]
    data_dic['soilLayer'][i] = makeJSONDic(row1_var_map, row1_var_idx, line, fill_value)
    if stacked:
        # stacked data rows begin just after the second '@' header,
        # which sits at index 6 + num_layers
        line2 = lines[7 + num_layers + i]
        stacked_dic = makeJSONDic(row2_var_map, row2_var_idx, line2, fill_value)
        data_dic['soilLayer'][i].update(stacked_dic)

# save into bigger dictionary
all_data = {'soils': [data_dic]}

# save json file; with-block ensures the output handle is flushed and
# closed (the original passed an anonymous open() to json.dump)
with open(options.outputfile, 'w') as json_file:
    json.dump(all_data, json_file, indent=2, separators=(',', ': '))