@@ -31,6 +31,14 @@ TableStore.decoder = {
     },
     decodeDescribeTable: function (buffers) {
         var response = tsProtos.DescribeTableResponse.decode(buffers);
+        for (var i in response.indexMetas) {
+            var indexMeta = response.indexMetas[i];
+            if (indexMeta.indexSyncPhase === tsProtos.IndexSyncPhase.ISP_INCR) {
+                response.indexMetas[i].indexSyncPhase = TableStore.SyncPhase.INCR;
+            } else {
+                response.indexMetas[i].indexSyncPhase = TableStore.SyncPhase.FULL;
+            }
+        }
         return response;
     },
     _parseRowItem: function (row) {
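Note: the hunk above maps the wire-level `tsProtos.IndexSyncPhase` enum onto the SDK-level `TableStore.SyncPhase` constants, so callers only ever compare against SDK values. A minimal usage sketch, assuming an initialized `TableStore.Client` named `client` and a placeholder table name:

    // Sketch: inspect the normalized sync phase of each secondary index.
    var TableStore = require('tablestore');
    client.describeTable({ tableName: 'sampleTable' }, function (err, data) {
        if (err) { console.log('error:', err); return; }
        for (var i in data.indexMetas) {
            // INCR: the index finished its full build and now applies
            // incremental updates; everything else is reported as FULL.
            var phase = data.indexMetas[i].indexSyncPhase;
            console.log('index', i, phase === TableStore.SyncPhase.INCR ? 'INCR' : 'FULL');
        }
    });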
@@ -66,7 +74,7 @@ TableStore.decoder = {
     decodeGetRange: function (buffers) {
         var returnResp = {};
         var response = tsProtos.GetRangeResponse.decode(buffers);
-
+
         returnResp.consumed = response.consumed;
         response.rows.limit = response.rows.offset + response.rows.length;
         if (response.rows && response.rows.limit > response.rows.offset) {
@@ -85,11 +93,11 @@ TableStore.decoder = {
         } else {
             returnResp.nextStartPrimaryKey = null;
         }
-
+
         returnResp.compressType = response.compressType;
         returnResp.dataBlockType = response.dataBlockType;
         returnResp.nextToken = response.nextToken;
-
+
         return returnResp;
     },
     decodeBatchGetRow: function (buffers) {
@@ -215,8 +223,31 @@ TableStore.decoder = {
                 }
             }
         }
+        response.indexStatus = TableStore.decoder._parseIndexStatus(response.indexStatus);
         return response;
     },
+    _parseIndexStatus: function (indexStatus) {
+        var parseIndexStatus = {
+            statusDescription: indexStatus.statusDescription
+        };
+        switch (indexStatus.status) {
+            case tsSearchProtos.IndexStatusEnum.PENDING: {
+                parseIndexStatus.status = TableStore.IndexStatus.PENDING;
+                break;
+            }
+            case tsSearchProtos.IndexStatusEnum.FAILED: {
+                parseIndexStatus.status = TableStore.IndexStatus.FAILED;
+                break;
+            }
+            case tsSearchProtos.IndexStatusEnum.RUNNING: {
+                parseIndexStatus.status = TableStore.IndexStatus.RUNNING;
+                break;
+            }
+            default:
+                parseIndexStatus.status = TableStore.IndexStatus.UNKNOWN;
+        }
+        return parseIndexStatus;
+    },
     decodeUpdateSearchIndex: function (buffers) {
         return tsSearchProtos.UpdateSearchIndexResponse.decode(buffers);
     },
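Note: `_parseIndexStatus` normalizes the protobuf `IndexStatusEnum` into `TableStore.IndexStatus`, falling back to `UNKNOWN` for unrecognized values, while preserving the server-provided `statusDescription`. A minimal sketch of branching on the normalized status, assuming an initialized `TableStore.Client` named `client` and placeholder table/index names:

    // Sketch: check whether a search index is ready to serve queries.
    client.describeSearchIndex({ tableName: 'sampleTable', indexName: 'sampleIndex' }, function (err, data) {
        if (err) { console.log('error:', err); return; }
        if (data.indexStatus.status === TableStore.IndexStatus.RUNNING) {
            console.log('index is serving queries');
        } else {
            // PENDING, FAILED, or UNKNOWN: surface the server's description.
            console.log('index not ready:', data.indexStatus.statusDescription);
        }
    });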
@@ -253,8 +284,66 @@ TableStore.decoder = {
             result.groupBys = TableStore.decoder._parseGroupBys(response.groupBys);
         }
 
+        result.searchHits = [];
+        if (response.rows.length !== response.searchHits.length && response.searchHits.length !== 0) {
+            console.log("the row count does not match the search hit count in the server response body; ignoring the search hits.");
+        } else {
+            for (var i in response.searchHits) {
+                result.searchHits.push(TableStore.decoder._parseSearchHit(response.searchHits[i], response.rows[i]));
+            }
+        }
+        if (response.consumed) {
+            result.consumed = response.consumed;
+        }
+        if (response.reservedConsumed) {
+            result.reservedConsumed = response.reservedConsumed;
+        }
+
         return result;
     },
+    _parseSearchHit(searchHit, row) {
+        var parseSearchHit = {
+            row: row,
+        };
+        if (searchHit.nestedDocOffset) {
+            parseSearchHit.nestedDocOffset = searchHit.nestedDocOffset;
+        }
+        if (searchHit.score) {
+            parseSearchHit.score = searchHit.score;
+        }
+        if (searchHit.highlightResult) {
+            parseSearchHit.highlightResultItem = TableStore.decoder._parseHighlightResultItem(searchHit);
+        }
+        parseSearchHit.searchInnerHits = new Map();
+        for (var item in searchHit.searchInnerHits) {
+            var searchInnerHit = searchHit.searchInnerHits[item];
+            parseSearchHit.searchInnerHits.set(searchInnerHit.path, TableStore.decoder._parseSearchInnerHit(searchInnerHit));
+        }
+        return parseSearchHit;
+    },
+    _parseSearchInnerHit(searchInnerHit) {
+        var parseSearchInnerHit = {
+            path: searchInnerHit.path,
+        };
+        parseSearchInnerHit.subSearchHits = [];
+        for (var item in searchInnerHit.searchHits) {
+            parseSearchInnerHit.subSearchHits.push(this._parseSearchHit(searchInnerHit.searchHits[item], null));
+        }
+        return parseSearchInnerHit;
+    },
+    _parseHighlightResultItem(searchHit) {
+        var highlightFields = new Map();
+        for (var item in searchHit.highlightResult.highlightFields) {
+            var parseHighlightField = {
+                fragments: searchHit.highlightResult.highlightFields[item].fieldFragments,
+            };
+            highlightFields.set(searchHit.highlightResult.highlightFields[item].fieldName, parseHighlightField);
+        }
+        var parseHighlightResultItem = {
+            highlightFields: highlightFields
+        };
+        return parseHighlightResultItem;
+    },
     _parseSearchVariant(bytes) {
         let inputStream = new TableStore.PlainBufferInputStream(bytes);
         let codedInputStream = new TableStore.PlainBufferCodedInputStream(inputStream);
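Note: `decodeSearch` now pairs each returned row with a `searchHits` entry (score, highlight fragments, nested inner hits), and `searchInnerHits` is decoded into a `Map` keyed by the nested field path, so it is read with `Map` accessors rather than plain property access. A rough sketch of walking the new shape, assuming `data` is a search response decoded by the code above:

    // Sketch: iterate hits, highlight fragments, and nested inner hits.
    for (var i in data.searchHits) {
        var hit = data.searchHits[i];
        console.log('row:', hit.row, 'score:', hit.score);
        if (hit.highlightResultItem) {
            // highlightFields is a Map<fieldName, { fragments: [...] }>.
            hit.highlightResultItem.highlightFields.forEach(function (field, name) {
                console.log('highlight on', name, '=>', field.fragments);
            });
        }
        // searchInnerHits is a Map<nestedPath, { path, subSearchHits: [...] }>.
        hit.searchInnerHits.forEach(function (innerHit, path) {
            console.log(path, 'nested hits:', innerHit.subSearchHits.length);
        });
    }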
@@ -457,6 +546,54 @@ TableStore.decoder = {
                     items: resultItems,
                 };
             }
+            case tsSearchProtos.GroupByType.GROUP_BY_DATE_HISTOGRAM: {
+                let groupByDateHistogramResult = tsSearchProtos.GroupByDateHistogramResult.decode(groupByResultBody);
+                let resultItems = [];
+                TableStore.util.arrayEach(groupByDateHistogramResult.groupByDateHistogramItems, function (item) {
+                    let itemResult = {
+                        timeStamp: item.timestamp,
+                        rowCount: item.rowCount,
+                    };
+                    TableStore.decoder._parseSubAggAndGroupByResult(itemResult, item);
+                    resultItems.push(itemResult);
+                });
+                return {
+                    items: resultItems,
+                };
+            }
+            case tsSearchProtos.GroupByType.GROUP_BY_GEO_GRID: {
+                let groupByGeoGridResult = tsSearchProtos.GroupByGeoGridResult.decode(groupByResultBody);
+                let resultItems = [];
+                TableStore.util.arrayEach(groupByGeoGridResult.groupByGeoGridResultItems, function (item) {
+                    let itemResult = {
+                        key: item.key,
+                        rowCount: item.rowCount,
+                        geoGrid: item.geoGrid,
+                    };
+                    TableStore.decoder._parseSubAggAndGroupByResult(itemResult, item);
+                    resultItems.push(itemResult);
+                });
+                return {
+                    items: resultItems,
+                };
+            }
+            case tsSearchProtos.GroupByType.GROUP_BY_COMPOSITE: {
+                let groupByCompositeResult = tsSearchProtos.GroupByCompositeResult.decode(groupByResultBody);
+                let resultItems = [];
+                TableStore.util.arrayEach(groupByCompositeResult.groupByCompositeResultItems, function (item) {
+                    let itemResult = {
+                        keys: item.keys,
+                        rowCount: item.rowCount,
+                    };
+                    TableStore.decoder._parseSubAggAndGroupByResult(itemResult, item);
+                    resultItems.push(itemResult);
+                });
+                return {
+                    sourceNames: groupByCompositeResult.sourceGroupByNames,
+                    nextToken: groupByCompositeResult.nextToken || null,
+                    items: resultItems,
+                };
+            }
             default:
                 throw new Error("not exist groupByType: " + groupByType);
         }
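Note: the three new cases follow the established group-by decoding pattern: decode the typed result body, copy the per-bucket fields, then recurse into nested aggregations via `_parseSubAggAndGroupByResult`; the composite branch additionally exposes `sourceNames` and a `nextToken` for pagination. A sketch of consuming a decoded date-histogram result; how `histogramResult` is pulled out of the full search response depends on `_parseGroupBys`, so that part is assumed here:

    // Sketch: each bucket carries its start timestamp and matched row
    // count, plus any sub-aggregation results merged in by
    // _parseSubAggAndGroupByResult.
    TableStore.util.arrayEach(histogramResult.items, function (bucket) {
        console.log('bucket at', bucket.timeStamp, 'rows:', bucket.rowCount);
    });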