 import * as fs from 'fs';
 import * as path from 'path';
 
-if (!process.env.JSON_PATH || !process.env.ARROW_PATH) {
-    throw new Error('Integration tests need paths to both json and arrow files');
-}
+import Arrow from '../Arrow';
+import { zip } from 'ix/iterable/zip';
+import { toArray } from 'ix/iterable/toarray';
 
-const jsonPath = path.resolve(process.env.JSON_PATH + '');
-const arrowPath = path.resolve(process.env.ARROW_PATH + '');
+/* tslint:disable */
+const { parse: bignumJSONParse } = require('json-bignum');
 
-if (!fs.existsSync(jsonPath) || !fs.existsSync(arrowPath)) {
-    throw new Error('Integration tests need both json and arrow files to exist');
-}
+const { Table, read } = Arrow;
 
-/* tslint:disable */
-const { parse } = require('json-bignum');
+if (!process.env.JSON_PATHS || !process.env.ARROW_PATHS) {
+    throw new Error('Integration tests need paths to both json and arrow files');
+}
 
-const jsonData = parse(fs.readFileSync(jsonPath, 'utf8'));
-const arrowBuffers: Uint8Array[] = [fs.readFileSync(arrowPath)];
+function resolvePathArgs(paths: string) {
+    let pathsArray = JSON.parse(paths) as string | string[];
+    return (Array.isArray(pathsArray) ? pathsArray : [pathsArray])
+        .map((p) => path.resolve(p))
+        .map((p) => {
+            if (fs.existsSync(p)) {
+                return p;
+            }
+            console.warn(`Could not find file "${p}"`);
+            return undefined;
+        });
+}
 
-import Arrow from '../Arrow';
-import { zip } from 'ix/iterable/zip';
-import { toArray } from 'ix/iterable/toarray';
+const getOrReadFileBuffer = ((cache: any) => function getFileBuffer(path: string, ...args: any[]) {
+    return cache[path] || (cache[path] = fs.readFileSync(path, ...args));
+})({});
 
-const { Table, read } = Arrow;
+const jsonAndArrowPaths = toArray(zip(
+    resolvePathArgs(process.env.JSON_PATHS!),
+    resolvePathArgs(process.env.ARROW_PATHS!)
+))
+.filter(([p1, p2]) => p1 !== undefined && p2 !== undefined) as [string, string][];
 
 expect.extend({
     toEqualVector(v1: any, v2: any) {
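
Note: the env vars are now plural (`JSON_PATHS`/`ARROW_PATHS`) and are expected to hold JSON-encoded strings or string arrays, which `resolvePathArgs` resolves to absolute paths and filters to existing files, while `getOrReadFileBuffer` caches reads so the same file isn't re-read across suites. A minimal sketch of how the pairing behaves, with hypothetical file paths (not part of the diff):

    // Sketch only: hypothetical resolvePathArgs outputs.
    import { zip } from 'ix/iterable/zip';
    import { toArray } from 'ix/iterable/toarray';

    // e.g. JSON_PATHS='["test/data/json/simple.json"]'
    const jsonPaths  = ['/repo/test/data/json/simple.json'];
    const arrowPaths = ['/repo/test/data/arrow/simple.arrow'];

    // zip pairs the i-th json path with the i-th arrow path; toArray drains
    // the lazy iterable so describe blocks can be generated synchronously.
    const pairs = toArray(zip(jsonPaths, arrowPaths));
    // -> [ ['/repo/.../simple.json', '/repo/.../simple.arrow'] ]
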
@@ -66,7 +79,7 @@ expect.extend({
 
         for (let i = -1, n = props.length; ++i < n;) {
             const prop = props[i];
-            if (this.utils.stringify(v1[prop]) !== this.utils.stringify(v2[prop])) {
+            if (`${v1[prop]}` !== `${v2[prop]}`) {
                 propsFailures.push(`${prop}: ${format(v1[prop], v2[prop], ' !== ')}`);
             }
         }
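
The matcher now compares properties via template-literal coercion rather than Jest's `this.utils.stringify`. The diff doesn't state the motivation, but coercion goes through `toString()`, so wrapper values that print the same compare equal even when their internal shapes differ. A toy illustration (the Int64-like wrapper is hypothetical):

    // Hypothetical wrapper class, not from the diff:
    class Int64ish {
        constructor(private n: number) {}
        toString() { return `${this.n}`; }
    }
    const a = new Int64ish(42);
    const coerced = `${a}` === `${42}`; // true -- both sides go through toString()
    // A structural stringify would render the wrapper as an object
    // (roughly 'Int64ish {"n": 42}'), which never equals '42'.
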
@@ -98,35 +111,43 @@ expect.extend({
 });
 
 describe(`Integration`, () => {
-    testReaderIntegration();
-    testTableFromBuffersIntegration();
+    for (const [jsonFilePath, arrowFilePath] of jsonAndArrowPaths) {
+        let { name, dir } = path.parse(arrowFilePath);
+        dir = dir.split(path.sep).slice(-2).join(path.sep);
+        const json = bignumJSONParse(getOrReadFileBuffer(jsonFilePath, 'utf8'));
+        const arrowBuffer = getOrReadFileBuffer(arrowFilePath) as Uint8Array;
+        describe(path.join(dir, name), () => {
+            testReaderIntegration(json, arrowBuffer);
+            testTableFromBuffersIntegration(json, arrowBuffer);
+        });
+    }
 });
 
-function testReaderIntegration() {
-    test(`json and arrow buffers report the same values`, () => {
-        debugger;
+function testReaderIntegration(jsonData: any, arrowBuffer: Uint8Array) {
+    test(`json and arrow record batches report the same values`, () => {
         expect.hasAssertions();
         const jsonRecordBatches = toArray(read(jsonData));
-        const binaryRecordBatches = toArray(read(arrowBuffers));
+        const binaryRecordBatches = toArray(read(arrowBuffer));
         for (const [jsonRecordBatch, binaryRecordBatch] of zip(jsonRecordBatches, binaryRecordBatches)) {
             expect(jsonRecordBatch.length).toEqual(binaryRecordBatch.length);
             expect(jsonRecordBatch.numCols).toEqual(binaryRecordBatch.numCols);
             for (let i = -1, n = jsonRecordBatch.numCols; ++i < n;) {
+                (jsonRecordBatch.columns[i] as any).name = jsonRecordBatch.schema.fields[i].name;
                 (expect(jsonRecordBatch.columns[i]) as any).toEqualVector(binaryRecordBatch.columns[i]);
             }
         }
     });
 }
 
-function testTableFromBuffersIntegration() {
-    test(`json and arrow buffers report the same values`, () => {
-        debugger;
+function testTableFromBuffersIntegration(jsonData: any, arrowBuffer: Uint8Array) {
+    test(`json and arrow tables report the same values`, () => {
         expect.hasAssertions();
         const jsonTable = Table.from(jsonData);
-        const binaryTable = Table.from(arrowBuffers);
+        const binaryTable = Table.from(arrowBuffer);
         expect(jsonTable.length).toEqual(binaryTable.length);
         expect(jsonTable.numCols).toEqual(binaryTable.numCols);
         for (let i = -1, n = jsonTable.numCols; ++i < n;) {
+            (jsonTable.columns[i] as any).name = jsonTable.schema.fields[i].name;
             (expect(jsonTable.columns[i]) as any).toEqualVector(binaryTable.columns[i]);
         }
     });
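
For reference, a sketch of how the per-file describe titles are derived in the loop above (the path below is hypothetical):

    import * as path from 'path';

    const arrowFilePath = '/repo/test/data/cpp/file/simple.arrow';
    let { name, dir } = path.parse(arrowFilePath); // name: 'simple', dir: '/repo/test/data/cpp/file'
    dir = dir.split(path.sep).slice(-2).join(path.sep); // keep the last two segments: 'cpp/file'
    const title = path.join(dir, name); // 'cpp/file/simple'
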