 *--------------------------------------------------------------------------------------------*/

 import * as nls from "vs/nls";
+import * as vszip from "vszip";
 import * as fs from "fs";
 import * as path from "path";
 import * as tarStream from "tar-stream";
 import { promisify } from "util";
-import { ILogService } from "vs/platform/log/common/log";
 import { CancellationToken } from "vs/base/common/cancellation";
 import { mkdirp } from "vs/base/node/pfs";

 export interface IExtractOptions {
 	overwrite?: boolean;

 	/**
-	 * Source path within the ZIP archive. Only the files contained in this
-	 * path will be extracted.
+	 * Source path within the TAR/ZIP archive. Only the files
+	 * contained in this path will be extracted.
 	 */
 	sourcePath?: string;
 }
@@ -28,11 +28,15 @@ export interface IFile {
 	localPath?: string;
 }

-export function zip(tarPath: string, files: IFile[]): Promise<string> {
-	return new Promise<string>((c, e) => {
+/**
+ * Override the standard VS Code behavior for zipping
+ * extensions to use the TAR format instead of ZIP.
+ */
+export const zip = (tarPath: string, files: IFile[]): Promise<string> => {
+	return new Promise<string>((c, e): void => {
 		const pack = tarStream.pack();
 		const chunks: Buffer[] = [];
-		const ended = new Promise<Buffer>((res, rej) => {
+		const ended = new Promise<Buffer>((res): void => {
 			pack.on("end", () => {
 				res(Buffer.concat(chunks));
 			});
@@ -56,132 +60,160 @@ export function zip(tarPath: string, files: IFile[]): Promise<string> {
 			e(ex);
 		});
 	});
-}
-
-export async function extract(tarPath: string, targetPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> {
-	const sourcePathRegex = new RegExp(options.sourcePath ? `^${options.sourcePath}` : '');
-
-	return new Promise<void>(async (c, e) => {
-		const buffer = await promisify(fs.readFile)(tarPath);
-		const extractor = tarStream.extract();
-		extractor.once('error', e);
-		extractor.on('entry', (header, stream, next) => {
-			const rawName = header.name;
-
-			const nextEntry = (): void => {
-				stream.resume();
-				next();
-			};
-
-			if (token.isCancellationRequested) {
-				return nextEntry();
-			}
-
-			if (!sourcePathRegex.test(rawName)) {
-				return nextEntry();
-			}
+};
+
+/**
+ * Override the standard VS Code behavior for extracting
+ * archives, to first attempt to process the archive as a TAR
+ * and then fall back on the original implementation for
+ * processing ZIPs.
+ */
+export const extract = (archivePath: string, extractPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> => {
+	return new Promise<void>((c, e): void => {
+		extractTar(archivePath, extractPath, options, token).then(c).catch((ex) => {
+			if (!ex.toString().includes("Invalid tar header")) {
+				e(ex);

-			const fileName = rawName.replace(sourcePathRegex, '');
-
-			const targetFileName = path.join(targetPath, fileName);
-			if (/\/$/.test(fileName)) {
-				stream.resume();
-				mkdirp(targetFileName).then(() => {
-					next();
-				}, e);
 				return;
 			}
-
-			const dirName = path.dirname(fileName);
-			const targetDirName = path.join(targetPath, dirName);
-			if (targetDirName.indexOf(targetPath) !== 0) {
-				e(nls.localize('invalid file', "Error extracting {0}. Invalid file.", fileName));
-				return nextEntry();
-			}
-
-			mkdirp(targetDirName, void 0, token).then(() => {
-				const fstream = fs.createWriteStream(targetFileName, { mode: header.mode });
-				fstream.once('close', () => {
-					next();
-				});
-				fstream.once('error', (err) => {
-					e(err);
-				});
-				stream.pipe(fstream);
-				stream.resume();
-			});
-		});
-		extractor.once('finish', () => {
-			c();
+			vszip.extract(archivePath, extractPath, options, token).then(c).catch(e);
 		});
-		extractor.write(buffer);
-		extractor.end();
 	});
-}
-
-export function buffer(tarPath: string, filePath: string): Promise<Buffer> {
-	return new Promise<Buffer>(async (c, e) => {
+};
+
+/**
+ * Override the standard VS Code behavior for buffering
+ * archives, to first process the Buffer as a TAR and then
+ * fall back on the original implementation for processing ZIPs.
+ */
+export const buffer = (targetPath: string, filePath: string): Promise<Buffer> => {
+	return new Promise<Buffer>((c, e): void => {
 		let done: boolean = false;
-		extractAssets(tarPath, new RegExp(filePath), (path: string, data: Buffer) => {
-			if (path === filePath) {
+		extractAssets(targetPath, new RegExp(filePath), (assetPath: string, data: Buffer) => {
+			if (path.normalize(assetPath) === path.normalize(filePath)) {
 				done = true;
 				c(data);
 			}
 		}).then(() => {
 			if (!done) {
-				e("couldnt find asset " + filePath);
+				e("couldn't find asset " + filePath);
 			}
 		}).catch((ex) => {
-			e(ex);
-		});
-	});
-}
+			if (!ex.toString().includes("Invalid tar header")) {
+				e(ex);

-async function extractAssets(tarPath: string, match: RegExp, callback: (path: string, data: Buffer) => void): Promise<void> {
-	const buffer = await promisify(fs.readFile)(tarPath);
-	const extractor = tarStream.extract();
-	let callbackResolve: () => void;
-	let callbackReject: (ex?) => void;
-	const complete = new Promise<void>((r, rej) => {
-		callbackResolve = r;
-		callbackReject = rej;
-	});
-	extractor.once("error", (err) => {
-		callbackReject(err);
+				return;
+			}
+			vszip.buffer(targetPath, filePath).then(c).catch(e);
+		});
 	});
-	extractor.on("entry", (header, stream, next) => {
-		const name = header.name;
-		if (match.test(name)) {
-			extractData(stream).then((data) => {
-				callback(name, data);
-				next();
+};
+
+/**
+ * Override the standard VS Code behavior for extracting assets
+ * from archive Buffers to use the TAR format instead of ZIP.
+ */
+export const extractAssets = (tarPath: string, match: RegExp, callback: (path: string, data: Buffer) => void): Promise<void> => {
+	return new Promise<void>(async (c, e): Promise<void> => {
+		try {
+			const buffer = await promisify(fs.readFile)(tarPath);
+			const extractor = tarStream.extract();
+			extractor.once("error", e);
+			extractor.on("entry", (header, stream, next) => {
+				const name = header.name;
+				if (match.test(name)) {
+					extractData(stream).then((data) => {
+						callback(name, data);
+						next();
+					}).catch(e);
+					stream.resume();
+				} else {
+					stream.on("end", () => {
+						next();
+					});
+					stream.resume();
+				}
 			});
-			stream.resume();
-		} else {
-			stream.on("end", () => {
-				next();
+			extractor.on("finish", () => {
+				c();
 			});
-			stream.resume();
+			extractor.write(buffer);
+			extractor.end();
+		} catch (ex) {
+			e(ex);
 		}
 	});
-	extractor.on("finish", () => {
-		callbackResolve();
-	});
-	extractor.write(buffer);
-	extractor.end();
-	return complete;
-}
+};

-async function extractData(stream: NodeJS.ReadableStream): Promise<Buffer> {
-	return new Promise<Buffer>((res, rej) => {
+const extractData = (stream: NodeJS.ReadableStream): Promise<Buffer> => {
+	return new Promise<Buffer>((c, e): void => {
 		const fileData: Buffer[] = [];
-		stream.on('data', (data) => fileData.push(data));
-		stream.on('end', () => {
+		stream.on("data", (data) => fileData.push(data));
+		stream.on("end", () => {
 			const fd = Buffer.concat(fileData);
-			res(fd);
-		});
-		stream.on('error', (err) => {
-			rej(err);
+			c(fd);
 		});
+		stream.on("error", e);
 	});
-}
+};
+
+const extractTar = (tarPath: string, targetPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> => {
+	return new Promise<void>(async (c, e): Promise<void> => {
+		try {
+			const sourcePathRegex = new RegExp(options.sourcePath ? `^${options.sourcePath}` : "");
+			const buffer = await promisify(fs.readFile)(tarPath);
+			const extractor = tarStream.extract();
+			extractor.once("error", e);
+			extractor.on("entry", (header, stream, next) => {
+				const rawName = path.normalize(header.name);
+
+				const nextEntry = (): void => {
+					stream.resume();
+					next();
+				};
+
+				if (token.isCancellationRequested) {
+					return nextEntry();
+				}
+
+				if (!sourcePathRegex.test(rawName)) {
+					return nextEntry();
+				}
+
+				const fileName = rawName.replace(sourcePathRegex, "");
+				const targetFileName = path.join(targetPath, fileName);
+				if (/\/$/.test(fileName)) {
+					stream.resume();
+					mkdirp(targetFileName).then(() => {
+						next();
+					}, e);

+					return;
+				}
+
+				const dirName = path.dirname(fileName);
+				const targetDirName = path.join(targetPath, dirName);
+				if (targetDirName.indexOf(targetPath) !== 0) {
+					e(nls.localize("invalid file", "Error extracting {0}. Invalid file.", fileName));
+
+					return nextEntry();
+				}
+
+				return mkdirp(targetDirName, undefined, token).then(() => {
+					const fstream = fs.createWriteStream(targetFileName, { mode: header.mode });
+					fstream.once("close", () => {
+						next();
+					});
+					fstream.once("error", e);
+					stream.pipe(fstream);
+					stream.resume();
+				});
+			});
+			extractor.once("finish", c);
+			extractor.write(buffer);
+			extractor.end();
+		} catch (ex) {
+			e(ex);
+		}
+	});
+};
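A rough usage sketch of the new API, not part of the diff: the import path and archive names below are hypothetical, but the call shapes follow the signatures added above. `extract` attempts TAR extraction first and only falls back to `vszip.extract` when the TAR parser reports an invalid header (per the check in `extract` above); `buffer` does the same when reading a single entry.

import { CancellationToken } from "vs/base/common/cancellation";
// Hypothetical path to the module changed in this diff.
import { buffer, extract } from "vs/server/node/tar";

// Unpack an extension archive: TAR is tried first, ZIP handling is the fallback.
extract("/tmp/extension.tar", "/tmp/unpacked", { sourcePath: "extension" }, CancellationToken.None)
	.then(() => console.log("extracted"))
	.catch((err) => console.error("extract failed", err));

// Read a single entry out of the archive without extracting everything.
buffer("/tmp/extension.tar", "extension/package.json")
	.then((data) => console.log(JSON.parse(data.toString()).name))
	.catch((err) => console.error("buffer failed", err));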