@@ -8,6 +8,7 @@ const path = require('path');
 require('winston-daily-rotate-file');
 const ProgressBar = require('progress');
 const BlueBirdPromise = require("bluebird");
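+// glob is used by the new directory-upload helper below to enumerate files recursively.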
+const glob = require('glob');
 
 const logger = require('../lib/log');
 const { DEFAULT_CHUNK_SIZE, MAX_CHUNK } = require('../lib/constants');
@@ -33,7 +34,7 @@ process.on('uncaughtException', error => {
   logger.error(error.stack);
 })
 
-const upload = async (filePath, parts = []) => {
+const upload = async (filePath, parts = [], requestUrl) => {
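+  // requestUrl is now passed in per call so each file in a directory upload can target its own URL.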
   const bar = new ProgressBar(':bar [:current/:total] :percent ', { total: totalChunk });
   const uploadChunk = async (currentChunk, currentChunkIndex, parts, isRetry) => {
     if (parts.some(({ partNumber, size }) => partNumber === currentChunkIndex && size === currentChunk.length)) {
@@ -47,7 +48,7 @@ const upload = async (filePath, parts = []) => {
         version,
         partNumber: currentChunkIndex,
         size: currentChunk.length,
-        currentChunk
+        currentChunk
       }, {
         headers: {
           'Content-Type': 'application/octet-stream'
@@ -75,14 +76,14 @@ const upload = async (filePath, parts = []) => {
     }
   }
 
-  console.log(`\n开始上传\n`)
-  logger.info('开始上传')
+  console.log(`\n开始上传(${filePath})\n`);
+  logger.info(`开始上传(${filePath})`);
 
   try {
 
-    const chunkIndexs = new Array(totalChunk).fill("").map((_, index) => index + 1)
+    const chunkIndexs = new Array(totalChunk).fill("").map((_, index) => index + 1)
 
-    await BlueBirdPromise.map(chunkIndexs, (currentChunkIndex) => {
+    await BlueBirdPromise.map(chunkIndexs, (currentChunkIndex) => {
       const start = (currentChunkIndex - 1) * chunkSize;
       const end = ((start + chunkSize) >= fileSize) ? fileSize : start + chunkSize - 1;
       const stream = fs.createReadStream(filePath, { start, end })
@@ -113,9 +114,9 @@ const upload = async (filePath, parts = []) => {
 
 
 
-
 
-  const merge = async () => {
+
+  const merge = async () => {
     console.log(chalk.cyan('正在合并分片,请稍等...'))
     return await _mergeAllChunks(requestUrl, {
       version,
@@ -126,7 +127,7 @@ const upload = async (filePath, parts = []) => {
       Authorization
     });
   }
-
+
 
   try {
     const res = await withRetry(merge, 3, 500);
@@ -140,11 +141,11 @@ const upload = async (filePath, parts = []) => {
     return;
   }
 
-  console.log(chalk.green(`\n上传完毕\n`))
+  console.log(chalk.green(`\n上传完毕(${filePath})\n`))
   logger.info('************************ 上传完毕 ************************')
 }
 
-const getFileMD5Success = async (filePath) => {
+const getFileMD5Success = async (filePath, requestUrl) => {
   try {
     const res = await _getExistChunks(requestUrl, {
       fileSize,
@@ -160,10 +161,10 @@ const getFileMD5Success = async (filePath) => {
 
     // Some chunks were uploaded previously
     if (Array.isArray(res.data.parts)) {
-      await upload(filePath, res.data.parts);
+      await upload(filePath, res.data.parts, requestUrl);
     } else {
       // Nothing has been uploaded yet
-      await upload(filePath);
+      await upload(filePath, [], requestUrl);
     }
   } catch (error) {
     logger.error(error.message);
@@ -173,20 +174,20 @@ const getFileMD5Success = async (filePath) => {
   }
 }
 
-const getFileMD5 = async (filePath) => {
+const getFileMD5 = async (filePath, requestUrl) => {
   totalChunk = Math.ceil(fileSize / DEFAULT_CHUNK_SIZE);
   if (totalChunk > MAX_CHUNK) {
     chunkSize = Math.ceil(fileSize / MAX_CHUNK);
     totalChunk = Math.ceil(fileSize / chunkSize);
   }
   const spark = new SparkMD5.ArrayBuffer();
   try {
-    console.log(`\n开始计算 MD5\n`)
-    logger.info('开始计算 MD5')
+    console.log(`\n开始计算 MD5(${filePath})\n`);
+    logger.info(`开始计算 MD5(${filePath})`);
 
     const bar = new ProgressBar(':bar [:current/:total] :percent ', { total: totalChunk });
     await new Promise(resolve => {
-      stream = fs.createReadStream(filePath, { highWaterMark: chunkSize })
+      stream = fs.createReadStream(filePath, { highWaterMark: chunkSize });
       stream.on('data', chunk => {
         bar.tick();
         spark.append(chunk)
@@ -198,7 +199,7 @@ const getFileMD5 = async (filePath) => {
         md5 = spark.end();
         spark.destroy();
         console.log(`\n文件 MD5:${md5}\n`)
-        await getFileMD5Success(filePath);
+        await getFileMD5Success(filePath, requestUrl);
         resolve();
       })
     }).catch(error => {
@@ -212,14 +213,70 @@ const getFileMD5 = async (filePath) => {
   }
 }
 
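+// Upload a single file: stash its size in the module-level fileSize, then run the MD5 + chunk-upload pipeline.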
+const uploadFile = async (filePath, size, requestUrl) => {
+  fileSize = size;
+  await getFileMD5(filePath, requestUrl);
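+  // Clear the shared module-level state so the next file in a directory upload starts clean.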
+  md5 = '';
+  uploadId = '';
+  fileSize = 0;
+  chunkSize = DEFAULT_CHUNK_SIZE;
+  totalChunk = 0;
+}
+
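+// Upload every regular file under dir: glob lists all entries, subdirectories are skipped,
+// and each file is uploaded sequentially via uploadFile.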
+const uploadDir = async (dir) => {
+  let files = [];
+  try {
+    files = await new Promise((resolve, reject) => {
+      glob("**/**", {
+        cwd: dir,
+        root: dir
+      }, function (error, files = []) {
+        if (error) {
+          reject(error);
+        } else {
+          resolve(files)
+        }
+      })
+    });
+  } catch (error) {
+    console.log(chalk.red((error.response && error.response.data) || error.message));
+    logger.error(error.message);
+    logger.error(error.stack);
+    process.exit(1);
+  }
+
+  for (const file of files) {
+    const filePath = path.join(dir, file);
+    const stat = fs.lstatSync(filePath);
+    const isDirectory = stat.isDirectory();
+    if (!isDirectory) {
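+      // Per-file target URL: <requestUrl>/chunks/<top-level dir name>/<relative file path>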
+      const url = new URL(`chunks/${dir.split(path.sep).pop()}/${file}`, requestUrl.endsWith('/') ? requestUrl : `${requestUrl}/`).toString();
+      await uploadFile(filePath, stat.size, url);
+      console.log('************************ **** ************************');
+      logger.info('************************ **** ************************');
+    }
+  }
+}
+
 const beforeUpload = async (filePath) => {
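+  // argv.dir presumably comes from a --dir style CLI flag; the flag parsing is not part of this diff.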
+  const isUploadDir = argv.dir;
+  let fSize = 0;
   try {
     const stat = fs.lstatSync(filePath);
-    if (stat.isDirectory()) {
+    const isDirectory = stat.isDirectory();
+    if (isDirectory && !isUploadDir) {
       console.log(chalk.red(`\n${filePath} 不合法,需指定一个文件\n`))
       process.exit(1);
+    } else if (!isDirectory && isUploadDir) {
+      console.log(chalk.red(`\n${filePath} 不合法,需指定一个文件夹\n`))
+      process.exit(1);
     }
-    fileSize = stat.size;
+    fSize = stat.size;
   } catch (error) {
     if (error.code === 'ENOENT') {
       console.log(chalk.red(`未找到 ${filePath}`));
@@ -230,7 +287,11 @@ const beforeUpload = async (filePath) => {
     }
     process.exit(1);
   }
-  await getFileMD5(filePath);
+  if (isUploadDir) {
+    await uploadDir(filePath);
+  } else {
+    await uploadFile(filePath, fSize, requestUrl);
+  }
 }
 
 const onUpload = (_username, _password) => {