@@ -39,10 +39,12 @@ const upload = async (filePath, parts = [], requestUrl) => {
   const uploadChunk = async (currentChunk, currentChunkIndex, parts, isRetry) => {
     if (parts.some(({ partNumber, size }) => partNumber === currentChunkIndex && size === currentChunk.length)) {
       bar.tick();
+      logger.info(`Chunk (${currentChunkIndex}) already uploaded, skipping (path: ${filePath}, url: ${requestUrl})`);
       return Promise.resolve();
     }
 
     try {
+      logger.info(`Start uploading chunk (${currentChunkIndex}) (path: ${filePath}, url: ${requestUrl})`);
       await _uploadChunk(requestUrl, {
         uploadId,
         version,
@@ -55,18 +57,23 @@ const upload = async (filePath, parts = [], requestUrl) => {
         },
         Authorization
       });
+      logger.info(`Chunk (${currentChunkIndex}) upload complete (path: ${filePath}, url: ${requestUrl})`);
       bar.tick();
     } catch (error) {
+      console.error(`Chunk (${currentChunkIndex}) upload failed (path: ${filePath}, url: ${requestUrl})`);
+      logger.error(`Chunk (${currentChunkIndex}) upload failed (path: ${filePath}, url: ${requestUrl})`);
       logger.error(error.message);
       logger.error(error.stack);
       if (['ECONNREFUSED', 'ECONNRESET', 'ENOENT', 'EPROTO'].includes(error.code)) {
         // retry once if we have not retried yet
         if (!isRetry) {
           logger.warn('retry')
           logger.warn(error.code);
+          logger.info(`Retrying upload of chunk (${currentChunkIndex}) (path: ${filePath}, url: ${requestUrl})`);
           await uploadChunk(currentChunk, currentChunkIndex, parts, true);
         } else {
           console.log(chalk.red('Network connection error, please rerun the command to resume the upload'));
+          logger.error(`Network connection error while uploading chunk (${currentChunkIndex}) (path: ${filePath}, url: ${requestUrl})`);
           process.exit(1);
         }
       } else {
@@ -77,18 +84,20 @@ const upload = async (filePath, parts = [], requestUrl) => {
   }
 
   console.log(`\nStart uploading (${filePath})\n`);
-  logger.info(`Start uploading (${filePath})`);
+  logger.info(`Start uploading (path: ${filePath}, url: ${requestUrl})`);
 
   try {
 
-    const chunkIndexs = new Array(totalChunk).fill("").map((_, index) => index + 1)
+    const chunkIndexs = new Array(totalChunk).fill("").map((_, index) => index + 1);
+
+    logger.info(`Total chunks: ${totalChunk}, chunk size: ${chunkSize} (path: ${filePath}, url: ${requestUrl})`);
 
     await BlueBirdPromise.map(chunkIndexs, (currentChunkIndex) => {
       const start = (currentChunkIndex - 1) * chunkSize;
       const end = ((start + chunkSize) >= fileSize) ? fileSize : start + chunkSize - 1;
       const stream = fs.createReadStream(filePath, { start, end })
       let buf = [];
-      return new Promise((resolve) => {
+      return new Promise((resolve, reject) => {
         stream.on('data', data => {
           buf.push(data)
         })
@@ -101,6 +110,7 @@ const upload = async (filePath, parts = [], requestUrl) => {
           resolve();
         })
       }).catch(error => {
+        logger.error(`Failed to read data of chunk ${currentChunkIndex} (path: ${filePath}, url: ${requestUrl})`);
         throw Error(error)
       })
     }, { concurrency: argv.concurrency })
@@ -117,7 +127,8 @@ const upload = async (filePath, parts = [], requestUrl) => {
 
 
   const merge = async () => {
-    console.log(chalk.cyan('Merging chunks, please wait...'))
+    console.log(chalk.cyan('Merging chunks, please wait...'));
+    logger.info(`Merging chunks (path: ${filePath}, url: ${requestUrl})`);
     return await _mergeAllChunks(requestUrl, {
       version,
       uploadId,
@@ -132,21 +143,24 @@ const upload = async (filePath, parts = [], requestUrl) => {
   try {
     const res = await withRetry(merge, 3, 500);
     if (res.code) {
+      logger.error(`Failed to merge chunks (path: ${filePath}, url: ${requestUrl})`);
       throw (res.message);
     }
   } catch (error) {
     logger.error(error.message);
     logger.error(error.stack);
     console.log(chalk.red((error.response && error.response.data) || error.message));
-    return;
+    process.exit(1);
   }
 
   console.log(chalk.green(`\nUpload complete (${filePath})\n`))
-  logger.info(' ************************ Upload complete ************************')
+  logger.info(` ************************ Upload complete (path: ${filePath}, url: ${requestUrl}) ************************`)
 }
 
 const getFileMD5Success = async (filePath, requestUrl) => {
+  let uploadedParts = []
   try {
+    logger.info(`Fetching uploaded-chunk info (path: ${filePath}, url: ${requestUrl})`);
     const res = await _getExistChunks(requestUrl, {
       fileSize,
       version,
@@ -158,20 +172,23 @@ const getFileMD5Success = async (filePath, requestUrl) => {
       throw (res.message);
     }
     uploadId = res.data.uploadId;
-
+    logger.info(`UploadId: ${uploadId} (path: ${filePath}, url: ${requestUrl})`);
     // some chunks were already uploaded
     if (Array.isArray(res.data.parts)) {
-      await upload(filePath, res.data.parts, requestUrl);
+      uploadedParts = res.data.parts
     } else {
       // nothing uploaded yet
-      await upload(filePath, [], requestUrl);
+      uploadedParts = []
     }
   } catch (error) {
+    logger.error(`Failed to fetch uploaded-chunk info (path: ${filePath}, url: ${requestUrl})`);
     logger.error(error.message);
     logger.error(error.stack);
     console.log(chalk.red((error.response && error.response.data) || error.message));
     process.exit(1);
   }
+
+  await upload(filePath, uploadedParts, requestUrl);
 }
 
 const getFileMD5 = async (filePath, requestUrl) => {
@@ -186,8 +203,8 @@ const getFileMD5 = async (filePath, requestUrl) => {
     logger.info(`Start computing MD5 (${filePath})`);
 
     const bar = new ProgressBar(':bar [:current/:total] :percent ', { total: totalChunk });
-    await new Promise(resolve => {
-      stream = fs.createReadStream(filePath, { highWaterMark: chunkSize });
+    await new Promise((resolve, reject) => {
+      const stream = fs.createReadStream(filePath, { highWaterMark: chunkSize });
       stream.on('data', chunk => {
         bar.tick();
         spark.append(chunk)
@@ -203,6 +220,7 @@ const getFileMD5 = async (filePath, requestUrl) => {
         resolve();
       })
     }).catch(error => {
+      logger.error(`Failed to compute MD5 (${filePath})`);
       throw Error(error);
     })
   } catch (error) {
@@ -215,6 +233,7 @@ const getFileMD5 = async (filePath, requestUrl) => {
 
 
 const uploadFile = async (filePath, size, requestUrl) => {
   fileSize = size;
+  logger.info(`************************ Start uploading (${filePath}) ************************`);
   await getFileMD5(filePath, requestUrl);
   md5 = '';
   uploadId = '';
@@ -245,7 +264,7 @@ const uploadDir = async (dir) => {
         logger.error(error.stack);
         process.exit(1);
       } else {
-        resolve(files)
+        return files;
       }
     }
 
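
Note: the merge step above is retried via `withRetry(merge, 3, 500)`, but the helper itself is not part of this diff. Below is a minimal sketch of such a helper, assuming its signature is `(fn, retries, delayMs)`; the signature is inferred only from the call site, not taken from the repository.

```js
// Minimal sketch of a retry helper matching the call withRetry(merge, 3, 500).
// Assumption: (fn, retries, delayMs) — inferred from the call site in this diff.
const withRetry = async (fn, retries, delayMs) => {
  let lastError;
  for (let attempt = 1; attempt <= retries; attempt += 1) {
    try {
      return await fn(); // return the first successful result
    } catch (error) {
      lastError = error;
      // back off briefly before the next attempt
      await new Promise((resolve) => setTimeout(resolve, delayMs));
    }
  }
  throw lastError; // give up after the last attempt
};
```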