@@ -68,8 +68,7 @@ export class SentryHttpInstrumentationBeforeOtel extends InstrumentationBase {
       return original.apply(this, args);
     }

-    const response = args[1] as http.OutgoingMessage;
-
+    const response = args[2] as http.OutgoingMessage;
     patchResponseToFlushOnServerlessPlatforms(response);

     return original.apply(this, args);
@@ -81,50 +80,57 @@ export class SentryHttpInstrumentationBeforeOtel extends InstrumentationBase {
 function patchResponseToFlushOnServerlessPlatforms(res: http.OutgoingMessage): void {
   // Freely extend this function with other platforms if necessary
   if (process.env.VERCEL) {
+    DEBUG_BUILD && logger.log('Patching response to flush on Vercel');
+
     // In some cases res.end does not seem to be defined leading to errors if passed to Proxy
     // https://github.com/getsentry/sentry-javascript/issues/15759
-    if (typeof res.end === 'function') {
-      let markOnEndDone = (): void => undefined;
-      const onEndDonePromise = new Promise<void>(res => {
-        markOnEndDone = res;
-      });
-
-      res.on('close', () => {
-        markOnEndDone();
-      });
-
-      // eslint-disable-next-line @typescript-eslint/unbound-method
-      res.end = new Proxy(res.end, {
-        apply(target, thisArg, argArray) {
-          vercelWaitUntil(
-            new Promise<void>(finishWaitUntil => {
-              // Define a timeout that unblocks the lambda just to be safe so we're not indefinitely keeping it alive, exploding server bills
-              const timeout = setTimeout(() => {
-                finishWaitUntil();
-              }, 2000);
-
-              onEndDonePromise
-                .then(() => {
-                  DEBUG_BUILD && logger.log('Flushing events before Vercel Lambda freeze');
-                  return flush(2000);
-                })
-                .then(
-                  () => {
-                    clearTimeout(timeout);
-                    finishWaitUntil();
-                  },
-                  e => {
-                    clearTimeout(timeout);
-                    DEBUG_BUILD && logger.log('Error while flushing events for Vercel:\n', e);
-                    finishWaitUntil();
-                  },
-                );
-            }),
-          );
-
-          return target.apply(thisArg, argArray);
-        },
-      });
+    if (typeof res.end !== 'function') {
+      DEBUG_BUILD && logger.warn('res.end is not a function, skipping patch...');
+      return;
     }
+
+    let markOnEndDone = (): void => undefined;
+    const onEndDonePromise = new Promise<void>(res => {
+      markOnEndDone = res;
+    });
+
+    res.on('close', () => {
+      markOnEndDone();
+    });
+
+    logger.log('Patching res.end()');
+
+    // eslint-disable-next-line @typescript-eslint/unbound-method
+    res.end = new Proxy(res.end, {
+      apply(target, thisArg, argArray) {
+        vercelWaitUntil(
+          new Promise<void>(finishWaitUntil => {
+            // Define a timeout that unblocks the lambda just to be safe so we're not indefinitely keeping it alive, exploding server bills
+            const timeout = setTimeout(() => {
+              finishWaitUntil();
+            }, 2000);
+
+            onEndDonePromise
+              .then(() => {
+                DEBUG_BUILD && logger.log('Flushing events before Vercel Lambda freeze');
+                return flush(2000);
+              })
+              .then(
+                () => {
+                  clearTimeout(timeout);
+                  finishWaitUntil();
+                },
+                e => {
+                  clearTimeout(timeout);
+                  DEBUG_BUILD && logger.log('Error while flushing events for Vercel:\n', e);
+                  finishWaitUntil();
+                },
+              );
+          }),
+        );
+
+        return target.apply(thisArg, argArray);
+      },
+    });
   }
 }
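
For reviewers who want to see the pattern in isolation: the sketch below reproduces the same flush-on-close flow this patch sets up, as a minimal standalone TypeScript example. `patchEndToFlush`, `waitUntilStub`, and `flushStub` are hypothetical stand-ins invented for the sketch — they are not Sentry's `vercelWaitUntil`/`flush` or the patched function itself — and only illustrate the Proxy-on-`res.end` plus close-promise technique.

```ts
import * as http from 'node:http';

// Hypothetical stand-ins for the real vercelWaitUntil() and flush() helpers;
// they exist only for this sketch.
const pendingWork: Array<Promise<void>> = [];
function waitUntilStub(task: Promise<void>): void {
  pendingWork.push(task);
}
function flushStub(timeout: number): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, Math.min(timeout, 50)));
}

// Same flow as the patched function: resolve a promise once the response
// closes, proxy res.end, and hand a timeout-guarded flush promise to the
// platform's waitUntil so the invocation stays alive until flushing finishes.
function patchEndToFlush(res: http.OutgoingMessage): void {
  if (typeof res.end !== 'function') {
    return;
  }

  let markClosed = (): void => undefined;
  const closed = new Promise<void>(resolve => {
    markClosed = resolve;
  });
  res.on('close', () => markClosed());

  // eslint-disable-next-line @typescript-eslint/unbound-method
  res.end = new Proxy(res.end, {
    apply(target, thisArg, argArray) {
      waitUntilStub(
        new Promise<void>(done => {
          // Safety valve: never keep the function alive longer than 2s.
          const timeout = setTimeout(done, 2000);
          closed
            .then(() => flushStub(2000))
            .then(
              () => {
                clearTimeout(timeout);
                done();
              },
              () => {
                clearTimeout(timeout);
                done();
              },
            );
        }),
      );
      return target.apply(thisArg, argArray);
    },
  });
}

// Usage: patch the response before the handler calls res.end().
http
  .createServer((_req, res) => {
    patchEndToFlush(res);
    res.end('ok');
  })
  .listen(0);
```

Resolving on the response's 'close' event (rather than inside the end() proxy itself) means flushing only starts after the response has actually been handed off, while the 2-second timeout keeps the serverless invocation from being held open indefinitely.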