@@ -26,7 +26,38 @@ export default {
   // https://groups.google.com/a/chromium.org/g/chromium-extensions/c/ffI0iNd79oo
   // https://github.dev/GoogleChrome/chrome-extensions-samples/api-samples/tabCapture

-  onClickExtension: async () => {
+  onClick: async () => {
+    // https://developer.chrome.com/docs/extensions/how-to/web-platform/screen-capture
+    const stream = await navigator.mediaDevices.getDisplayMedia({
+      audio: true,
+      video: true,
+    });
+
+    drawVisualizer(stream);
+
+    // const streamId = await UfsGlobal.Extension.runInBackground(
+    //   "chrome.tabCapture.getMediaStreamId"
+    // );
+    // navigator.webkitGetUserMedia(
+    //   {
+    //     audio: {
+    //       mandatory: {
+    //         chromeMediaSource: "tab", // The media source must be 'tab' here.
+    //         chromeMediaSourceId: streamId,
+    //       },
+    //     },
+    //     video: false,
+    //   },
+    //   function (stream) {
+    //     console.log(stream);
+    //   },
+    //   function (error) {
+    //     console.error(error);
+    //   }
+    // );
+  },
+
+  _onClickExtension: async () => {
     try {
       // const url = "http://127.0.0.1:5500/public/music-visualizer/index.html";
       const url = await chrome.runtime.getURL(
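
The new onClick handler above relies on drawVisualizer(stream), which does not appear in these hunks. A minimal sketch of what such a helper could look like, assuming it routes the captured audio through a Web Audio AnalyserNode and draws the byte frequency data onto a fixed-position overlay canvas (all names and sizes below are illustrative, not the committed code):

function drawVisualizer(stream) {
  // Route the captured display/tab audio into an analyser node.
  const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
  const source = audioCtx.createMediaStreamSource(stream);
  const analyser = audioCtx.createAnalyser();
  analyser.fftSize = 2048;
  source.connect(analyser);

  // Fixed-position overlay canvas for the spectrum.
  const canvas = document.createElement("canvas");
  canvas.width = 1000;
  canvas.height = 270;
  canvas.style.cssText =
    "position: fixed; top: 0; left: 0; z-index: 2147483647; background: #333a;";
  document.body.appendChild(canvas);
  const canvasCtx = canvas.getContext("2d");

  const data = new Uint8Array(analyser.frequencyBinCount);
  (function draw() {
    analyser.getByteFrequencyData(data);
    canvasCtx.clearRect(0, 0, canvas.width, canvas.height);
    canvasCtx.beginPath();
    canvasCtx.strokeStyle = "rgba(255, 255, 255, 0.9)";
    const slice = canvas.width / data.length;
    for (let i = 0; i < data.length; i++) {
      canvasCtx.lineTo(i * slice, canvas.height - (data[i] * canvas.height) / 256);
    }
    canvasCtx.stroke();
    requestAnimationFrame(draw);
  })();
}

Note that getDisplayMedia only delivers audio when the user shares a tab or window with "Share audio" enabled; otherwise the returned stream has no audio track to analyse.
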
@@ -86,232 +117,4 @@ export default {
       console.log(e);
     }
   },
-
-  onClick_: async () => {
-    javascript: (function () {
-      var ctx;
-      var width = 1000;
-      var fftHeight = 250;
-      var height = fftHeight + 20;
-      var fftSize = 2048; // number of samples used to generate each FFT
-      var frequencyBins = fftSize / 2; // number of frequency bins in FFT
-      var video;
-
-      function requestPIPCanvas(canvas) {
-        const stream = canvas.captureStream();
-        if (!video) {
-          video = document.createElement("video");
-          video.autoplay = true;
-          video.style.display = "none";
-        }
-        video.srcObject = stream;
-        document.body.appendChild(video);
-        setTimeout(() => {
-          video.requestPictureInPicture?.();
-        }, 500);
-      }
-
-      function draggable(ele) {
-        // Variables to store the position of the canvas
-        var offsetX, offsetY;
-        var isDragging = false;
-
-        // Function to handle mouse down event
-        ele.addEventListener("mousedown", function (event) {
-          isDragging = true;
-          offsetX = event.clientX - ele.offsetLeft;
-          offsetY = event.clientY - ele.offsetTop;
-        });
-
-        // Function to handle mouse move event
-        document.addEventListener("mousemove", function (event) {
-          if (!isDragging) return;
-          var x = event.clientX - offsetX;
-          var y = event.clientY - offsetY;
-          ele.style.left = x + "px";
-          ele.style.top = y + "px";
-        });
-
-        // Function to handle mouse up event
-        document.addEventListener("mouseup", function () {
-          isDragging = false;
-        });
-      }
-
-      function map(x, in_min, in_max, out_min, out_max) {
-        return (
-          ((x - in_min) * (out_max - out_min)) / (in_max - in_min) + out_min
-        );
-      }
-
-      function smoothFFT(fftArray, smoothingFactor = 0.8) {
-        let smoothedFFT = [];
-        smoothedFFT[0] = fftArray[0];
-        for (let i = 1; i < fftArray.length; i++) {
-          smoothedFFT[i] =
-            fftArray[i] * smoothingFactor +
-            smoothedFFT[i - 1] * (1 - smoothingFactor);
-        }
-        return smoothedFFT;
-      }
-
-      function highlightBass(
-        fftArray,
-        samplingRate = 44100,
-        bassRange = [20, 200]
-      ) {
-        const fftSize = fftArray.length;
-        const threshold = 0.5; // Adjust threshold value as needed (0 for hard removal)
-
-        for (let i = 0; i < fftSize; i++) {
-          const freq = (i * samplingRate) / fftSize;
-          if (freq < bassRange[0] || freq > bassRange[1]) {
-            fftArray[i] *= threshold; // Apply threshold instead of hard removal
-          }
-        }
-
-        return fftArray;
-      }
-
-      function logScale(fftArray, minDecibels = -60, maxDecibels = 0) {
-        let minAmplitude = Math.pow(10, minDecibels / 10);
-        let maxAmplitude = Math.pow(10, maxDecibels / 10);
-
-        const scale = (val) => {
-          const scaledValue =
-            10 * Math.log10(Math.max(val, minAmplitude)) -
-            10 * Math.log10(minAmplitude);
-          return Math.min(scaledValue, maxDecibels); // Cap the output at maxDecibels
-        };
-
-        return fftArray.map((val) => scale(val));
-      }
-
-      function drawLinearFFT(dataArray, canvasCtx) {
-        canvasCtx.clearRect(0, 0, width, height);
-        canvasCtx.beginPath();
-
-        var sliceLength = width / frequencyBins;
-
-        for (var i = 0; i < frequencyBins; i++) {
-          var x = i * sliceLength;
-          var y = fftHeight - (dataArray[i] * fftHeight) / 256;
-          canvasCtx.lineTo(x, y);
-        }
-
-        canvasCtx.stroke();
-      }
-
-      function drawLogarithmicFFT(dataArray, canvasCtx) {
-        canvasCtx.clearRect(0, 0, width, height);
-        canvasCtx.beginPath();
-
-        var scale = Math.log(frequencyBins - 1) / width;
-        var binWidthFreq = ctx.sampleRate / (frequencyBins * 2);
-        var firstBinWidthPixels = Math.log(2) / scale;
-
-        for (var i = 1; i < frequencyBins; i++) {
-          var x = Math.log(i) / scale;
-          var y = fftHeight - (dataArray[i] * fftHeight) / 256;
-          canvasCtx.lineTo(x, y);
-        }
-
-        canvasCtx.stroke();
-      }
-
-      function createAudioContext() {
-        const audioContext = new (window.AudioContext ||
-          window.webkitAudioContext)();
-        ctx = audioContext;
-        const analyser = audioContext.createAnalyser();
-        analyser.fftSize = fftSize;
-        const bufferLength = analyser.frequencyBinCount;
-        const dataArray = new Uint8Array(bufferLength);
-
-        const canvas = document.createElement("canvas");
-        canvas.width = width;
-        canvas.height = height;
-        canvas.style.cssText =
-          "position: fixed; top: 0; left: 0; z-index: 2147483647; background: #333a;";
-        document.body.appendChild(canvas);
-        const canvasCtx = canvas.getContext("2d");
-        draggable(canvas);
-
-        canvas.onclick = function () {
-          requestPIPCanvas(canvas);
-        };
-
-        function draw() {
-          analyser.getByteFrequencyData(dataArray);
-
-          canvasCtx.strokeStyle = "rgba(255, 255, 255, 0.9)";
-          drawLogarithmicFFT(dataArray, canvasCtx);
-
-          // canvasCtx.clearRect(0, 0, canvas.width, canvas.height);
-          // const barWidth = ~~(bufferLength / canvas.width);
-
-          // const arr = highlightBass(dataArray, audioContext.sampleRate);
-          // canvasCtx.beginPath();
-          // canvasCtx.strokeStyle = "rgba(255, 255, 255, 0.9)";
-
-          // for (let x = 0; x < canvas.width; x++) {
-          //   let i = x * barWidth;
-          //   let item = arr[i];
-          //   const barHeight = map(item, 0, 255, 0, canvas.height);
-
-          //   // line
-          //   canvasCtx.lineTo(x, canvas.height - barHeight);
-
-          //   // canvasCtx.fillStyle = `rgba(255, 255, 255, ${map(item, 0, 255, 0, 1)})`;
-          //   // canvasCtx.fillRect(x, canvas.height - barHeight, 1, barHeight);
-          // }
-          // canvasCtx.stroke();
-          requestAnimationFrame(draw);
-        }
-
-        draw();
-
-        function handleVideoAudio(videoElement) {
-          const source = audioContext.createMediaElementSource(videoElement);
-          source.connect(analyser);
-          analyser.connect(audioContext.destination);
-        }
-
-        return { handleVideoAudio, canvas };
-      }
-
-      function startAudioAnalysis() {
-        if (!window.AudioContext) {
-          alert("Your browser doesn't support Web Audio API");
-          return;
-        }
-
-        const videoElements = document.querySelectorAll("video");
-        const contexts = [];
-
-        videoElements.forEach((videoElement) => {
-          const { handleVideoAudio, canvas } = createAudioContext();
-          handleVideoAudio(videoElement);
-          contexts.push({ canvas, videoElement });
-        });
-
-        // Keep checking for new videos on the page
-        // setInterval(() => {
-        //   const newVideos = document.querySelectorAll("video");
-        //   newVideos.forEach((videoElement) => {
-        //     const exists = contexts.some(
-        //       (context) => context.videoElement === videoElement
-        //     );
-        //     if (!exists) {
-        //       const { handleVideoAudio, canvas } = createAudioContext();
-        //       handleVideoAudio(videoElement);
-        //       contexts.push({ canvas, videoElement });
-        //     }
-        //   });
-        // }, 2000);
-      }
-
-      startAudioAnalysis();
-    })();
-  },
 };
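
The commented-out block kept in the new onClick points at the alternative tab-capture route: obtain a stream ID via chrome.tabCapture.getMediaStreamId, then exchange it for a MediaStream using the chromeMediaSource: "tab" constraints. A rough sketch of that wiring, split across the two extension contexts involved; the extension's own runInBackground helper is omitted, and the targetTabId/streamId plumbing here is illustrative only:

// Background service worker side (chrome.tabCapture is not exposed to content scripts).
function getTabStreamId(targetTabId) {
  return new Promise((resolve, reject) => {
    chrome.tabCapture.getMediaStreamId({ targetTabId }, (streamId) => {
      if (chrome.runtime.lastError) reject(chrome.runtime.lastError);
      else resolve(streamId);
    });
  });
}

// Consumer side: exchange the stream ID for an audio-only MediaStream,
// which could then be handed to the same drawVisualizer path as getDisplayMedia.
async function captureTabAudio(streamId) {
  return navigator.mediaDevices.getUserMedia({
    audio: {
      mandatory: { chromeMediaSource: "tab", chromeMediaSourceId: streamId },
    },
    video: false,
  });
}

The getDisplayMedia path this commit switches to avoids that plumbing, at the cost of showing the browser's share picker on every invocation.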