Solved the problem. This article helped the most: http://www.marinbezhanov.com/web-development/14/actionscript-3-sound-extract-demystified-or-how-to-draw-a-waveform-in-flash/ but I had to modify it a little, so let me explain my approach.

1. The stage size is hardcoded to 600x100 pixels (if you change the dimensions, you will also have to carefully readjust the parameter ratios in the code).

2. SoundMixer.computeSpectrum() is of no use here at all: it only shows the shape of the sound wave that is playing at that very moment (a short sketch illustrating this is at the end of the post).

3. The code:

import flash.display.Sprite;        // we need a sprite to draw the waveform to
import flash.display.BitmapData;    // used to render the stage into a bitmap for saving
import flash.media.Sound;           // we need the Sound class to extract raw sound data from our Sound objects
import flash.utils.ByteArray;       // we need the ByteArray class to store the extracted sound data
import flash.events.Event;
import flash.net.URLRequest;
import flash.net.FileReference;     // only needed for the commented-out save-to-disk lines below
import com.adobe.images.JPGEncoder; // my addition: the JPEG encoder used to save the result

// The original article embedded an MP3 in the library under the class name "TestSound";
// here an empty Sound object is created instead and an external MP3 is loaded at the
// bottom of the listing.
var sound:Sound = new Sound();
// We will store the raw sound data in a ByteArray called "soundData"
var soundData:ByteArray = new ByteArray();
// We need two sprites to draw the waveforms for the left and right channels
var waveformLeft:Sprite = new Sprite();
var waveformRight:Sprite = new Sprite();
// my addition: the waveform color
var color:uint = 0x333333;

// We set a basic line style and reset the drawing position for each sprite
waveformLeft.graphics.moveTo(0, 0);
waveformLeft.graphics.lineStyle(1, color);
waveformRight.graphics.moveTo(0, 0);
waveformRight.graphics.lineStyle(1, color);
function loadComplete(event:Event):void {
    // This is how we extract the raw sound data from our Sound object. The tricky part is
    // that the extract() method requires two parameters:
    //
    // - a reference to a ByteArray object into which the extracted raw sound data is placed
    //   (the "soundData" ByteArray defined above);
    //
    // - a length parameter specifying the number of sample blocks to extract. The data is
    //   always returned as 32-bit float, 44100 Hz stereo, so the number of sample blocks is
    //   the sound length in seconds multiplied by the sample rate (44100). The "length"
    //   property of the Sound object is in milliseconds, so we divide by 1000 first:
    //
    //   total sample blocks to extract = (sound.length / 1000) * 44100
    //
    //   (see the note after the listing for how much data this actually produces)
    sound.extract(soundData, Math.floor((sound.length / 1000) * 44100));

    // extract() leaves the file pointer at the end of the ByteArray (something not mentioned
    // in the Adobe Flash documentation), so we reset it to the beginning (position = 0).
    soundData.position = 0;

    // For drawing we start at X = 0; "xStep" is how many pixels to move along the X axis
    // with each drawing step. Each pass of the while loop below draws two steps
    // (one for the minimum, one for the maximum), i.e. two pixels.
    var xPos:uint = 0;
    var xStep:uint = 1;

    // The raw samples are floating point numbers between -1 and 1, which is far too small a
    // range to draw directly, so we scale them by "yRatio" (half the stage height) to make
    // the waveform fill the stage vertically.
    var yRatio:uint = stage.stageHeight / 2;

    // "my" is the number of sample blocks consumed per two-pixel drawing step. The factor
    // 87.8 was found by trial and error; it is close to the theoretical 2 * 44.1 = 88.2
    // (44.1 sample blocks per millisecond of sound, times the two pixels drawn per loop
    // pass), which is what stretches the whole track across stage.stageWidth pixels.
    var my:Number = Math.ceil((sound.length / stage.stageWidth) * 87.8);
    // "my1" is roughly the track length in minutes; it is used below to stop reading a bit
    // before the very end of the data.
    var my1:Number = Math.ceil((sound.length / 1000) / 60);
    //trace(my + "-" + (sound.length / stage.stageWidth));

    // Loop through soundData while there are enough bytes left to read; the number of bytes
    // remaining is given by the "bytesAvailable" property of the ByteArray.
    while (soundData.bytesAvailable > (44100 * my1)) {
        var leftMin:Number = Number.MAX_VALUE;   // minimum value for the left channel
        var leftMax:Number = -Number.MAX_VALUE;  // maximum value for the left channel
        var rightMin:Number = Number.MAX_VALUE;  // minimum value for the right channel
        var rightMax:Number = -Number.MAX_VALUE; // maximum value for the right channel
        // (the original code initialized the maxima with Number.MIN_VALUE, but in AS3 that
        // is the smallest *positive* Number, so -Number.MAX_VALUE is the correct start value)

        // Analyze "my" sample blocks and determine their minimum and maximum values.
        for (var i:uint = 0; i < my; i++) {
            // We use readFloat() on the "soundData" ByteArray to retrieve the raw sound data
            // for the left and right channels.
            // Each call to readFloat() retrieves the next 4 bytes from the ByteArray,
            // converts them to a 32-bit single precision floating point number and moves the
            // file pointer on to the next sequence of 4 bytes (also not explained in the
            // Adobe Flash documentation).
            var leftChannel:Number = soundData.readFloat();  // raw sound data for the left channel (4 bytes / 32 bits)
            var rightChannel:Number = soundData.readFloat(); // raw sound data for the right channel (next 4 bytes / 32 bits)
            // 4 bytes + 4 bytes = 8 bytes = 1 sample block, remember? :)

            // check whether we have new minimum or maximum values for the left or right channel
            if (leftChannel < leftMin) leftMin = leftChannel;
            if (leftChannel > leftMax) leftMax = leftChannel;
            if (rightChannel < rightMin) rightMin = rightChannel;
            if (rightChannel > rightMax) rightMax = rightChannel;
        }

        // Draw lines connecting the minimum and maximum values of the left and right
        // channels on their corresponding sprites.
        waveformLeft.graphics.lineTo(xPos, leftMin * yRatio);
        waveformRight.graphics.lineTo(xPos, rightMin * yRatio);
        xPos += xStep;
        waveformLeft.graphics.lineTo(xPos, leftMax * yRatio);
        waveformRight.graphics.lineTo(xPos, rightMax * yRatio);
        xPos += xStep;
    }

    // At this point the waveforms have been drawn onto the left and right channel sprites.
    // Position them vertically centered on the stage and add them to the display list.
    waveformLeft.x = 0;
    waveformLeft.y = stage.stageHeight / 2;
    waveformRight.x = 0;
    waveformRight.y = stage.stageHeight / 2;
    stage.addChild(waveformLeft);
    stage.addChild(waveformRight);

    // Save to a file: first render the stage into a BitmapData object.
    var raw:BitmapData = new BitmapData(stage.stageWidth, stage.stageHeight);
    raw.draw(stage);
    // By the way, for this encoder to work I had to drop the Adobe as3corelib library
    // (https://github.com/mikechambers/as3corelib/) into the same folder as the .fla file.
    var jpgEncoder:JPGEncoder = new JPGEncoder(100);
    var jpgStream:ByteArray = jpgEncoder.encode(raw);
    //var fil:FileReference = new FileReference();
    //fil.save(jpgStream, "image.jpg");
    // (see the note after the listing about calling save() from a click handler)
}
sound.addEventListener(Event.COMPLETE, loadComplete);
sound.load(new URLRequest("test6.mp3"));
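A note on data size, since the extract() formula above can be surprising in practice (my own estimate, not from the article): for a 3-minute track, (180000 / 1000) * 44100 = 7,938,000 sample blocks are extracted, and at 8 bytes per block (two 32-bit floats) that is a bit over 60 MB of raw data sitting in soundData, so expect noticeable memory usage on long files.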
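A note on the commented-out FileReference lines: in Flash Player 10 and later, FileReference.save() can only be called in response to a user action, so writing the JPEG to disk has to happen from something like a click handler rather than directly inside loadComplete(). A minimal sketch of that, assuming jpgStream has been copied into a script-level variable (the variable and handler names below are my own, not from the original code):

import flash.net.FileReference;
import flash.events.MouseEvent;
import flash.utils.ByteArray;

var savedJpg:ByteArray; // assign jpgStream to this at the end of loadComplete()

stage.addEventListener(MouseEvent.CLICK, onSaveClick);

function onSaveClick(e:MouseEvent):void {
    if (savedJpg == null) return;    // nothing encoded yet
    var fil:FileReference = new FileReference();
    fil.save(savedJpg, "image.jpg"); // opens the native "Save As" dialog
}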
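And a note on point 2 above: this is roughly how SoundMixer.computeSpectrum() is used (a sketch for illustration only, not part of the solution). Each call fills the ByteArray with a 512-float snapshot (256 left + 256 right) of whatever is playing at that instant, which is why it cannot reconstruct the waveform of a whole file the way extract() can:

import flash.media.SoundMixer;
import flash.utils.ByteArray;
import flash.events.Event;

var snapshot:ByteArray = new ByteArray();

addEventListener(Event.ENTER_FRAME, onFrame);

function onFrame(e:Event):void {
    // Fills "snapshot" with 512 floats describing only the sound playing right now.
    SoundMixer.computeSpectrum(snapshot, false, 0);
    trace(snapshot.readFloat()); // first left-channel sample of the current snapshot
}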