Xuggler screen recording code affecting the audio recording
I am working on a screencast application that records the screen together with audio. Recording the screen with sound basically works, but the two streams drift apart: if I record for 5 minutes, the generated video file is 5 minutes long while the generated audio file is only about 4 minutes 45 seconds. So essentially the audio and video are out of sync, because the audio file ends up shorter than the video file.
The audio and the video are captured in separate threads, but something is still going wrong.
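To see how large the drift actually is, one thing I can do is compare the wall-clock recording time with the audio duration implied by the number of bytes read from the TargetDataLine. Below is only a rough sketch of that check; the format and the byte/time values are placeholders that mirror my 5-minute test, not my real settings:

import javax.sound.sampled.AudioFormat;

public class DriftCheck {
    public static void main(String[] args) {
        // Placeholder format: 44.1 kHz, 16-bit, stereo, signed, little-endian PCM.
        AudioFormat format = new AudioFormat(44100.0f, 16, 2, true, false);

        long wallClockNanos = 300_000_000_000L; // e.g. 5 minutes of recording
        long bytesCaptured = 50_274_000L;       // e.g. total bytes written to the temp file

        // Bytes of audio per second = sampleRate * frameSize
        // (frameSize = channels * bytes per sample).
        double bytesPerSecond = format.getSampleRate() * format.getFrameSize();

        double wallClockSeconds = wallClockNanos / 1_000_000_000.0;
        double audioSeconds = bytesCaptured / bytesPerSecond;

        // If audioSeconds is noticeably smaller, samples are being lost,
        // for example while the capture thread is paused or falls behind.
        System.out.printf("wall clock: %.1f s, audio data: %.1f s, missing: %.1f s%n",
                wallClockSeconds, audioSeconds, wallClockSeconds - audioSeconds);
    }
}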
The video capturing code:
public void run() {
    setVideoParameters();
    FRAME_RATE = frameRate;
    // let's make an IMediaWriter to write the file.
    writer = ToolFactory.makeWriter(movieFile.getName());
    screenBounds = new Rectangle(RecorderSettings.m_CapRectX,
            RecorderSettings.m_CapRecY,
            (int) RecorderSettings.m_CapRectWidth,
            (int) RecorderSettings.m_CapRecHeight);
    // We tell it we're going to add one video stream, with id 0,
    // at position 0, and that it will have a fixed frame rate of
    // FRAME_RATE.
    // ScreenWidth && ScreenHeight multiplied by 3/4 to reduce the pixels
    // to 3/4 of the actual size.
    // writer.addVideoStream(0, 0, ICodec.ID.CODEC_ID_MPEG4,
    //         screenBounds.width, screenBounds.height);
    writer.addVideoStream(0, 0, vcodec.getID(),
            (screenBounds.width * upperLimit) / lowerLimit,
            (screenBounds.height * upperLimit) / lowerLimit);
    // Start time of the recording.
    startTime = System.nanoTime();
    while (isStopProceesBtnClk) {
        try {
            if (!isStopProceesBtnClk) {
                break;
            } else {
                synchronized (this) {
                    while (isPauseProceesBtnClk) {
                        try {
                            // Catches the starting time of a pause.
                            pauseStartTime = System.nanoTime();
                            wait();
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
                BufferedImage screen = getDesktopScreenshot();
                // Convert to the right image type.
                BufferedImage bgrScreen = convertToType(screen, BufferedImage.TYPE_3BYTE_BGR);
                // Encode the image to stream #0, subtracting any accumulated pause time.
                if (totalPauseTime > 0) {
                    writer.encodeVideo(0, bgrScreen,
                            (System.nanoTime() - startTime) - totalPauseTime,
                            TimeUnit.NANOSECONDS);
                } else {
                    writer.encodeVideo(0, bgrScreen,
                            System.nanoTime() - startTime,
                            TimeUnit.NANOSECONDS);
                }
                // Sleep for one frame interval (in milliseconds).
                try {
                    Thread.sleep((long) (1000 / FRAME_RATE));
                } catch (InterruptedException e) {
                    // ignore
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    // Tell the writer to close and write the trailer if needed.
    try {
        writer.close();
        writer = null;
        Runtime.getRuntime().gc();
    } catch (Exception e) {
        // ignore errors
    }
}
public static BufferedImage convertToType(BufferedImage sourceImage, int targetType) {
    BufferedImage image;
    // If the source image is already the target type, return the source image.
    if (sourceImage.getType() == targetType) {
        image = sourceImage;
    }
    // Otherwise create a new image of the target type and draw the source into it.
    else {
        image = new BufferedImage(sourceImage.getWidth(), sourceImage.getHeight(), targetType);
        if (true) {
            // Paint the mouse-cursor icon onto the captured frame.
            int x = MouseInfo.getPointerInfo().getLocation().x - 25;
            int y = MouseInfo.getPointerInfo().getLocation().y - 37;
            Graphics2D graphics2D = sourceImage.createGraphics();
            graphics2D.drawImage(SimpleWebBrowserExample.m_MouseIcon, x, y,
                    48, 48, null);
        }
        image.getGraphics().drawImage(sourceImage, 0, 0, null);
    }
    return image;
}
private BufferedImage getDesktopScreenshot() {
    try {
        // Robot captures the screenshot.
        Robot robot = new Robot();
        Rectangle captureSize = new Rectangle(screenBounds);
        return robot.createScreenCapture(captureSize);
    } catch (AWTException e) {
        e.printStackTrace();
        return null;
    }
}
The audio capturing code:
public void run() {
    init();
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat,
            (int) (m_AudioFreq * sampleSizeInBytes));
    try {
        m_TargetLine = (TargetDataLine) AudioSystem.getLine(info);
        m_TargetLine.open(audioFormat, info.getMaxBufferSize());
    } catch (Exception exp) {
        exp.printStackTrace();
    }
    AudioFileFormat.Type targetType = AudioFileFormat.Type.WAVE;
    try {
        m_outputFile = new File(bufferFileName);
        while (m_outputFile.exists() && !m_outputFile.delete()) {
            m_outputFile = BerylsUtility.getNextFile(m_outputFile);
        }
        FileOutputStream outFileStream = new FileOutputStream(m_outputFile);
        audioOutStream = new BufferedOutputStream(outFileStream, memoryBufferSize);
    } catch (FileNotFoundException fe) {
        System.out.println("FileNotFoundException in VoiceCapturing.java :: " + fe);
    } catch (OutOfMemoryError oe) {
        System.out.println("OutOfMemoryError in VoiceCapturing.java " + oe);
    }
    while (isStopProceesBtnClk) {
        try {
            if (!isStopProceesBtnClk) {
                break;
            } else {
                synchronized (this) {
                    while (isPauseProceesBtnClk) {
                        try {
                            wait();
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
                try {
                    m_TargetLine.start();
                    int cnt = m_TargetLine.read(tempBuffer, 0, tempBuffer.length);
                    if (cnt > 0) {
                        audioOutStream.write(tempBuffer, 0, cnt);
                    }
                } catch (Exception e) {
                    System.out.println("Exception in VoiceCapturing.java :: " + e);
                }
                /* finally {
                    finish();
                } */
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    finish();
}
public synchronized void finish() {
    try {
        System.out.println("AudioFinish");
        audioOutStream.close();
        FileInputStream audioInAgain = new FileInputStream(m_outputFile);
        long sampleBytes = m_outputFile.length();
        long sizeOfFrame = (long) m_SampleRate * m_Channels / 8;
        BufferedInputStream buffAudioIn = new BufferedInputStream(audioInAgain, memoryBufferSize);
        AudioInputStream a_input = new AudioInputStream(buffAudioIn, audioFormat, sampleBytes / sizeOfFrame);
        while (m_AudioFile.exists() && !m_AudioFile.canWrite()) {
            m_AudioFile = BerylsUtility.getNextFile(m_AudioFile);
        }
        AudioSystem.write(a_input, m_targetType, m_AudioFile);
        buffAudioIn.close();
        m_outputFile.delete();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Could someone point me in the right direction here? Thanks.
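Would it also be better to drop the separate WAV file and push the microphone data into the same IMediaWriter with explicit timestamps, so that Xuggler interleaves the audio and video itself? Below is only a sketch of what I mean; the stream index 1, the 16-bit little-endian assumption and the AudioMuxSketch helper are illustrative, not part of my current code.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.concurrent.TimeUnit;
import com.xuggle.mediatool.IMediaWriter;

public class AudioMuxSketch {

    // Called once, after writer.addVideoStream(0, 0, ...), to declare an
    // audio stream on the same writer.
    public static void addAudio(IMediaWriter writer, int channels, int sampleRate) {
        writer.addAudioStream(1, 0, channels, sampleRate);
    }

    // Called from the audio thread for every buffer read from the
    // TargetDataLine; startTimeNanos is the same System.nanoTime() value
    // the video thread uses, so both streams share one clock.
    public static void encodeChunk(IMediaWriter writer, byte[] tempBuffer, int cnt,
                                   long startTimeNanos) {
        // Convert the raw bytes to 16-bit little-endian samples.
        short[] samples = new short[cnt / 2];
        ByteBuffer.wrap(tempBuffer, 0, cnt)
                .order(ByteOrder.LITTLE_ENDIAN)
                .asShortBuffer()
                .get(samples);
        writer.encodeAudio(1, samples, System.nanoTime() - startTimeNanos,
                TimeUnit.NANOSECONDS);
    }
}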