Recording Video with Java_TylerZhong_qwer



import javax.imageio.ImageIO;
import javax.sound.sampled.*;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.awt.event.*;
import java.awt.image.BufferedImage;
import java.io.*;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileChannel.MapMode;
import java.util.*;
import java.util.List;
import java.util.concurrent.*;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;

/**
* A small Java screen-recording program: the video file is saved to the system temp
* directory; press Enter for full-screen playback and ESC to leave full screen.
*
*/
@SuppressWarnings("serial")
public class MainFrame extends JFrame {

private static final int WIDTH = 200;
private static final int HEIGHT = 70;
private static final Color BUTTON_COLOR = new Color(155, 183, 214);
private static final Color BUTTON_FOCUS_COLOR = new Color(87, 156, 242);
private static final Color BUTTON_DISABLE_COLOR = new Color(180, 207, 229);
private static final Font BUTTON_FONT = new Font("华文行楷", Font.BOLD, 15);
private static final String DEFAULT_FILE_PATH = System.getProperty("java.io.tmpdir");

private static Dimension screenSize;

private final JButton start = new JButton("开始");
private final JButton pause = new JButton("暂停");
private final JButton restart = new JButton("继续");
private final JButton stop = new JButton("完成");
private final JButton play = new JButton("播放");
private final JButton close = new JButton("退出");
private ScreenRecorder sr;

private String currentFilePath;

public MainFrame() {
setUndecorated(true);
setSize(WIDTH, HEIGHT);

Toolkit tk = Toolkit.getDefaultToolkit();
screenSize = tk.getScreenSize();
setLocation((int) screenSize.getWidth() - WIDTH, (int) (screenSize.getHeight() - tk.getScreenInsets(getGraphicsConfiguration()).bottom - HEIGHT));

MouseListener buttonMouseListener = new MouseAdapter() {
public void mouseEntered(MouseEvent e) {
JButton btn = (JButton) e.getSource();
if(btn.isEnabled()) {
btn.setBackground(BUTTON_FOCUS_COLOR);
}
}
public void mouseExited(MouseEvent e) {
JButton btn = (JButton) e.getSource();
if(btn.isEnabled()) {
btn.setBackground(BUTTON_COLOR);
}
}
};
start.addMouseListener(buttonMouseListener);
pause.addMouseListener(buttonMouseListener);
restart.addMouseListener(buttonMouseListener);
stop.addMouseListener(buttonMouseListener);
play.addMouseListener(buttonMouseListener);
close.addMouseListener(buttonMouseListener);

ChangeListener changeListener = new ChangeListener() {
public void stateChanged(ChangeEvent e) {
JButton btn = (JButton) e.getSource();
if(btn.isEnabled()) {
btn.setBackground(BUTTON_COLOR);
} else {
btn.setBackground(BUTTON_DISABLE_COLOR);
}
}
};
start.addChangeListener(changeListener);
pause.addChangeListener(changeListener);
restart.addChangeListener(changeListener);
stop.addChangeListener(changeListener);
play.addChangeListener(changeListener);

start.setFont(BUTTON_FONT);
pause.setFont(BUTTON_FONT);
restart.setFont(BUTTON_FONT);
stop.setFont(BUTTON_FONT);
play.setFont(BUTTON_FONT);
close.setFont(BUTTON_FONT);

start.setBackground(BUTTON_COLOR);
pause.setBackground(BUTTON_DISABLE_COLOR);
restart.setBackground(BUTTON_DISABLE_COLOR);
stop.setBackground(BUTTON_DISABLE_COLOR);
play.setBackground(BUTTON_DISABLE_COLOR);
close.setBackground(BUTTON_COLOR);

pause.setEnabled(false);
restart.setEnabled(false);
stop.setEnabled(false);
play.setEnabled(false);

start.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
sr = new ScreenRecorder();
boolean successFlag = sr.createFile();
if(successFlag){
try {
start.setEnabled(false);
// pause.setEnabled(true);
stop.setEnabled(true);
sr.record();
} catch (Exception exception) {
exception.printStackTrace();
}
}
// currentFilePath =/**/ DEFAULT_FILE_PATH + System.currentTimeMillis();
// currentFilePath = "E:\\aa\\" + System.currentTimeMillis()+".mp4";
// RecordHelper.start(currentFilePath);
}
});
pause.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
pause.setEnabled(false);
restart.setEnabled(true);
stop.setEnabled(false);

RecordHelper.pause();
}
});
restart.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
pause.setEnabled(true);
restart.setEnabled(false);
stop.setEnabled(true);

RecordHelper.restart();
}
});
stop.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
start.setEnabled(true);
pause.setEnabled(false);
stop.setEnabled(false);
play.setEnabled(true);
try {
sr.stopRecord();
} catch (InterruptedException interruptedException) {
interruptedException.printStackTrace();
}
// RecordHelper.stop();
}
});
// Playback listener disabled
/*play.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
setVisible(false);

new PlayerFrame();
}
});*/
close.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
System.exit(0);
}
});

setLayout(new GridLayout(1, 3));
add(start);
// add(pause);
// add(restart);
add(stop);
// add(play);
add(close);

setVisible(true);
}

class PlayerFrame extends JFrame {

private MyVideo mv;
private final ExecutorService player;
private volatile BufferedImage lastImage;

public PlayerFrame() {
player = Executors.newSingleThreadExecutor();
player.execute(new Runnable() {
public void run() {
mv = new MyVideo(new File(currentFilePath), false, (byte) 0);
mv.position(0);
}
});
player.execute(new Runnable() {
public void run() {
for(;;) {
Object[] data = mv.nextFrame();
if(0 == data.length) {
player.shutdown();
AudioHelper.flush();
dispose();
MainFrame.this.setVisible(true);
play.setEnabled(true);
return;
}

lastImage = (BufferedImage) data[0];
repaint();

byte[] audioData = (byte[]) data[1];
AudioHelper.writeData(audioData);
}
}
});

setUndecorated(true);
setBounds((int) (screenSize.getWidth() / 8), (int) (screenSize.getHeight() / 8), (int) (screenSize.getWidth() / 4 * 3), (int) (screenSize.getHeight() / 4 * 3));

final JPanel canvas = new JPanel() {
protected void paintComponent(Graphics g) {
super.paintComponent(g);

if(null != lastImage) {
int w = lastImage.getWidth();
int h = lastImage.getHeight();

if(w * getHeight() > h * getWidth()) {
g.drawImage(lastImage, 0, (int) (getHeight() - getWidth() * 1.0 / w * h) / 2, getWidth(), (int) (getWidth() * 1.0 / w * h), null);
} else {
g.drawImage(lastImage, (int) (getWidth() - getHeight() * 1.0 / h * w) / 2, 0, (int) (getHeight() * 1.0 / h * w), getHeight(), null);
}
}

Point mouseLocation = MouseInfo.getPointerInfo().getLocation();
if(new Rectangle(PlayerFrame.this.getLocation().x, PlayerFrame.this.getLocation().y + getHeight() - 70, getWidth(), 50).contains(mouseLocation)) {
Color c = g.getColor();
g.setColor(new Color(0, 0, 100, 60));
g.fill3DRect(0, getHeight() - 70, getWidth(), 50, true);

g.setColor(new Color(97, 211, 225, 120));
g.fill3DRect(mouseLocation.x - PlayerFrame.this.getX() - 1, getHeight() - 70, 3, 50, true);

g.fill3DRect(0, getHeight() - 70, mv.position() * getWidth() / mv.length(), 50, true);

g.setColor(c);
}
}

};
canvas.addMouseMotionListener(new MouseAdapter() {
public void mouseMoved(MouseEvent e) {
repaint();
}
});
canvas.addMouseListener(new MouseAdapter() {
public void mouseClicked(MouseEvent e) {
if(new Rectangle(PlayerFrame.this.getLocation().x, PlayerFrame.this.getLocation().y + getHeight() - 70, getWidth(), 50).contains(e.getLocationOnScreen()) && null != mv) {
mv.position((e.getLocationOnScreen().x - PlayerFrame.this.getLocation().x) * mv.length() / getWidth());
}
}
});
canvas.setBackground(Color.BLACK);
add(canvas);

addKeyListener(new KeyAdapter() {
public void keyReleased(KeyEvent e) {
if(e.getKeyCode() == KeyEvent.VK_ENTER) {
PlayerFrame.this.setLocation(0, 0);
PlayerFrame.this.setSize(screenSize);
} else if(e.getKeyCode() == KeyEvent.VK_ESCAPE) {
PlayerFrame.this.setBounds((int) (screenSize.getWidth() / 8), (int) (screenSize.getHeight() / 8), (int) (screenSize.getWidth() / 4 * 3), (int) (screenSize.getHeight() / 4 * 3));
}
}
});
setVisible(true);
}
}

public static void main(String[] args) throws Exception {
new MainFrame();
}
}

/**
* Represents one video file. The on-disk layout is:
* {
* frame list : [
* frame : {
* n bytes picture data
* n bytes audio data
* }
* ]
* index list : [
* index entry : {
* 1 byte frame flag: full = 0 | incremental = 1
* 2 bytes compressed audio data size
* 3 bytes compressed picture data size
* 4 bytes frame position in the file
* }
* ]
* index list position : 4 bytes
* frames per second : 1 byte
* }
*
*/
class MyVideo implements Closeable {

private static final String HEADER = "作者:dgqjava qq群:326337220";

private final byte[] header = new byte[30];
private final byte rate;
private final List<FrameIndex> indexList = new ArrayList<FrameIndex>();
private volatile int frameIndex;
private volatile BufferedImage last;
private volatile int[] oldRgbData;
private long lastFrameTime = -1;

private final boolean isCreate;
private final RandomAccessFile raf;
private final FileChannel fileChannel;
private final MappedByteBuffer mbb;

public MyVideo(File file, boolean isCreate, byte rate) {
try {
this.isCreate = isCreate;
if(isCreate) {
this.rate = rate;
mbb = null;
file.createNewFile();
raf = new RandomAccessFile(file, "rw");
fileChannel = raf.getChannel();
ByteBuffer bb = ByteBuffer.wrap(HEADER.getBytes("UTF-8"));
while(bb.hasRemaining()) {
fileChannel.write(bb);
}
} else {
raf = new RandomAccessFile(file, "r");
fileChannel = raf.getChannel();
mbb = fileChannel.map(MapMode.READ_ONLY, 0, fileChannel.size());
mbb.get(header, 0, header.length);
if(!HEADER.equals(new String(header, "UTF-8"))) {
throw new RuntimeException("文件格式错误");
}

mbb.position(mbb.capacity() - 5);

byte[] bs = new byte[4];
mbb.get(bs);
int indexIndex = ByteBuffer.wrap(bs).getInt();
this.rate = mbb.get();
mbb.position(indexIndex);

byte[] indexByte = new byte[10];
while(mbb.position() < mbb.capacity() - 5) {
mbb.get(indexByte);
indexList.add(FrameIndex.fromByteArray(indexByte));
}
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}

public synchronized void position(int newFrameIndex) {
try {
frameIndex = newFrameIndex;
int lastFullFrameIndex;
for(lastFullFrameIndex = frameIndex; lastFullFrameIndex >= 0; lastFullFrameIndex--) {
if(indexList.get(lastFullFrameIndex).isFull) {
break;
}
}

Frame frame = getFrame(lastFullFrameIndex);
last = ImageIO.read(new ByteArrayInputStream(ZipHelper.unZip(frame.getPicData()).toByteArray()));
oldRgbData = ImageHelper.getRgb(last);
for(int i = lastFullFrameIndex + 1; i <= frameIndex; i++) {
frame = getFrame(i);
int[] rgb = ImageHelper.byteArray2RgbArray(ZipHelper.unZip(frame.getPicData()).toByteArray());
ImageHelper.setRgbArrayChange(last, oldRgbData, rgb);
}
} catch (Exception e) {
e.printStackTrace();
}
}

public int position() {
return frameIndex;
}

public int length() {
return indexList.size();
}

public synchronized Object[] nextFrame() {
try {
Frame frame = getFrame(++frameIndex);
if(null == frame) {
return new Object[]{};
}
byte[] audioData = ZipHelper.unZip(frame.getAudioData()).toByteArray();
int[] rgb = null;
if(frame.getFlag() == Frame.FULL) {
last = ImageIO.read(new ByteArrayInputStream(ZipHelper.unZip(frame.getPicData()).toByteArray()));
} else {
rgb = ImageHelper.byteArray2RgbArray(ZipHelper.unZip(frame.getPicData()).toByteArray());
}

if(-1 != lastFrameTime) {
long period = System.currentTimeMillis() - lastFrameTime;
if(period < 1000 / rate) {
Thread.sleep(1000 / rate - period);
}
}
if(null != rgb) {
ImageHelper.setRgbArrayChange(last, oldRgbData, rgb);
}
lastFrameTime = System.currentTimeMillis();

return new Object[] {last, audioData};
} catch (Exception e) {
throw new RuntimeException(e);
}
}

public void addFrame(Frame frame) {
try {
FrameIndex fi = new FrameIndex(frame.getFlag() == Frame.FULL, frame.getAudioData().length, frame.getPicData().length, (int) fileChannel.position());
indexList.add(fi);
write(frame.getPicData());
write(frame.getAudioData());
} catch (IOException e) {
e.printStackTrace();
}
}

public Frame getFrame(int index) {
if(index == indexList.size()) {
return null;
}
FrameIndex frameIndex = indexList.get(index);
byte[] picData = new byte[frameIndex.picDataSize];
byte[] audioData = new byte[frameIndex.audioDataSize];
mbb.position(frameIndex.index);
mbb.get(picData);
mbb.get(audioData);
return new Frame(frameIndex.isFull ? Frame.FULL : Frame.INCREMENTAL, picData, audioData);
}

public void close() {
try {
if(isCreate) {
int indexListIndex = (int) fileChannel.position();
for(FrameIndex fi : indexList) {
write(fi.toByteArray());
}
write(ByteBuffer.allocate(4).putInt(indexListIndex).array());
write(new byte[] {rate});
}
fileChannel.force(false);
raf.close();
fileChannel.close();
} catch (IOException e) {
e.printStackTrace();
}
}

private void write(byte[] data) throws IOException {
ByteBuffer bb = ByteBuffer.wrap(data);
while(bb.hasRemaining()) {
fileChannel.write(bb);
}
}

static class FrameIndex {
private final boolean isFull;
private final int audioDataSize;
private final int picDataSize;
private final int index;

public FrameIndex(boolean isFull, int audioDataSize, int picDataSize, int index) {
this.isFull = isFull;
this.audioDataSize = audioDataSize;
this.picDataSize = picDataSize;
this.index = index;
}

public boolean isFull() {
return isFull;
}

public int getAudioDataSize() {
return audioDataSize;
}

public int getPicDataSize() {
return picDataSize;
}

public int getIndex() {
return index;
}

public byte[] toByteArray() {
byte[] result = new byte[10];
result[0] = (isFull ? Frame.FULL : Frame.INCREMENTAL);
System.arraycopy(ByteBuffer.allocate(4).putInt(audioDataSize).array(), 2, result, 1, 2);
System.arraycopy(ByteBuffer.allocate(4).putInt(picDataSize).array(), 1, result, 3, 3);
System.arraycopy(ByteBuffer.allocate(4).putInt(index).array(), 0, result, 6, 4);
return result;
}

public static FrameIndex fromByteArray(byte[] bs) {
boolean isFull = (Frame.FULL == bs[0]);
byte[] temp = new byte[8];
System.arraycopy(bs, 1, temp, 6, 2);
int audioDataSize = (int) ByteBuffer.wrap(temp).getLong();
System.arraycopy(bs, 3, temp, 5, 3);
int picDataSize = (int) ByteBuffer.wrap(temp).getLong();
System.arraycopy(bs, 6, temp, 4, 4);
int index = (int) ByteBuffer.wrap(temp).getLong();

return new FrameIndex(isFull, audioDataSize, picDataSize, index);
}
}

static class Frame {
public static final Frame TERMINATE_FRAME = new Frame((byte) 0, null, null);
public static final byte FULL = 0;
public static final byte INCREMENTAL = 1;

private final byte flag;
private final byte[] picData;
private final byte[] audioData;

public Frame(byte flag, byte[] picData, byte[] audioData) {
this.flag = flag;
this.picData = picData;
this.audioData = audioData;
}

public byte getFlag() {
return flag;
}

public byte[] getPicData() {
return picData;
}

public byte[] getAudioData() {
return audioData;
}
}
}
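
/*
 * Illustrative sketch (not part of the original recorder): the 5-byte trailer described
 * in the MyVideo format comment (a 4-byte index-list offset followed by a 1-byte frame
 * rate) is what close() writes last and what the reading constructor pulls from the end
 * of the mapped file. The class name and the sample values are made up for demonstration.
 */
class MyVideoTrailerDemo {
    public static void main(String[] args) {
        int indexListOffset = 123456; // hypothetical position of the index list in the file
        byte rate = 5;                // hypothetical frames per second

        // Pack the trailer the same way MyVideo.close() does (big-endian int + single byte).
        byte[] trailer = ByteBuffer.allocate(5).putInt(indexListOffset).put(rate).array();

        // Unpack it the same way the reading constructor does after seeking to capacity - 5.
        ByteBuffer bb = ByteBuffer.wrap(trailer);
        System.out.println("index list offset = " + bb.getInt() + ", frames per second = " + bb.get());
    }
}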

class RecordHelper {

private static final byte RECORD_RATE = 5; // frames captured per second
private static final int DEFAULT_FULL_PIC_RATE = 80; // initial changed-pixel threshold (%) below which an incremental frame is kept
private static final int FULL_PIC_PERIOD = 60; // seconds after which the threshold decays to 0 and a full frame is forced

private static final Rectangle rectangle = new Rectangle(Toolkit.getDefaultToolkit().getScreenSize());
private static ScheduledExecutorService screenCapture;
private static ScheduledExecutorService audioProcesser;
private static final ExecutorService ioProcesser = Executors.newSingleThreadExecutor();
private static final ExecutorService dataProcesser = Executors.newSingleThreadExecutor();

private static final BlockingQueue<Object[]> screenQueue = new LinkedBlockingQueue<Object[]>();
private static final BlockingQueue<byte[]> audioQueue = new LinkedBlockingQueue<byte[]>();
private static final BlockingQueue<MyVideo.Frame> frameQueue = new LinkedBlockingQueue<MyVideo.Frame>();

private static volatile boolean generateFull = false;

public static void start(final String filePath) {
generateFull = true;
restart();

dataProcesser.execute(new Runnable() {
public void run() {
try {
double lastFullPicTime = -1;
int[] latestRgb = null;

for(;;) {
byte[] picData;
byte flag;
Object[] imageData = screenQueue.take();
byte[] audioData = audioQueue.take();

if(generateFull) {
latestRgb = ImageHelper.getRgb((BufferedImage) imageData[1]);
picData = ZipHelper.zip(ImageHelper.image2Stream((BufferedImage) imageData[1]).toByteArray()).toByteArray();
flag = MyVideo.Frame.FULL;
lastFullPicTime = (Long) imageData[0];
frameQueue.put(new MyVideo.Frame(flag, picData, ZipHelper.zip(audioData).toByteArray()));
generateFull = false;
continue;
}

int[] temp = latestRgb.clone();
double rate = DEFAULT_FULL_PIC_RATE - ((((Long) imageData[0]) - lastFullPicTime) * DEFAULT_FULL_PIC_RATE / 1000 / FULL_PIC_PERIOD);
byte[] changeData = ImageHelper.getByteArrayChange(temp, (BufferedImage) imageData[1], rate);
if(null != changeData) {
latestRgb = temp;
picData = ZipHelper.zip(changeData).toByteArray();
flag = MyVideo.Frame.INCREMENTAL;
} else {
latestRgb = ImageHelper.getRgb((BufferedImage) imageData[1]);
picData = ZipHelper.zip(ImageHelper.image2Stream((BufferedImage) imageData[1]).toByteArray()).toByteArray();
flag = MyVideo.Frame.FULL;
lastFullPicTime = (Long) imageData[0];
}
frameQueue.put(new MyVideo.Frame(flag, picData, ZipHelper.zip(audioData).toByteArray()));
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
});

ioProcesser.execute(new Runnable() {
public void run() {
MyVideo video = new MyVideo(new File(filePath), true, RECORD_RATE);
try {
for(;;) {
MyVideo.Frame frame = frameQueue.take();
if(frame == MyVideo.Frame.TERMINATE_FRAME) {
return;
}
video.addFrame(frame);
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} finally {
video.close();
}
}
});
}

public static void pause() {
screenCapture.shutdownNow();
audioProcesser.shutdownNow();
}

public static void restart() {
screenCapture = Executors.newSingleThreadScheduledExecutor();
audioProcesser = Executors.newSingleThreadScheduledExecutor();

screenCapture.scheduleAtFixedRate(new Runnable() {
public void run() {
try {
screenQueue.put(new Object[] {System.currentTimeMillis(), ImageHelper.getScreen(rectangle)});
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}, 0, 1000 / RECORD_RATE, TimeUnit.MILLISECONDS);

final int bufferSize = AudioHelper.getBufferSize();
byte[] data = new byte[bufferSize];
AudioHelper.readData(data);
audioProcesser.scheduleAtFixedRate(new Runnable() {
public void run() {
try {
byte[] data = new byte[bufferSize];
int count = AudioHelper.readData(data);
audioQueue.put(Arrays.copyOf(data, count));
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}, 0, 1000 / RECORD_RATE, TimeUnit.MILLISECONDS);
}

public static void stop() {
try {
pause();
frameQueue.put(MyVideo.Frame.TERMINATE_FRAME);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
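
/*
 * Illustrative sketch (not part of the recorder): shows how RecordHelper's full-frame
 * threshold decays. A frame is stored as an incremental diff while the changed-pixel
 * percentage stays below the threshold passed to ImageHelper.getByteArrayChange; the
 * threshold starts at DEFAULT_FULL_PIC_RATE (80%) and falls to 0 after FULL_PIC_PERIOD
 * (60 s), which forces a periodic full frame. The class and printed values are for
 * demonstration only.
 */
class FullFrameThresholdDemo {
    public static void main(String[] args) {
        int defaultFullPicRate = 80; // mirrors RecordHelper.DEFAULT_FULL_PIC_RATE (percent)
        int fullPicPeriod = 60;      // mirrors RecordHelper.FULL_PIC_PERIOD (seconds)
        for (long elapsedMs : new long[] {0, 15_000, 30_000, 60_000}) {
            double threshold = defaultFullPicRate - (elapsedMs * defaultFullPicRate / 1000.0 / fullPicPeriod);
            System.out.println(elapsedMs / 1000 + " s since last full frame -> allow up to " + threshold + "% changed pixels");
        }
    }
}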

final class ImageHelper {

private static final Robot robot;
private static final int DEFAULT_RGB = (255 << 24) | (0 << 16) | (0 << 8) | (0 << 0);
private static final String DEFAULT_FORMAT_NAME = "BMP";

static {
try {
robot = new Robot();
} catch (AWTException e) {
throw new RuntimeException(e);
}
}

public static BufferedImage getScreen(Rectangle rectangle) {
return robot.createScreenCapture(rectangle);
}

/**
* Returns the RGB values of an image as a flat array (row-major).
* @param images source image
* @return RGB array of length width * height
*/
public static int[] getRgb(BufferedImage images) {
int w = images.getWidth();
int h = images.getHeight();

int[] result = new int[w * h];
for(int i = 0; i < h; i++) {
for(int j = 0; j < w; j++) {
result[i * w + j] = images.getRGB(j, i);
}
}

return result;
}

/**
* Computes the per-pixel RGB diff against the previous frame and updates the old RGB
* array in place with the new frame's changes. Unchanged pixels are marked with
* DEFAULT_RGB in the returned array.
* @param oldRgbData RGB array of the previous frame (updated in place)
* @param currentImage the new frame
* @return diff array, one entry per pixel
*/
public static int[] getRgbChange(int[] oldRgbData, BufferedImage currentImage) {
int w = currentImage.getWidth();
int h = currentImage.getHeight();

int[] result = new int[w * h];
for(int i = 0; i < h; i++) {
for(int j = 0; j < w; j++) {
int index = i * w + j;
int oldRgb = oldRgbData[index];
int newRgb = currentImage.getRGB(j, i);

int rgb;
if(oldRgb != newRgb) {
rgb = (DEFAULT_RGB == newRgb ? oldRgb : newRgb);
result[index] = rgb;
} else {
rgb = DEFAULT_RGB;
result[index] = rgb;
}
oldRgbData[index] = rgb;
}
}

return result;
}

/**
* Computes the RGB diff against the previous frame, run-length encodes it into a byte
* array, and updates the old RGB array in place with the new frame's changes.
* @param oldRgbData RGB array of the previous frame (updated in place)
* @param currentImage the new frame
* @param allowPercent if the percentage of changed pixels reaches this value the diff
*        is abandoned and null is returned (a full frame should be stored instead)
* @return encoded diff, or null when too many pixels changed
*/
public static byte[] getByteArrayChange(int[] oldRgbData, BufferedImage currentImage, double allowPercent) {
try {
int h = currentImage.getHeight();
int w = currentImage.getWidth();

int total = w * h;
int changeCount = 0;
ByteBuffer buffer = ByteBuffer.allocate(4);
int samePointCount = 0;
List<Integer> chunkCountList = new ArrayList<Integer>();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
for(int i = 0; i < h; i++) {
for(int j = 0; j < w; j++) {
int index = i * w + j;
int oldRgb = oldRgbData[index];
int newRgb = currentImage.getRGB(j, i);

int rgb;
if(oldRgb != newRgb) {
if(changeCount++ * 100D / total >= allowPercent) {
return null;
}
oldRgbData[index] = newRgb;
rgb = (DEFAULT_RGB == newRgb ? oldRgb : newRgb);
} else {
rgb = DEFAULT_RGB;
}

if(DEFAULT_RGB == rgb) {
if(samePointCount >= 0) {
samePointCount++;
} else {
chunkCountList.add(samePointCount);
samePointCount = 1;
}
} else {
if(samePointCount <= 0) {
samePointCount--;
} else {
chunkCountList.add(samePointCount);
samePointCount = -1;
}
buffer.clear();
baos.write(buffer.putInt(rgb).array());
}
}
}
chunkCountList.add(samePointCount);

byte[] rgbData = baos.toByteArray();
byte[] result = new byte[(chunkCountList.size() + 1) * 4 + rgbData.length];
buffer.clear();
int index = 0;
System.arraycopy(buffer.putInt(chunkCountList.size()).array(), 0, result, index, 4);
for(int chunkSize : chunkCountList) {
buffer.clear();
System.arraycopy(buffer.putInt(chunkSize).array(), 0, result, index += 4, 4);
}
System.arraycopy(rgbData, 0, result, index + 4, rgbData.length);
return result;
} catch (IOException e) {
throw new RuntimeException(e);
}
}

/**
* Applies a decoded RGB diff to the last image and updates the old RGB array in place.
* @param lastImage image to update
* @param oldRgbData RGB array of the previous frame (updated in place)
* @param rgbChangeData decoded diff array
*/
public static void setRgbArrayChange(BufferedImage lastImage, int[] oldRgbData, int[] rgbChangeData) {
int h = lastImage.getHeight();
int w = lastImage.getWidth();

for(int i = 0; i < h; i++) {
for(int j = 0; j < w; j++) {
int index = i * w + j;
int newRgb = rgbChangeData[index];

if(DEFAULT_RGB != newRgb) {
int oldRgb = oldRgbData[index];
int rgb;
if(oldRgb != newRgb) {
rgb = newRgb;
lastImage.setRGB(j, i, rgb);
} else {
rgb = DEFAULT_RGB;
lastImage.setRGB(j, i, rgb);
}
oldRgbData[index] = rgb;
}
}
}
}

public static void setRgbArray(BufferedImage image, int[] rgbChangeData) {
int h = image.getHeight();
int w = image.getWidth();

for(int i = 0; i < h; i++) {
for(int j = 0; j < w; j++) {
int newRgb = rgbChangeData[i * w + j];

if(DEFAULT_RGB != newRgb) {
image.setRGB(j, i, newRgb);
}
}
}
}

public static ByteArrayOutputStream image2Stream(BufferedImage image) {
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ImageIO.write(image, DEFAULT_FORMAT_NAME, baos);
return baos;
} catch (IOException e) {
throw new RuntimeException(e);
}
}

/**
* Decodes a run-length encoded diff byte array back into an RGB array.
* @param data encoded diff produced by getByteArrayChange
* @return RGB array with DEFAULT_RGB marking unchanged pixels
*/
public static int[] byteArray2RgbArray(byte[] data) {
ByteBuffer buffer = ByteBuffer.allocate(4);
buffer.clear();
buffer.put(new byte[] {data[0], data[1], data[2], data[3]});
buffer.flip();
int chunkCount = buffer.getInt();
List<Integer> chunkCountList = new ArrayList<Integer>(chunkCount);
int index = 4;
int rgbCount = 0;
for(int i = 0; i < chunkCount; i++) {
buffer.clear();
buffer.put((new byte[] {data[index + 0], data[index + 1], data[index + 2], data[index + 3]}));
buffer.flip();

int chunkSize = buffer.getInt();
chunkCountList.add(chunkSize);
rgbCount += Math.abs(chunkSize);
index += 4;
}

int[] rgbData = new int[rgbCount];
int rgbDataIndex = 0;
for(int chunkSize : chunkCountList) {
if(chunkSize > 0) {
for(int j = 0; j < chunkSize; j++) {
rgbData[rgbDataIndex++] = DEFAULT_RGB;
}
} else {
for(int j = 0; j > chunkSize; j--) {
buffer.clear();
buffer.put(new byte[] {data[index + 0], data[index + 1], data[index + 2], data[index + 3]});
buffer.flip();
rgbData[rgbDataIndex++] = buffer.getInt();
index += 4;
}
}
}

return rgbData;
}
}
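
/*
 * Illustrative sketch (not part of the recorder): builds a tiny diff buffer by hand in
 * the chunk format that ImageHelper.getByteArrayChange produces -- a 4-byte chunk count,
 * then signed 4-byte run lengths (positive = run of unchanged pixels, negative = count of
 * changed pixels whose ARGB values follow) -- and decodes it with byteArray2RgbArray.
 * All values are made up for demonstration only.
 */
class RgbDiffFormatDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write(ByteBuffer.allocate(4).putInt(2).array());          // two chunks follow
        out.write(ByteBuffer.allocate(4).putInt(3).array());          // chunk 1: 3 unchanged pixels
        out.write(ByteBuffer.allocate(4).putInt(-2).array());         // chunk 2: 2 changed pixels
        out.write(ByteBuffer.allocate(4).putInt(0xFF112233).array()); // changed pixel 1
        out.write(ByteBuffer.allocate(4).putInt(0xFF445566).array()); // changed pixel 2

        int[] rgb = ImageHelper.byteArray2RgbArray(out.toByteArray());
        System.out.println("decoded " + rgb.length + " pixels"); // 5
    }
}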

final class AudioHelper {

private static final float RATE = 8000;
private static final int SAMPLE_SIZE_IN_BITS = 8;
private static final int CHANNELS = 1;
private static final AudioFormat DEFAULT_AUDIO_FORMAT = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, RATE, SAMPLE_SIZE_IN_BITS, CHANNELS, SAMPLE_SIZE_IN_BITS / 8 * CHANNELS, RATE, false);

private static SourceDataLine sourceDataLine;
private static TargetDataLine targetDataLine;

static {
try {
DataLine.Info targetInfo = new DataLine.Info(TargetDataLine.class, DEFAULT_AUDIO_FORMAT);
targetDataLine = (TargetDataLine) AudioSystem.getLine(targetInfo);
targetDataLine.open(DEFAULT_AUDIO_FORMAT);
targetDataLine.start();

DataLine.Info sourceInfo = new DataLine.Info(SourceDataLine.class, DEFAULT_AUDIO_FORMAT);
sourceDataLine = (SourceDataLine) AudioSystem.getLine(sourceInfo);
sourceDataLine.open(DEFAULT_AUDIO_FORMAT);
sourceDataLine.start();
} catch (Exception e) {
throw new RuntimeException(e);
}
}

public static int getBufferSize() {
return targetDataLine.getBufferSize();
}

public static int readData(byte[] bs) {
return targetDataLine.read(bs, 0, targetDataLine.available());
}

public static void writeData(byte[] bs) {
sourceDataLine.write(bs, 0, bs.length);
}

public static void flush() {
sourceDataLine.flush();
}
}
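
/*
 * Illustrative sketch (not part of the recorder): back-of-the-envelope data rate for the
 * capture format above (8000 Hz, 8-bit, mono) and for RecordHelper's polling interval of
 * 1000 / RECORD_RATE = 200 ms. The class and the printed figures are for illustration only.
 */
class AudioBudgetDemo {
    public static void main(String[] args) {
        int bytesPerSecond = 8000 * (8 / 8) * 1;  // sample rate * bytes per sample * channels
        System.out.println("audio bytes per second = " + bytesPerSecond);
        System.out.println("audio bytes per 200 ms = " + bytesPerSecond / 5);
    }
}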

final class ZipHelper {

public static ByteArrayOutputStream zip(byte[] data) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
ZipOutputStream zip = new ZipOutputStream(baos);
ZipEntry entry = new ZipEntry("zip");
entry.setSize(data.length);
zip.putNextEntry(entry);
zip.write(data);
zip.closeEntry();
zip.close();
} catch (Exception e) {
e.printStackTrace();
}
return baos;
}

public static ByteArrayOutputStream unZip(byte[] data) {
ByteArrayOutputStream baos = null;
try {
ByteArrayInputStream bis = new ByteArrayInputStream(data);
ZipInputStream zip = new ZipInputStream(bis);
if (zip.getNextEntry() != null) {
byte[] buf = new byte[1024];
int num;
baos = new ByteArrayOutputStream();
while ((num = zip.read(buf, 0, buf.length)) != -1) {
baos.write(buf, 0, num);
}
baos.flush();
baos.close();
}
zip.close();
bis.close();
} catch (Exception e) {
e.printStackTrace();
}
return baos;
}
}
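
/*
 * Illustrative sketch (not part of the recorder): round-trips a byte array through
 * ZipHelper to show that zip() wraps the data in a single-entry ZIP stream and unZip()
 * restores the original bytes. The sample payload is made up.
 */
class ZipHelperDemo {
    public static void main(String[] args) {
        byte[] original = "hello screen recorder".getBytes();
        byte[] zipped = ZipHelper.zip(original).toByteArray();
        byte[] restored = ZipHelper.unZip(zipped).toByteArray();
        System.out.println("zipped size = " + zipped.length + " bytes, round-trip ok = " + Arrays.equals(original, restored));
    }
}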




/**
* Screen recorder that captures the screen (and microphone) into an FLV video.
*
*/
class ScreenRecorder {
private final ScheduledExecutorService screenRecorder = Executors.newSingleThreadScheduledExecutor(); // screen-capture thread
private final ScheduledExecutorService audioRecorder = Executors.newSingleThreadScheduledExecutor(); // audio thread, reads/writes audio data
private final ExecutorService videoRecorder = Executors.newSingleThreadExecutor(); // video thread, processes captured screenshots
private final BlockingQueue<Object[]> screenQueue = new LinkedBlockingQueue<>(); // pending screenshots as {timestamp, image}
private final FLV flv;
private final Robot robot;
private final Rectangle rectangle;
private volatile boolean video; // record video
private volatile boolean audio; // record audio
private volatile long startTime;
private volatile boolean finish; // recording finished
private ScreenRecorder sr;
private String storagePath; // storage path chosen by the user
private String uuidnameFileName; // temporary path of the intermediate .flv file

/**
* No-argument recorder used by the Swing front end; the actual capture is delegated
* to an inner ScreenRecorder created in record().
*/
public ScreenRecorder(){
flv = null;
robot = null;
rectangle = null;
}

/**
* Screen recorder.
* @param target output stream the video file is written to
* @param video whether to record the screen
* @param audio whether to record audio
* @param rectangle capture area; null means full screen
*/
public ScreenRecorder(OutputStream target, boolean video, boolean audio, Rectangle rectangle) {
if(null == rectangle) { // default to full-screen capture
rectangle = new Rectangle(Toolkit.getDefaultToolkit().getScreenSize());
}
this.rectangle = rectangle;
try {
robot = new Robot();
} catch (AWTException e) {
throw new RuntimeException();
}
this.video = video;
this.audio = audio;
flv = new FLV(target, (int) rectangle.getWidth(), (int) rectangle.getHeight());
}

/**
* Start recording.
*/
public void start() {
if(finish) {
return;
}
startTime = System.currentTimeMillis();
screenRecorder.scheduleAtFixedRate(() -> { // capture 12 frames per second
long timestamp = System.currentTimeMillis();
Object[] data = new Object[] {timestamp, robot.createScreenCapture(rectangle)};
try {
screenQueue.put(data);
} catch (InterruptedException e) {
return;
}
}, 0, 1000 / 12, TimeUnit.MILLISECONDS);

byte[] buffer = new byte[AudioHelpers.getBufferSize()];
audioRecorder.scheduleAtFixedRate(() -> {
long timestamp = System.currentTimeMillis();
int count = AudioHelpers.readData(buffer);
flv.addAudio(timestamp - startTime, Arrays.copyOfRange(buffer, 0, count));
}, 0, 100, TimeUnit.MILLISECONDS);

videoRecorder.execute(() -> {
for(;;) {
try {
Object[] data = screenQueue.take();
long timestamp = (long) data[0];
BufferedImage img = (BufferedImage) data[1];
flv.addVideo(timestamp - startTime, img);
} catch (InterruptedException e) {
break;
}
}
});
}

/**
* Pause recording.
*/
public void suspend() {

}

/**
* Pause screen capture.
*/
public void suspendVideo() {
video = false;
}

/**
* Pause audio capture.
*/
public void suspendAudio() {
audio = false;
}

/**
* Resume recording.
*/
public void resume() {

}

/**
* Resume screen capture.
*/
public void resumeVideo() {
video = true;
}

/**
* Resume audio capture.
*/
public void resumeAudio() {
audio = true;
}

/**
* Finish recording.
*/
public void finish() {
finish = true;
screenRecorder.shutdownNow();
audioRecorder.shutdownNow();
videoRecorder.shutdownNow();
flv.save();
}

private String saveFile(){
JFileChooser dialog = new JFileChooser();
dialog.setDialogTitle("另存为");
dialog.setFileSelectionMode(JFileChooser.FILES_ONLY);
dialog.setDialogType(JFileChooser.SAVE_DIALOG);
dialog.setFileFilter(new TextFileFilter("*.mp4", "视频文件(*.mp4)"));
int result = dialog.showSaveDialog(dialog);
if(result == JFileChooser.APPROVE_OPTION){
File file = dialog.getSelectedFile();
String fileName = file.getAbsolutePath(); // full path of the selected file
return fileName;
}
return null;
}

// Count how many times substring s occurs in str
public int countString(String str,String s){
int count = 0;
while (str.indexOf(s)!=-1){
str = str.substring(str.indexOf(s) + 1, str.length());
count ++;
}
return count;
}

public boolean createFile(){
System.out.println("视频只能存储为.mp4格式,请选择存储路径");
storagePath = saveFile();
if(storagePath==null){
// Dialog cancelled: fall back to a default folder and a generated .mp4 name,
// so the rest of the pipeline always has a valid .mp4 target path.
File sourceFolder = new File("C:\\hsbcVideo");
if(!sourceFolder.exists()){
sourceFolder.mkdirs();
}
storagePath = new File(sourceFolder, "TylerZhong" + UUID.randomUUID().toString() + ".mp4").getAbsolutePath();
return true;
}else{
if(storagePath.startsWith("C:\\") && countString(storagePath,"\\") <= 1){
System.out.println("error:不能存储在C盘根目录,客户端没有所需的特权,请重新选择路径");
return false;
}else {
// Append the .mp4 extension if the user did not type one.
if(!storagePath.endsWith(".mp4")){
storagePath = storagePath+".mp4";
}
}
}
return true;
}

/**
* Start recording the screen.
* @throws Exception
*/
public void record() throws Exception {
// Record to a temporary .flv next to the chosen .mp4 target; createFile() guarantees storagePath ends in .mp4.
uuidnameFileName = storagePath.substring(0, storagePath.lastIndexOf(".mp4")) + UUID.randomUUID().toString() + ".flv";
sr = new ScreenRecorder(new BufferedOutputStream(new FileOutputStream(new File(uuidnameFileName))), true, true, null);
sr.start();
}

/**
* Stop recording and convert the temporary .flv to the target .mp4.
* @throws InterruptedException
*/
public void stopRecord() throws InterruptedException {
sr.finish();
ConverVideoUtils converVideoUtils = new ConverVideoUtils();
converVideoUtils.beginConver(uuidnameFileName, storagePath);
storagePath = null;
uuidnameFileName = null;
}

public static void main(String[] args) throws Exception {
ScreenRecorder sr = new ScreenRecorder(new BufferedOutputStream(new FileOutputStream(new File("C:\\aaa\\tyler.flv"))), true, true, null);
sr.start();
TimeUnit.SECONDS.sleep(5); // record for 5 seconds
sr.finish();
ConverVideoUtils converVideoUtils = new ConverVideoUtils();
converVideoUtils.beginConver("C:\\aaa\\tyler.flv","C:\\aaa\\tyler.mp4");
}
}

class TextFileFilter extends javax.swing.filechooser.FileFilter {
private ArrayList<String> extensions = new ArrayList<String>();
private ArrayList<String> descriptions = new ArrayList<String>();
public TextFileFilter(){
super();
}
public TextFileFilter(String extension, String description) {
super();
this.extensions.add(extension);
this.descriptions.add(description);
}
@Override
public boolean accept(File pathname) {
if (pathname != null) {
if (pathname.isDirectory()) {
return true;
}
String extension = getExtension(pathname);
if (extension==null){
return false;
}
for(int i=0; i<extensions.size(); i++){
if(extensions.get(i).toLowerCase().endsWith(extension.toLowerCase())){
return true;
}
}
}
return false;
}
private String getExtension(File pathname) {
if (pathname != null) {
String filename = pathname.getName();
int i = filename.lastIndexOf('.');
if (i > 0 && i < filename.length() - 1) {
return filename.substring(i).toLowerCase();
}
}
return null;
}

@Override
public String getDescription() {
return descriptions.get(descriptions.size()-1);
}
}
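
/*
 * Illustrative sketch (not part of the recorder): shows how TextFileFilter matches files
 * by extension in the save dialog. The file paths are made up for demonstration only.
 */
class TextFileFilterDemo {
    public static void main(String[] args) {
        TextFileFilter filter = new TextFileFilter("*.mp4", "视频文件(*.mp4)");
        System.out.println(filter.accept(new File("C:\\demo\\video.mp4"))); // true
        System.out.println(filter.accept(new File("C:\\demo\\video.flv"))); // false
    }
}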

/**
* FLV file: addAudio() appends audio samples, addVideo() appends video frames,
* save() finalizes the file.
*/
class FLV {
// Reference documents:
// http://web.archive.org/web/20100601113823/http://www.adobe.com/devnet/swf/pdf/swf_file_format_spec_v10.pdf
// https://www.ics.agh.edu.pl/dydaktyka/mm/lato0405_inf_d/wyklady/w4/SWF7_specification.pdf
// https://rtmp.veriskope.com/pdf/video_file_format_spec_v10.pdf

private static final int TYPE_FLAGS_AUDIO = 0B00000100; // file contains audio
private static final int TYPE_FLAGS_VIDEO = 0B00000001; // file contains video

private static final int TAG_TYPE_AUDIO = 8; // audio tag
private static final int TAG_TYPE_VIDEO = 9; // video tag
private static final int TAG_TYPE_SCRIPT = 18; // script tag

private static final int KEY_FRAME = 1; // key frame
private static final int INTER_FRAME = 2; // inter frame

private static final int SCREEN_VIDEO_CODEC = 3; // video codec id (Screen Video)
private static final int LINEAR_PCM_LITTLE_ENDIAN = 3; // audio codec id (linear PCM, little endian)

private static final int BLOCK_SIZE = 32; // block size, a multiple of 16
private static final int MAX_PIXEL = 4095; // maximum FLV dimension; do not lower this to shrink the file

private final BlockingQueue<byte[]> dataQueue = new LinkedBlockingQueue<>(); // if I/O is slower than frame production the queue can grow unbounded and exhaust memory
private final ExecutorService dataWriter = Executors.newSingleThreadExecutor(); // writer thread that streams the video data out
private final int width;
private final int height;
private final int keyFrameRate; // number of frames between key frames, default 60
private int videoTagCount; // number of video frames written so far
private Map<String, int[]> lastFrame = new HashMap<>(); // previous frame blocks keyed by "x_y" -> block RGB, used to diff inter frames
private volatile boolean finish; // finished flag; no further data may be written

/**
* Create an FLV video.
* @param target output stream for the video file
* @param width video width in pixels
* @param height video height in pixels
*/
public FLV(OutputStream target, int width, int height) {
this(target, width, height, 60);
}

/**
* Create an FLV video.
* @param target output stream for the video file
* @param width video width in pixels
* @param height video height in pixels
* @param keyFrameRate number of frames between key frames
*/
public FLV(OutputStream target, int width, int height, int keyFrameRate) {
this.width = width;
this.height = height;
this.keyFrameRate = keyFrameRate;
dataWriter.execute(() -> {
for(;;) {
try {
target.write(dataQueue.take());
} catch (InterruptedException e) {
break;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
try {
target.close(); // close the stream to finish the file and exit
} catch (IOException e) {
throw new RuntimeException(e);
}
});
try {
dataQueue.put(new byte[] {
'F', 'L', 'V', // signature
1, // version
TYPE_FLAGS_AUDIO | TYPE_FLAGS_VIDEO, // typeFlags
0, 0, 0, 9, // dataOffset
0, 0, 0, 0 // previousTagSize
});
addScript(40);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}

/**
* Finalize and save the FLV file.
*/
public void save() {
finish = true;
dataWriter.shutdownNow();
}

/**
* Add a video frame.
* @param timestamp timestamp in milliseconds, starting at 0
* @param img the frame image
*/
public void addVideo(long timestamp, BufferedImage img) {
int frameType;
if(videoTagCount++ % keyFrameRate == 0) {
// key frame
frameType = KEY_FRAME;
} else {
// inter frame
frameType = INTER_FRAME;
}

ByteArrayOutputStream baos = new ByteArrayOutputStream();
baos.write((frameType << 4) | SCREEN_VIDEO_CODEC); // frameType << 4 | codecID

byte[] videoData = getScreenVideoPacket(img, frameType == KEY_FRAME);
baos.write(videoData, 0, videoData.length); // videoData

addTag(TAG_TYPE_VIDEO, timestamp, baos.toByteArray());
}

/**
* Add audio samples.
* @param timestamp timestamp in milliseconds, starting at 0
* @param audioData raw PCM audio data
*/
public void addAudio(long timestamp, byte[] audioData) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
// soundFormat 4bit 3 Linear PCM little endian
// soundRate 2bit 0 5.5khz 1 11khz 2 22khz 3 44khz
// soundSize 1bit 0 8bit 1 16bit
// soundType 1bit 0 mono 1 stereo
baos.write((LINEAR_PCM_LITTLE_ENDIAN << 4) | 0B000000100);

// soundData
baos.write(audioData, 0, audioData.length);

addTag(TAG_TYPE_AUDIO, timestamp, baos.toByteArray());
}

/**
* Add the onMetaData script tag; the video still plays without it.
* @param duration video duration in seconds
*/
private void addScript(double duration) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
baos.write(new byte[] {
2, // AMF_DATA_TYPE_STRING
0, 10, // dataSize
111, 110, 77, 101, 116, 97, 68, 97, 116, 97, // onMetaData
8, // AMF_DATA_TYPE_MIXEDARRAY
0, 0, 0, 5, // arrayLength
});
// baos.write(new byte[] {
// 0, 8, // nameLength
// 100, 117, 114, 97, 116, 105, 111, 110, // name duration
// 0
// });
// baos.write(ByteBuffer.allocate(8).putDouble(duration).array());
baos.write(new byte[] {
0, 5, // nameLength
119, 105, 100, 116, 104, // name width
0
});
baos.write(ByteBuffer.allocate(8).putDouble(width).array());
baos.write(new byte[] {
0, 6, // nameLength
104, 101, 105, 103, 104, 116, // name height
0
});
baos.write(ByteBuffer.allocate(8).putDouble(height).array());
baos.write(new byte[] {
0, 12, // nameLength
118, 105, 100, 101, 111, 99, 111, 100, 101, 99, 105, 100, // videocodecid
0
});
baos.write(ByteBuffer.allocate(8).putDouble(SCREEN_VIDEO_CODEC).array());
baos.write(new byte[] {
0, 12, // nameLength
97, 117, 100, 105, 111, 99, 111, 100, 101, 99, 105, 100, // audiocodecid
0
});
baos.write(ByteBuffer.allocate(8).putDouble(LINEAR_PCM_LITTLE_ENDIAN).array());
baos.write(new byte[] {0, 0, 9});
} catch (IOException e) {
throw new RuntimeException(e);
}
addTag(TAG_TYPE_SCRIPT, 0, baos.toByteArray());
}

/**
* Append one FLV tag.
* @param tagType tag type
* @param timestamp timestamp in milliseconds, starting at 0
* @param data tag payload
*/
private void addTag(int tagType, long timestamp, byte[] data) {
if(finish) {
return;
}
int dataSize = data.length;
if(dataSize >= 256 * 256 * 256) {
throw new RuntimeException("tag数据超过最大值");
}
if(timestamp > 0xFFFFFF) {
throw new RuntimeException("时间戳超过最大值");
}
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
baos.write(new byte[] {
(byte) tagType, // tagType
(byte) (((dataSize & 0B0111111110000000000000000) >>> 16)), (byte) ((dataSize & 0B01111111100000000) >>> 8), (byte) (dataSize & 0B011111111), // dataSize
(byte) (((timestamp & 0B0111111110000000000000000) >>> 16)), (byte) ((timestamp & 0B01111111100000000) >>> 8), (byte) (timestamp & 0B011111111), // timestamp
(byte) ((timestamp & 0B011111111000000000000000000000000) >>> 24), // timestampExtended
0, 0, 0, // streamID
});
baos.write(data); // data
long tagSize = dataSize + 11;
baos.write(new byte[] {
(byte) ((tagSize & 0B011111111000000000000000000000000) >>> 24),
(byte) (((tagSize & 0B0111111110000000000000000) >>> 16)),
(byte) ((tagSize & 0B01111111100000000) >>> 8),
(byte) (tagSize & 0B011111111)
}); // previousTagSize

dataQueue.put(baos.toByteArray());
} catch (IOException e) {
throw new RuntimeException(e);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}

/**
* Encode an image as Screen Video codec data.
* @param img the frame image
* @param keyFrame whether this is a key frame
* @return encoded video packet
*/
private byte[] getScreenVideoPacket(BufferedImage img, boolean keyFrame) {
int width = img.getWidth();
int height = img.getHeight();

if(width > MAX_PIXEL || height > MAX_PIXEL) {
Image smallImg = img.getScaledInstance(width >= height ? MAX_PIXEL : -1, height >= width ? MAX_PIXEL : -1, Image.SCALE_AREA_AVERAGING);
width = smallImg.getWidth(null);
height = smallImg.getHeight(null);

img = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
img.getGraphics().drawImage(smallImg, 0, 0, width, height, null);
}

int w = ((BLOCK_SIZE / 16 - 1) << 12) | width;
int h = ((BLOCK_SIZE / 16 - 1) << 12) | height;

ByteArrayOutputStream baos = new ByteArrayOutputStream();
baos.write((w & 0B01111111100000000) >>> 8); // BlockWidth 4bit (actualWidth / 16) - 1
baos.write(w & 0B011111111); // ImageWidth 12bit
baos.write((h & 0B01111111100000000) >>> 8); // BlockHeight 4bit (actualHeight / 16) - 1
baos.write(h & 0B011111111); // ImageHeight 12bit

for(int y = height - 1; y >= 0; y -= BLOCK_SIZE) {
for(int x = 0; x < width; x += BLOCK_SIZE) {
// ImageBlocks [{
// DataSize 16bit; if 0 the Data field is absent and the block is unchanged
// Data BGR pixels from bottom-left to top-right, zlib compressed
// }]
byte[] data = getBlockData(img, x, y, (width - x) < BLOCK_SIZE ? (width - x) : BLOCK_SIZE, (y + 1) < BLOCK_SIZE ? (y + 1) : BLOCK_SIZE, keyFrame);
if(null == data) {
baos.write(0);
baos.write(0);
} else {
baos.write((data.length & 0B01111111100000000) >>> 8);
baos.write(data.length & 0B011111111);
baos.write(data, 0, data.length);
}
}
}

return baos.toByteArray();
}

/**
* Build the data for one block.
* @param img the frame image
* @param x block origin x
* @param y block origin y (bottom row of the block)
* @param w block width
* @param h block height
* @param keyFrame whether this is a key frame
* @return zlib-compressed BGR data, or null if the block is unchanged in an inter frame
*/
private byte[] getBlockData(BufferedImage img, int x, int y, int w, int h, boolean keyFrame) {
int idx = 0;
int[] rgbs = new int[w * h];
for(int i = 0; i < h; i++) {
for(int j = 0; j < w; j++) {
rgbs[idx++] = img.getRGB(x + j, y - i) & 0xFFFFFF;
}
}

int[] lastRgbs = lastFrame.get(x + "_" + y);
lastFrame.put(x + "_" + y, rgbs);
if(!keyFrame) {
idx = 0;
boolean same = true;
for(int rgb : rgbs) {
if(lastRgbs[idx++] != rgb) {
same = false;
break;
}
}
if(same) {
return null;
}
}

ByteArrayOutputStream baos = new ByteArrayOutputStream();
for(int rgb : rgbs) {
baos.write(rgb & 0xFF);
baos.write((rgb >> 8) & 0xFF);
baos.write((rgb >> 16) & 0xFF);
}

Deflater compresser = new Deflater();
compresser.setInput(baos.toByteArray());
compresser.finish();

baos.reset();
byte[] output = new byte[1024];
while(!compresser.finished()) {
int len = compresser.deflate(output);
baos.write(output, 0, len);
}
compresser.end();

return baos.toByteArray();
}
}
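
/*
 * Illustrative sketch (not part of the recorder): packs the 11-byte FLV tag header
 * (tagType, 24-bit dataSize, 24-bit timestamp, timestampExtended, 24-bit streamID) in the
 * same layout FLV.addTag writes for the value ranges it accepts. The helper name and the
 * sample values are made up for demonstration only.
 */
class FlvTagHeaderDemo {
    static byte[] tagHeader(int tagType, int dataSize, long timestamp) {
        return new byte[] {
            (byte) tagType,
            (byte) ((dataSize >>> 16) & 0xFF), (byte) ((dataSize >>> 8) & 0xFF), (byte) (dataSize & 0xFF),    // dataSize, big endian
            (byte) ((timestamp >>> 16) & 0xFF), (byte) ((timestamp >>> 8) & 0xFF), (byte) (timestamp & 0xFF), // lower 24 bits of timestamp
            (byte) ((timestamp >>> 24) & 0xFF), // timestampExtended
            0, 0, 0                             // streamID, always 0
        };
    }

    public static void main(String[] args) {
        byte[] header = tagHeader(9, 1024, 40); // video tag, 1 KB payload, t = 40 ms
        System.out.println("tag header length = " + header.length); // 11
    }
}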

/**
* Audio helper: readData() reads microphone audio data.
*
*/
class AudioHelpers {
public static final float RATE = 11000;
public static final int SAMPLE_SIZE_IN_BITS = 8;
public static final int CHANNELS = 1;
private static final AudioFormat DEFAULT_AUDIO_FORMAT = new AudioFormat(RATE, SAMPLE_SIZE_IN_BITS, CHANNELS, 16 == SAMPLE_SIZE_IN_BITS, false);

private static TargetDataLine targetDataLine;

static {
try {
DataLine.Info targetInfo = new DataLine.Info(TargetDataLine.class, DEFAULT_AUDIO_FORMAT);
targetDataLine = (TargetDataLine) AudioSystem.getLine(targetInfo);
targetDataLine.open(DEFAULT_AUDIO_FORMAT);
targetDataLine.start();
} catch (Exception e) {
throw new RuntimeException(e);
}
}

public static int getBufferSize() {
return targetDataLine.getBufferSize();
}

public static int readData(byte[] bs) {
return targetDataLine.read(bs, 0, targetDataLine.available());
}
}


/**
* Video transcoding helper.
*/
//@Component
class ConverVideoUtils {
// path to ffmpeg.exe
private static final String ffmpegPath = "E:\\aa\\ffmpeg\\bin\\ffmpeg.exe";

/**
* Convert a video to the target format.
*
* @param sourceVideoPath path of the source video
* @param storagePath target .mp4 path
* @return path of the converted file, or "" on failure
*/
public String beginConver(String sourceVideoPath,String storagePath) {
// target extension
// String targetExtension = appProperties.getVideoFormat();
String targetExtension = ".mp4";
// whether to delete the source file afterwards
// Boolean isDeleteResult = appProperties.getIsDeleteResult();
Boolean isDeleteResult = true;
File fi = new File(sourceVideoPath);
String fileName = fi.getName();
// file name without extension
String fileRealName = fileName.substring(0, fileName.lastIndexOf("."));
// System.out.println("接收到奥文件("+sourceVideoPath+")需要转换");
if (!checkfile(sourceVideoPath)) {
System.out.println(sourceVideoPath + "文件不存在");
return "";
}
long beginTime = System.currentTimeMillis();
// System.out.println("开始转换文件("+sourceVideoPath+")");
String path = process(fileRealName, sourceVideoPath, targetExtension, isDeleteResult, storagePath);
if (path != null) {
// System.out.println("转换成功");
long endTime = System.currentTimeMillis();
long timeCha = (endTime - beginTime);
String totalTime = sumTime(timeCha);
// System.out.println("转换视频格式共用了:"+ totalTime + " ");
if (isDeleteResult) {
deleteFile(sourceVideoPath);
}
return path;
} else {
return "";
}
}

/**
* Do the actual format conversion.
*
* @param fileRealName file name without extension
* @param sourceVideoPath path of the source file
* @param targetExtension target extension
* @param isDeleteResult whether to delete the source file after conversion
* @param storagePath target .mp4 path
* @return path of the converted file, or "" on failure
*/
private String process(String fileRealName, String sourceVideoPath, String targetExtension, boolean isDeleteResult, String storagePath) {
int type = checkContentType(sourceVideoPath);
String path = "";
if (type == 0) {
// type 0: ffmpeg can convert the file directly
path = processVideoFormat(sourceVideoPath, fileRealName, targetExtension, isDeleteResult, storagePath);
} else if (type == 1) {
// type 1: first convert to AVI with mencoder, then convert to the target format with ffmpeg
String aviFilePath = processAVI(fileRealName, sourceVideoPath);
if (aviFilePath == null) {
// the AVI file could not be produced
return "";
} else {
System.out.println("开始转换:");
path = processVideoFormat(aviFilePath, fileRealName, targetExtension, isDeleteResult, storagePath);
if (isDeleteResult) {
deleteFile(aviFilePath);
}
}
}
return path;
}

/**
* Check the source file type.
*
* @param sourceVideoPath path of the source file
* @return 0 if ffmpeg can convert it directly, 1 if it must first be converted to AVI, 9 if unsupported
*/
private int checkContentType(String sourceVideoPath) {
String type = sourceVideoPath.substring(sourceVideoPath.lastIndexOf(".") + 1).toLowerCase();
// formats ffmpeg can parse directly: asx, asf, mpg, wmv, 3gp, mp4, mov, avi, flv, ...
if (Arrays.asList("avi", "mpg", "wmv", "3gp", "mov", "mp4", "asf", "asx", "flv").contains(type)) {
return 0;
}
// formats ffmpeg cannot parse (wmv9, rm, rmvb, ...) are first converted to AVI with another tool (mencoder)
if (Arrays.asList("wmv9", "rm", "rmvb").contains(type)) {
return 1;
}
return 9;
}

/**
* Check whether a file exists.
*
* @param path file path
* @return true if the path points to an existing regular file
*/
private boolean checkfile(String path) {
File file = new File(path);
return file.isFile();
}

/**
* For formats ffmpeg cannot parse (wmv9, rm, rmvb, ...), first convert to AVI with mencoder.
*
* @param fileRealName file name without extension
* @param sourceVideoPath path of the source file
* @return path of the generated AVI file, or null on failure
*/
private String processAVI(String fileRealName, String sourceVideoPath) {
/**
* Path to mencoder.exe.
*/
// String menCoderPath = appProperties.getMencoderPath();
String menCoderPath = "";
/**
* Output folder for the intermediate AVI file.
*/
// String videoFolder = appProperties.getUploadAndFormatPath();
String videoFolder = "";
List<String> commend = new java.util.ArrayList<>();
commend.add(menCoderPath);
commend.add(sourceVideoPath);
commend.add("-oac");
commend.add("mp3lame");
commend.add("-lameopts");
commend.add("preset=64");
commend.add("-ovc");
commend.add("xvid");
commend.add("-xvidencopts");
commend.add("bitrate=600");
commend.add("-of");
commend.add("avi");
commend.add("-o");
commend.add(videoFolder + fileRealName + ".avi");
try {
ProcessBuilder builder = new ProcessBuilder();
builder.command(commend);
Process p = builder.start();
doWaitFor(p);
return videoFolder + fileRealName + ".avi";
} catch (Exception e) {
e.printStackTrace();
return null;
}
}

/**
* Convert to the target format.
* Formats ffmpeg can parse: asx, asf, mpg, wmv, 3gp, mp4, mov, avi, flv, etc.
*
* @param oldFilePath path of the source file
* @param fileRealName file name without extension
* @param targetExtension target extension, e.g. ".mp4"
* @param isDeleteResult whether to delete intermediate files
* @param storagePath final .mp4 target path
* @return path of the converted file, or "" on failure
*/
private String processVideoFormat(String oldFilePath, String fileRealName, String targetExtension, Boolean isDeleteResult, String storagePath) {
/**
* Folder where the intermediate MP4 is written (same folder as the source file).
*/
String targetFolder = oldFilePath.substring(0, oldFilePath.lastIndexOf("\\") + 1);
if (!checkfile(oldFilePath)) {
System.out.println(oldFilePath + "文件不存在");
return "";
}
List<String> commend = new ArrayList<>();
commend.add(ffmpegPath);
commend.add("-i");
commend.add(oldFilePath);
commend.add("-vcodec");
commend.add("mpeg4");
commend.add("-q");
commend.add("0");
commend.add("-y");
commend.add(targetFolder + fileRealName + targetExtension);
try {
ProcessBuilder builder = new ProcessBuilder();
builder.command(commend);
Process p = builder.start();
doWaitFor(p);
p.destroy();
String videoPath = targetFolder + fileRealName + targetExtension;
String path = this.processVideoFormatH264(videoPath, ffmpegPath, targetFolder, targetExtension, isDeleteResult,storagePath);
System.out.println("视频存储在:" + path);
System.out.println("==========================================success==========================================");
return path;
} catch (Exception e) {
e.printStackTrace();
return "";
}
}

/**
* Re-encode the MPEG-4 intermediate file to H.264 so common players can handle it.
*
* @param path path of the MPEG-4 file
* @param ffmpegPath path to ffmpeg.exe
* @return path of the H.264 file, or "" on failure
*/
private String processVideoFormatH264(String path, String ffmpegPath, String targetFolder, String targetExtension, Boolean isDeleteResult, String storagePath) {
if (!checkfile(path)) {
System.out.println(path + "文件不存在");
return "";
}
Calendar now = Calendar.getInstance();
int month = now.get(Calendar.MONTH) + 1;
int day = now.get(Calendar.DAY_OF_MONTH);
int hour = now.get(Calendar.HOUR_OF_DAY);
int minute = now.get(Calendar.MINUTE);
int second = now.get(Calendar.SECOND);
String time = "" + now.get(Calendar.YEAR) + (month >= 10 ? month : "0" + month) + (day > 10 ? day : "0" + day) + (hour > 10 ? hour : "0" + hour) + (minute > 10 ? minute : "0" + minute) + (second > 10 ? second : "0" + second);
// String newFilePath = targetFolder + time + targetExtension;
String newFilePath = storagePath;
List<String> commend = new ArrayList<>();
commend.add(ffmpegPath);
commend.add("-i");
commend.add(path);
commend.add("-vcodec");
commend.add("h264");
commend.add("-q");
commend.add("0");
commend.add("-y");
commend.add(newFilePath);
try {
ProcessBuilder builder = new ProcessBuilder();
builder.command(commend);
Process p = builder.start();
doWaitFor(p);
p.destroy();
if (isDeleteResult) {
deleteFile(path);
}
return newFilePath;
} catch (Exception e) {
e.printStackTrace();
return "";
}
}

public int doWaitFor(Process p) {
InputStream in = null;
InputStream err = null;
int exitValue = -1;
try {
in = p.getInputStream();
err = p.getErrorStream();
boolean finished = false;
while (!finished) {
try {
while (in.available() > 0) {
in.read();
}
while (err.available() > 0) {
err.read();
}
exitValue = p.exitValue();
finished = true;
} catch (IllegalThreadStateException e) {
Thread.sleep(500);
}
}
} catch (Exception e) {
System.out.println("doWaitFor();: unexpected exception - " + e.getMessage());
} finally {
try {
if (in != null) {
in.close();
}
} catch (IOException e) {
System.out.println(e.getMessage());
}
if (err != null) {
try {
err.close();
} catch (IOException e) {
System.out.println(e.getMessage());
}
}
}
return exitValue;
}

/**
* Delete a file.
*
* @param filepath path of the file to delete
*/
public void deleteFile(String filepath) {
File file = new File(filepath);
if (file.delete()) {
// System.out.println("文件" + filepath + "已删除");
}
}

/**
* Format the transcode duration as a human-readable string.
*
* @param ms elapsed milliseconds
* @return formatted duration
*/
public String sumTime(long ms) {
int ss = 1000;
long mi = ss * 60;
long hh = mi * 60;
long dd = hh * 24;
long day = ms / dd;
long hour = (ms - day * dd) / hh;
long minute = (ms - day * dd - hour * hh) / mi;
long second = (ms - day * dd - hour * hh - minute * mi) / ss;
long milliSecond = ms - day * dd - hour * hh - minute * mi - second
* ss;
String strDay = day < 10 ? "0" + day + "天" : "" + day + "天";
String strHour = hour < 10 ? "0" + hour + "小时" : "" + hour + "小时";
String strMinute = minute < 10 ? "0" + minute + "分" : "" + minute + "分";
String strSecond = second < 10 ? "0" + second + "秒" : "" + second + "秒";
String strMilliSecond = milliSecond < 10 ? "0" + milliSecond : ""
+ milliSecond;
strMilliSecond = milliSecond < 100 ? "0" + strMilliSecond + "毫秒" : ""
+ strMilliSecond + " 毫秒";
return strDay + " " + strHour + ":" + strMinute + ":" + strSecond + " "
+ strMilliSecond;
}
}
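
/*
 * Illustrative sketch (not part of the converter): prints command lines equivalent to the
 * two ffmpeg invocations ConverVideoUtils builds (FLV -> MPEG-4 .mp4, then MPEG-4 -> H.264
 * at the user-chosen path). All paths below are examples only.
 */
class FfmpegCommandDemo {
    public static void main(String[] args) {
        String ffmpeg = "E:\\aa\\ffmpeg\\bin\\ffmpeg.exe"; // same default as ffmpegPath above
        String flv = "C:\\aaa\\tyler.flv";                 // raw recording
        String intermediate = "C:\\aaa\\tyler.mp4";        // MPEG-4 file written by processVideoFormat
        String target = "C:\\aaa\\tyler_h264.mp4";         // hypothetical final H.264 target (storagePath)
        System.out.println(String.join(" ", ffmpeg, "-i", flv, "-vcodec", "mpeg4", "-q", "0", "-y", intermediate));
        System.out.println(String.join(" ", ffmpeg, "-i", intermediate, "-vcodec", "h264", "-q", "0", "-y", target));
    }
}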