ffmpeg
- sudo apt-get install ffmpeg
` ffmpeg -i xxx.mp3 xxx.ogg ` - 音频转码
` ffmpeg -i xxx.mp4 xxx.ogv ` - 视频转码
播放声音
<audio controls src="xxx.mp3"></audio>
<audio controls>
<source src="xxx.mp3">
<source src="xxx.ogg">
</audio>
播放视频
<video controls src="xxx.mp4"></video>
<video controls>
<source src="xxx.mp4">
<source src="xxx.ogv">
</video>
使用麦克风
<audio controls></audio>
// Capture microphone audio (no video) and route the live MediaStream
// into the page's <audio> element; the user starts playback via the controls.
(async function(){
let stream = await navigator.mediaDevices.getUserMedia({video:false,audio:true})
document.querySelector("audio").srcObject = stream
})()
利用vue
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>test</title>
<script src="https://cdn.staticfile.org/vue/2.2.2/vue.min.js"></script>
</head>
<body>
<div id="vueapp">
<div>
<audio controls ref="audio"></audio>
</div>
<div>
<select v-model="selectAudioDeviceIndex">
<option v-for="(d,index) in audioInputDevices" :value="index">{{d.label}}</option>
</select>
</div>
</div>
</body>
<script>
// Lists the available audio-input devices in a <select>; picking one opens
// that microphone and routes its stream into the <audio> element.
new Vue({
el:"#vueapp",
data:{
audioInputDevices:[],      // MediaDeviceInfo objects of kind "audioinput"
selectAudioDeviceIndex:0   // index into audioInputDevices
},
mounted(){
this._initVueApp()
},
methods:{
async _initVueApp(){
// NOTE(review): device labels are empty until the user has granted
// microphone permission at least once — confirm UX expectation.
let devices = await navigator.mediaDevices.enumerateDevices()
let audioInputDevices = devices.filter(value => value.kind==='audioinput')
this.audioInputDevices.length = 0
this.audioInputDevices.push(...audioInputDevices)
console.log(this.audioInputDevices)
},
async showSelectedDevice(){
let deviceInfo = this.audioInputDevices[this.selectAudioDeviceIndex]
// BUG FIX: the original passed the MediaDeviceInfo object itself as the
// "audio" constraint; constraints must reference the device by deviceId.
let stream = await navigator.mediaDevices.getUserMedia({
video:false,
audio:{deviceId:{exact:deviceInfo.deviceId}}
})
this.$refs.audio.srcObject = stream
}
},
watch:{
// Re-open the stream whenever a different device is chosen.
selectAudioDeviceIndex(val,oldVal){
console.log(val)
this.showSelectedDevice()
}
}
})
</script>
</html>
录制声音
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
<script src="https://cdn.staticfile.org/vue/2.2.2/vue.min.js"></script>
</head>
<body>
<div id="vueapp">
<div>
<button :disabled="recording" @click="btnRecordClick">Record</button>
<button :disabled="paused||!recording" @click="btnPauseClick">Pause</button>
<button :disabled="!paused||!recording" @click="btnResumeClick">Resume</button>
<button :disabled="!recording" @click="btnStopClick">Stop</button>
<button :disabled="!currentWebmData" @click="btnPlayClick">Play</button>
</div>
<audio controls ref="player"></audio>
</div>
</body>
<script>
// Record microphone audio with MediaRecorder and replay it in the <audio> tag.
new Vue({
el:"#vueapp",
data(){
return {
// _recorder/_stream are kept as plain instance fields instead of data()
// keys: Vue 2 does not proxy "_"-prefixed data properties anyway.
currentWebmData:null, // Blob of the most recently finished recording
recording:false,
paused:false
}
},
mounted(){
this._initApp()
},
methods:{
async _initApp(){
this._stream = await navigator.mediaDevices.getUserMedia({audio:true,video:false})
// BUG FIX: the original asked for "video/webm;codecs=h264" on an
// audio-only stream; request an audio container, with a fallback to
// the browser default when unsupported.
let mimeType = MediaRecorder.isTypeSupported("audio/webm") ? "audio/webm" : ""
this._recorder = new MediaRecorder(this._stream, mimeType ? {mimeType} : undefined)
this._chunks = []
this._recorder.ondataavailable = this.recorder_dataAvailableHandler.bind(this)
this._recorder.onstop = this.recorder_stopHandler.bind(this)
},
recorder_dataAvailableHandler(e){
// dataavailable may fire multiple times; accumulate every chunk instead
// of keeping only the last one (the original dropped earlier chunks).
console.log(e)
if(e.data && e.data.size > 0) this._chunks.push(e.data)
},
recorder_stopHandler(){
this.currentWebmData = new Blob(this._chunks,{type:this._recorder.mimeType})
this._chunks = []
},
btnRecordClick(){
this.recording = true
this.paused = false
this._chunks = []
this._recorder.start()
},
btnPauseClick(){
this.paused = true
this._recorder.pause()
},
btnResumeClick(){
this.paused = false
this._recorder.resume()
},
btnStopClick(){
this.recording = false
this._recorder.stop()
},
btnPlayClick(){
// Revoke the previous URL so repeated plays do not leak object URLs.
if(this._lastUrl) URL.revokeObjectURL(this._lastUrl)
this._lastUrl = URL.createObjectURL(this.currentWebmData)
this.$refs.player.src = this._lastUrl
}
}
})
</script>
</html>
打开摄像头
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
</head>
<body>
<video autoplay width="400" height="300"></video>
</body>
<script>
// Open the default camera and show the live stream in the <video> element.
(async function(){
// BUG FIX: the original queried "vedio", which matches no element,
// so srcObject was assigned to null and nothing ever displayed.
document.querySelector("video").srcObject = await navigator.mediaDevices.getUserMedia({
audio:false,
video:true
})
})()
</script>
</html>
vue拍照
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
<script src="https://cdn.staticfile.org/vue/2.2.2/vue.min.js"></script>
</head>
<body>
<div id="vueapp">
<video ref="video" autoplay width="400" height="300"></video>
<button @click="btnClick"> take photo</button>
<canvas ref="canvas" width="400" height="300"></canvas>
</div>
</body>
<script>
// Snapshot page: the live camera feed plays in <video>; clicking the button
// copies the current frame onto the 400x300 canvas.
new Vue({
el:"#vueapp",
data(){
return {
_context2d:{} // NOTE: Vue 2 does not proxy "_"-prefixed data keys; used as a plain field
}
},
mounted() {
this._initAueApp()
},
methods:{
// NOTE(review): "_initAueApp" looks like a typo for "_initVueApp"; kept as-is.
async _initAueApp(){
this.$refs.video.srcObject = await navigator.mediaDevices.getUserMedia({audio:false,video:true})
this._context2d = this.$refs.canvas.getContext("2d")
},
btnClick(){
// Draw the current video frame scaled to the canvas size (400x300).
this._context2d.drawImage(this.$refs.video,0,0,400,300)
}
}
})
</script>
</html>
录制视频
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
<script src="https://cdn.staticfile.org/vue/2.2.2/vue.min.js"></script>
</head>
<body>
<div id="vueapp">
<div>
<video ref="preview" width="200" height="150" autoplay></video>
</div>
<div>
<button :disabled="recording" @click="btnRecordClick">Record</button>
<button :disabled="paused||!recording" @click="btnPauseClick">Pause</button>
<button :disabled="!paused||!recording" @click="btnResumeClick">Resume</button>
<button :disabled="!recording" @click="btnStopClick">Stop</button>
<button :disabled="!currentWebmData" @click="btnPlayClick">Play</button>
</div>
<video controls ref="player" width="400" height="300"></video>
</div>
</body>
<script>
// Record the camera with MediaRecorder (live preview on the left) and
// replay the finished recording in the <video controls> player.
new Vue({
el:"#vueapp",
data(){
return {
// _recorder/_stream are plain instance fields; Vue 2 does not proxy
// "_"-prefixed data keys anyway.
currentWebmData:null, // Blob of the most recently finished recording
recording:false,
paused:false
}
},
mounted(){
this._initApp()
},
methods:{
async _initApp(){
this._stream = await navigator.mediaDevices.getUserMedia({audio:false,video:true})
this.$refs.preview.srcObject = this._stream
// ROBUSTNESS: "video/webm;codecs=h264" is not supported everywhere;
// fall back to the browser's default container when it is not.
let preferred = "video/webm;codecs=h264"
let options = MediaRecorder.isTypeSupported(preferred) ? {mimeType:preferred} : undefined
this._recorder = new MediaRecorder(this._stream, options)
this._chunks = []
this._recorder.ondataavailable = this.recorder_dataAvailableHandler.bind(this)
this._recorder.onstop = this.recorder_stopHandler.bind(this)
},
recorder_dataAvailableHandler(e){
// Accumulate every chunk — the original kept only the last one.
console.log(e)
if(e.data && e.data.size > 0) this._chunks.push(e.data)
},
recorder_stopHandler(){
this.currentWebmData = new Blob(this._chunks,{type:this._recorder.mimeType})
this._chunks = []
},
btnRecordClick(){
this.recording = true
this.paused = false
this._chunks = []
this._recorder.start()
},
btnPauseClick(){
this.paused = true
this._recorder.pause()
},
btnResumeClick(){
this.paused = false
this._recorder.resume()
},
btnStopClick(){
this.recording = false
this._recorder.stop()
},
btnPlayClick(){
// Revoke the previous URL so repeated plays do not leak object URLs.
if(this._lastUrl) URL.revokeObjectURL(this._lastUrl)
this._lastUrl = URL.createObjectURL(this.currentWebmData)
this.$refs.player.src = this._lastUrl
}
}
})
</script>
</html>
录制屏幕
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
<script src="https://cdn.staticfile.org/vue/2.2.2/vue.min.js"></script>
</head>
<body>
<div id="vueapp">
<div>
<video ref="preview" width="200" height="150" autoplay></video>
</div>
<div>
<button :disabled="recording" @click="btnRecordClick">Record</button>
<button :disabled="paused||!recording" @click="btnPauseClick">Pause</button>
<button :disabled="!paused||!recording" @click="btnResumeClick">Resume</button>
<button :disabled="!recording" @click="btnStopClick">Stop</button>
<button :disabled="!currentWebmData" @click="btnPlayClick">Play</button>
</div>
<video controls ref="player" width="400" height="300"></video>
</div>
</body>
<script>
// Record the screen (getDisplayMedia) with MediaRecorder and replay the
// finished recording in the <video controls> player.
new Vue({
el:"#vueapp",
data(){
return {
// _recorder/_stream are plain instance fields; Vue 2 does not proxy
// "_"-prefixed data keys anyway.
currentWebmData:null, // Blob of the most recently finished recording
recording:false,
paused:false
}
},
mounted(){
this._initApp()
},
methods:{
async _initApp(){
// The browser prompts the user to pick a screen/window/tab.
this._stream = await navigator.mediaDevices.getDisplayMedia()
this.$refs.preview.srcObject = this._stream
// ROBUSTNESS: fall back to the default container when
// "video/webm;codecs=h264" is unsupported.
let preferred = "video/webm;codecs=h264"
let options = MediaRecorder.isTypeSupported(preferred) ? {mimeType:preferred} : undefined
this._recorder = new MediaRecorder(this._stream, options)
this._chunks = []
this._recorder.ondataavailable = this.recorder_dataAvailableHandler.bind(this)
this._recorder.onstop = this.recorder_stopHandler.bind(this)
},
recorder_dataAvailableHandler(e){
// Accumulate every chunk — the original kept only the last one.
console.log(e)
if(e.data && e.data.size > 0) this._chunks.push(e.data)
},
recorder_stopHandler(){
this.currentWebmData = new Blob(this._chunks,{type:this._recorder.mimeType})
this._chunks = []
},
btnRecordClick(){
this.recording = true
this.paused = false
this._chunks = []
this._recorder.start()
},
btnPauseClick(){
this.paused = true
this._recorder.pause()
},
btnResumeClick(){
this.paused = false
this._recorder.resume()
},
btnStopClick(){
this.recording = false
this._recorder.stop()
},
btnPlayClick(){
// Revoke the previous URL so repeated plays do not leak object URLs.
if(this._lastUrl) URL.revokeObjectURL(this._lastUrl)
this._lastUrl = URL.createObjectURL(this.currentWebmData)
this.$refs.player.src = this._lastUrl
}
}
})
</script>
</html>
录制屏幕音视频
// Record screen video mixed with microphone audio (Electron-style desktop
// capture — "chromeMediaSource" is not a standard browser constraint).
// NOTE: top-level await requires a module / async context.
let audioStream = await navigator.mediaDevices.getUserMedia({video:false,audio:true}) // FIX: "await." was a syntax error
let screenStream = await navigator.mediaDevices.getUserMedia({
audio:false,
video:{
mandatory:{
chromeMediaSource:'desktop', // Electron-only constraint — TODO confirm runtime
minWidth:1280,
maxWidth:1280,
minHeight:720,
maxHeight:720
}
}
})
// Merge the screen's video tracks into the mic stream so one recorder captures both.
screenStream.getVideoTracks().forEach(value=>audioStream.addTrack(value))
// FIX: record the merged stream — the original referenced this._stream,
// which does not exist in this fragment.
let recoder = new MediaRecorder(audioStream,{mimeType:"video/webm;codecs=h264"})
// FIX: the handler must be async to await arrayBuffer().
recoder.ondataavailable = async e=>{
console.log(e)
let buffer = new Uint8Array(await e.data.arrayBuffer())
}
recoder.start()
合成视频
// Compose screen + camera onto a canvas and merge its captured video with the
// microphone audio into a single MediaStream.
// NOTE: requires the PlayerCanvas class to be defined before this runs.
const SCREEN_WIDTH = 1024
const SCREEN_HEIGHT = 640
let playerCanvas = new PlayerCanvas(SCREEN_WIDTH,SCREEN_HEIGHT)
let stream = new MediaStream()
// FIX: "await." was a syntax error, and a mic-only stream has no video
// tracks — add its AUDIO tracks to the output stream.
let audioStream = await navigator.mediaDevices.getUserMedia({video:false,audio:true})
audioStream.getAudioTracks().forEach(value=>stream.addTrack(value))
let cameraStream = await navigator.mediaDevices.getUserMedia({video:true,audio:false})
playerCanvas.setCameraVideo(createVideoElementWithStream(cameraStream))
let screenStream = await navigator.mediaDevices.getUserMedia({
audio:false,
video:{
mandatory:{
chromeMediaSource:'desktop', // Electron-only constraint — TODO confirm runtime
minWidth:SCREEN_WIDTH,
maxWidth:SCREEN_WIDTH,
minHeight:SCREEN_HEIGHT,
maxHeight:SCREEN_HEIGHT
}
}
})
playerCanvas.setScreenVideo(createVideoElementWithStream(screenStream))
let playerCanvasStream = playerCanvas.canvas.captureStream() // FIX: stray space before captureStream
playerCanvasStream.getTracks().forEach(t=>stream.addTrack(t))
// FIX: was a bare "createVideoElementWithStream(stream){" — method shorthand
// is invalid outside a class/object literal.
// Wraps a stream in an off-DOM autoplaying <video> usable as a drawImage source.
function createVideoElementWithStream(stream){
let video = document.createElement("video")
video.autoplay = true
video.srcObject = stream
return video
}
// Composites a full-size screen video with a small camera overlay
// (bottom-right picture-in-picture) onto a canvas, redrawing every frame.
class PlayerCanvas{
constructor(width,height){
this._canvas = document.createElement("canvas")
this._canvas.width = width
this._canvas.height = height
this._canvasWidth = width
// BUG FIX: the original assigned the undefined identifier "hegith"
// (and misspelled the property "_canvasHegith").
this._canvasHeight = height
this._camera_video_width = 200   // overlay size in pixels
this._camera_video_height = 150
this._context2d = this._canvas.getContext("2d")
requestAnimationFrame(this._animationFrameHandler.bind(this))
}
setScreenVideo(video){
this._screenVideo = video
}
setCameraVideo(video){
this._cameraVideo = video
}
// The canvas to captureStream() from.
get canvas(){
return this._canvas
}
_animationFrameHandler(){
if(this._screenVideo){
// Screen fills the whole canvas.
this._context2d.drawImage(this._screenVideo,0,0,this._canvasWidth,this._canvasHeight)
}
if(this._cameraVideo){
// Camera drawn after the screen so it sits on top, in the bottom-right corner.
this._context2d.drawImage(this._cameraVideo,
this._canvasWidth-this._camera_video_width,
this._canvasHeight-this._camera_video_height,
this._camera_video_width,
this._camera_video_height)
}
requestAnimationFrame(this._animationFrameHandler.bind(this))
}
}
// Record the composed stream; each dataavailable chunk is exposed as bytes.
let recoder = new MediaRecorder(stream,{mimeType:"video/webm;codecs=h264"})
// FIX: the handler must be async to use await on arrayBuffer().
recoder.ondataavailable = async e=>{
console.log(e)
let buffer = new Uint8Array(await e.data.arrayBuffer())
}
function startRecord(){
recoder.start()
}
startRecord()
视频滤镜
// Grayscale video filter: draw each frame onto a preview canvas, average the
// RGB channels per pixel, and paint the result onto a second canvas.
let video = document.createElement("video")
video.src="xx.mp4"
// FIX: width/height live on the canvas ELEMENT, not on its 2d context as the
// original assumed (context.width was undefined); also fixes "heigth" typos.
let previewCanvas = document.getElementById("preview")
let previewContext2d = previewCanvas.getContext("2d")
let resultCanvas = document.getElementById("result")
let resultContext2d = resultCanvas.getContext("2d")
requestAnimationFrame(animationFrameHandler)
// FIX: was a bare "animationFrameHandler(){" — invalid outside a class.
function animationFrameHandler(){
previewContext2d.drawImage(video,0,0,previewCanvas.width,previewCanvas.height)
let srcImageData = previewContext2d.getImageData(0,0,previewCanvas.width,previewCanvas.height)
let destImageData = resultContext2d.createImageData(srcImageData.width,srcImageData.height)
let length = srcImageData.data.length
let rawData = srcImageData.data
for(let i=0;i<length;i+=4){
// Simple average of R,G,B gives the gray level; alpha is forced opaque.
let c = Math.floor((rawData[i]+rawData[i+1]+rawData[i+2])/3)
destImageData.data[i] = c
destImageData.data[i+1] = c
destImageData.data[i+2] = c
destImageData.data[i+3] = 255
}
resultContext2d.putImageData(destImageData,0,0)
requestAnimationFrame(animationFrameHandler)
}
// NOTE(review): play() then pause() immediately — presumably to force the
// first frame to load; confirm intent.
video.play()
video.pause()
HLS播放器
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
<script src="hls.js"></script>
</head>
<body>
<!-- BUG FIX: the original used a misspelled <videp> tag, which the browser
     treats as an unknown element — no video player was ever rendered. -->
<video id="video" width="400" height="300" controls></video>
</body>
<script>
// Attach hls.js to the <video> element and start playback once the
// m3u8 manifest has been parsed.
let video = document.getElementById("video")
if(Hls.isSupported()){
var hls = new Hls()
hls.loadSource('xxx.m3u8')
hls.attachMedia(video)
hls.on(Hls.Events.MANIFEST_PARSED,function(){
video.play()
})
}
</script>
</html>
nodejs
// Minimal Node.js HTTP server answering every request with "hello world".
const http = require("http")
http.createServer((req,res)=>{
res.end("hello world")
}).listen(3000) // BUG FIX: was ".linsten(3000)", a nonexistent method
webRtc
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
</head>
<body>
<!-- FIX: autoplay added so the live camera stream actually renders -->
<video id="video" autoplay style="width:640px;height: 480px;"></video>
<canvas id="output" style="display: none"></canvas>
</body>
<script type="text/javascript">
// Grabs the camera, draws frames onto a hidden canvas every 50ms, and pushes
// them as JPEG data URLs over a WebSocket.
var back = document.getElementById("output")
var backContext = back.getContext("2d")
var video = document.getElementById("video")
var socket
var interval
// FIX: pass the function itself instead of an eval'd string.
setTimeout(init,100)
function init(){
socket = new WebSocket("ws://192.168.1.8:8080/websocket")
socket.onopen = onOpen
socket.onclose = onClose
}
function onOpen(){
interval = setInterval(function () {
draw()
},50)
}
function onClose(){
// FIX: stop the draw loop before reconnecting — the original leaked a new
// interval on every reconnect, each one sending on a closed socket.
clearInterval(interval)
init()
}
function draw() {
backContext.drawImage(video,0,0,back.width,back.height)
// FIX: the 0.5 JPEG quality belongs to toDataURL, not to socket.send.
socket.send(back.toDataURL("image/jpeg",0.5))
}
function success(stream){
console.log(stream)
video.srcObject = stream
}
// FIX: the original shimmed navigator.getUserMedia with the promise-based
// mediaDevices.getUserMedia and then called it with the legacy callback
// signature, so the success callback never ran. Use the promise API directly.
navigator.mediaDevices.getUserMedia({video:true,audio:false}).then(success).catch(console.log);
</script>
</html>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
</head>
<body>
<img id="recive" style="width: 400px;height: 330px;">
</body>
<script type="text/javascript">
// Receiver side: shows the latest JPEG data URL pushed over the WebSocket
// by the sender page.
let image = document.getElementById("recive")
let socket = new WebSocket("ws://192.168.1.8:8080/websocket")
socket.onmessage = (event) => {
// Each message is a data: URL; assigning it swaps the displayed frame.
image.src = event.data
}
</script>
</html>
package com.sws.web.websocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import javax.websocket.OnClose;
import javax.websocket.OnMessage;
import javax.websocket.OnOpen;
import javax.websocket.Session;
import javax.websocket.server.ServerEndpoint;

import java.io.IOException;
import java.util.Vector;
import java.util.concurrent.CopyOnWriteArrayList;
/**
 * Broadcast WebSocket endpoint: one instance is created per connection
 * (javax.websocket default); every received text message is relayed to
 * every connected client.
 */
@ServerEndpoint("/websocket")
@Component
public class WebSocket {
    private static final Logger logger = LoggerFactory.getLogger(WebSocket.class);
    private Session session;
    // BUG FIX: iterating a Vector in onMessage while another thread
    // adds/removes clients can throw ConcurrentModificationException;
    // CopyOnWriteArrayList iterates over a stable snapshot.
    private static final CopyOnWriteArrayList<WebSocket> clients = new CopyOnWriteArrayList<>();

    @OnOpen
    public void onOpen(Session session) {
        logger.info("建立了一个新连接");
        this.session = session;
        clients.add(this);
    }

    @OnClose
    public void onClose() {
        logger.info("断开了一个连接");
        clients.remove(this);
    }

    /** Relays {@code message} to every connected client (including the sender). */
    @OnMessage
    public void onMessage(String message, Session session) {
        for (WebSocket client : clients) {
            try {
                // Serialize concurrent sends on the same remote endpoint.
                synchronized (client) {
                    client.session.getBasicRemote().sendText(message);
                }
            } catch (IOException e) {
                // Log and continue so one broken client does not stop the broadcast.
                logger.error("failed to send message to client", e);
            }
        }
    }
}