<!-- javascript: Audio Play demo -->
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Audio Play</title>
<meta http-equiv="description" content="涂聚文, Geovin Du,geovindu" />
<meta name="description" content="涂聚文, Geovin Du,geovindu" />
<meta name="keywords" content="HTML5, audio, web, javascript" />
<meta name="author" content="涂聚文, Geovin Du,geovindu" />
</head>
<body>
<button id="btn">
Start
</button>
<div id="wrapper"></div>
<br />
<canvas id="canvas" width="512" height="255" style="background:#CCCFFF;"></canvas>
<script type="text/javascript">
// On "Start": create an <audio> element, route it through an AnalyserNode,
// and draw a live frequency-bar visualisation on the canvas. The work is
// done inside a click handler because browsers require a user gesture
// before an AudioContext may produce sound.
document.getElementById("btn").addEventListener("click", function () {
  var canvas = document.getElementById('canvas');
  var canvasCtx = canvas.getContext("2d");
  var AudioContextCtor = window.AudioContext || window.webkitAudioContext ||
      window.mozAudioContext || window.msAudioContext;
  var audioContext = new AudioContextCtor();
  var analyser = audioContext.createAnalyser();
  var data = new Uint8Array(analyser.frequencyBinCount);

  // Per-frame draw loop: one 1px-wide bar per frequency bin.
  function render() {
    analyser.getByteFrequencyData(data);
    canvasCtx.clearRect(0, 0, canvas.width, canvas.height);
    canvasCtx.fillStyle = "#ffff00"; // bar colour (loop-invariant, hoisted)
    for (var i = 0, l = data.length; i < l; i++) {
      // Scale the 0-255 byte value to the canvas height and draw the bar
      // upward from the bottom edge. (The original drew at a negative y
      // with the full canvas height, rendering the bars upside down.)
      var barHeight = (canvas.height / 255) * data[i];
      canvasCtx.fillRect(i, canvas.height - barHeight, 1, barHeight);
    }
    requestAnimationFrame(render);
  }
  requestAnimationFrame(render);

  var audio = new Audio();
  audio.loop = true;
  audio.autoplay = false;
  // CORS mode so the analyser is allowed to read the cross-origin audio data.
  audio.crossOrigin = "anonymous";
  audio.addEventListener('error', function (e) {
    console.log(e);
  });
  audio.src = "https://greggman.github.io/doodles/sounds/DOCTOR VOX - Level Up.mp3";
  audio.controls = true;
  document.getElementById("wrapper").append(audio);

  // createMediaElementSource() may be called only ONCE per media element,
  // but 'canplay' can fire again later (e.g. after seeking); register the
  // graph-wiring handler with { once: true } to avoid an InvalidStateError.
  audio.addEventListener('canplay', function () {
    var audioSourceNode = audioContext.createMediaElementSource(audio);
    audioSourceNode.connect(analyser);
    analyser.connect(audioContext.destination);
  }, { once: true });
});
</script>
</body>
</html>
<!DOCTYPE html>
<html lang="zh-Hans">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="description" content="涂聚文, Geovin Du,geovindu" />
<meta name="description" content="涂聚文, Geovin Du,geovindu" />
<meta name="keywords" content="HTML5, audio, web, javascript" />
<meta name="author" content="涂聚文, Geovin Du,geovindu" />
<title>播放音乐</title>
</head>
<body>
<div id="effect" class="part">
<h3>效果:</h3>
<div class="show">
<div class="demo">
<button id="button">经过我</button>
</div>
</div>
</div>
<script type="text/javascript">
//https://frost.cs.uchicago.edu/ref/JavaScript/developer.mozilla.org/en-US/docs/Web/API/AudioContext.html
//https://caniuse.com/?search=webaudio
/*
IE
Edge*
Firefox
Chrome
Safari
Opera
Safari on iOS
Opera Mini*
Android Browser*
Opera Mobile*
Chrome for Android
Firefox for Android
UC Browser for Android
Samsung Internet
QQ Browser
Baidu Browser
KaiOS Browse
*/
// Normalise the vendor-prefixed constructors onto window.AudioContext so the
// rest of the script can feature-detect a single name.
window.AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.msAudioContext;

(function () {
  if (!window.AudioContext) {
    alert('当前浏览器不支持Web Audio API');
    return;
  }

  // The button that triggers a tone.
  var eleButton = document.getElementById('button');

  // One shared audio context for every tone played by this page.
  var audioCtx = new window.AudioContext();

  // Note frequencies in Hz; each interaction plays the next note, walking up
  // the scale, bouncing at either end and walking back.
  var arrFrequency = [196.00, 220.00, 246.94, 261.63, 293.66, 329.63, 349.23, 392.00, 440.00, 493.88, 523.25, 587.33, 659.25, 698.46, 783.99, 880.00, 987.77, 1046.50];
  var start = 0;     // index of the next note to play
  var direction = 1; // +1 ascending, -1 descending

  // Touch devices react to touchstart, everything else to mouseenter.
  var eventType = ('ontouchstart' in window || 'ontouchstart' in document) ? 'touchstart' : 'mouseenter';

  eleButton.addEventListener(eventType, function () {
    // Autoplay policy: a context created outside a user gesture starts in
    // the 'suspended' state; resume it on the first interaction, otherwise
    // no sound is produced in modern browsers.
    if (audioCtx.state === 'suspended') {
      audioCtx.resume();
    }

    var frequency = arrFrequency[start];
    // Walked past either end of the scale: flip direction and step back
    // inside the array, then pick the note there.
    if (!frequency) {
      direction = -1 * direction;
      start = start + 2 * direction;
      frequency = arrFrequency[start];
    }
    // Advance the index for the next interaction.
    start = start + direction;

    // Audio graph: oscillator -> gain -> destination (speakers).
    var oscillator = audioCtx.createOscillator();
    var gainNode = audioCtx.createGain();
    oscillator.connect(gainNode);
    gainNode.connect(audioCtx.destination);

    // Sine tone at the chosen pitch (other types: square|triangle|sawtooth).
    oscillator.type = 'sine';
    oscillator.frequency.value = frequency;

    // Envelope: 10 ms linear fade-in to avoid a click, then an exponential
    // fade-out over one second before the oscillator is stopped.
    gainNode.gain.setValueAtTime(0, audioCtx.currentTime);
    gainNode.gain.linearRampToValueAtTime(1, audioCtx.currentTime + 0.01);
    oscillator.start(audioCtx.currentTime);
    gainNode.gain.exponentialRampToValueAtTime(0.001, audioCtx.currentTime + 1);
    oscillator.stop(audioCtx.currentTime + 1);
  });
})();
</script>
</body>
</html>
// Media-capture request with error triage by error name (old and new names).
// NOTE(review): getUserMedia() rejects with TypeError when BOTH video and
// audio are false — at least one kind of media must be requested for this
// call to succeed; as written it always hits the TypeError branch below.
var constraints = {
  video: false,
  audio: false
};
navigator.mediaDevices.getUserMedia(constraints).then(function success(stream) {
  /* do stuff */
}).catch(function (err) {
  // log to console first
  console.log(err); /* handle the error */
  if (err.name == "NotFoundError" || err.name == "DevicesNotFoundError") {
    // required track is missing
  } else if (err.name == "NotReadableError" || err.name == "TrackStartError") {
    // webcam or mic are already in use
  } else if (err.name == "OverconstrainedError" || err.name == "ConstraintNotSatisfiedError") {
    // constraints can not be satisfied by avb. devices
  } else if (err.name == "TypeError") {
    // empty constraints object (the original duplicated this test:
    // `TypeError || TypeError`)
  } else {
    // other errors
  }
});
<!--
References:
Regular Expression Language - Quick Reference | Microsoft Docs
https://docs.microsoft.com/en-us/dotnet/standard/base-types/regular-expression-language-quick-reference
C# in a Nutshell - Code Listings (albahari.com)
http://www.albahari.com/nutshell/E9-CH25.aspx
哲学管理(学)人生, 文学艺术生活, 自动(计算机学)物理(学)工作, 生物(学)化学逆境, 历史(学)测绘(学)时间, 经济(学)数学金钱(理财), 心理(学)医学情绪, 诗词美容情感, 美学建筑(学)家园, 解构建构(分析)整合学习, 智商情商(IQ、EQ)运筹(学)生存.---Geovin Du(涂聚文)
浙公网安备 33010602011771号
-->