forked from Simnation/Main
4872 lines · 3 MiB · JavaScript
(function(){const t=document.createElement("link").relList;if(t&&t.supports&&t.supports("modulepreload"))return;for(const a of document.querySelectorAll('link[rel="modulepreload"]'))n(a);new MutationObserver(a=>{for(const s of a)if(s.type==="childList")for(const r of s.addedNodes)r.tagName==="LINK"&&r.rel==="modulepreload"&&n(r)}).observe(document,{childList:!0,subtree:!0});function i(a){const s={};return a.integrity&&(s.integrity=a.integrity),a.referrerPolicy&&(s.referrerPolicy=a.referrerPolicy),a.crossOrigin==="use-credentials"?s.credentials="include":a.crossOrigin==="anonymous"?s.credentials="omit":s.credentials="same-origin",s}function n(a){if(a.ep)return;a.ep=!0;const s=i(a);fetch(a.href,s)}})();/**
* @vue/shared v3.5.16
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**//*! #__NO_SIDE_EFFECTS__ */function q_(e){const t=Object.create(null);for(const i of e.split(","))t[i]=1;return i=>i in t}const ri={},il=[],Wa=()=>{},Hk=()=>!1,tA=e=>e.charCodeAt(0)===111&&e.charCodeAt(1)===110&&(e.charCodeAt(2)>122||e.charCodeAt(2)<97),V_=e=>e.startsWith("onUpdate:"),$i=Object.assign,W_=(e,t)=>{const i=e.indexOf(t);i>-1&&e.splice(i,1)},Qk=Object.prototype.hasOwnProperty,Zt=(e,t)=>Qk.call(e,t),yt=Array.isArray,nl=e=>iA(e)==="[object Map]",X2=e=>iA(e)==="[object Set]",Bt=e=>typeof e=="function",Ti=e=>typeof e=="string",Us=e=>typeof e=="symbol",Fi=e=>e!==null&&typeof e=="object",J2=e=>(Fi(e)||Bt(e))&&Bt(e.then)&&Bt(e.catch),Z2=Object.prototype.toString,iA=e=>Z2.call(e),jk=e=>iA(e).slice(8,-1),$2=e=>iA(e)==="[object Object]",K_=e=>Ti(e)&&e!=="NaN"&&e[0]!=="-"&&""+parseInt(e,10)===e,Su=q_(",key,ref,ref_for,ref_key,onVnodeBeforeMount,onVnodeMounted,onVnodeBeforeUpdate,onVnodeUpdated,onVnodeBeforeUnmount,onVnodeUnmounted"),nA=e=>{const t=Object.create(null);return i=>t[i]||(t[i]=e(i))},Gk=/-(\w)/g,Aa=nA(e=>e.replace(Gk,(t,i)=>i?i.toUpperCase():"")),zk=/\B([A-Z])/g,Br=nA(e=>e.replace(zk,"-$1").toLowerCase()),aA=nA(e=>e.charAt(0).toUpperCase()+e.slice(1)),vg=nA(e=>e?`on${aA(e)}`:""),ur=(e,t)=>!Object.is(e,t),ff=(e,...t)=>{for(let i=0;i<e.length;i++)e[i](...t)},ey=(e,t,i,n=!1)=>{Object.defineProperty(e,t,{configurable:!0,enumerable:!1,writable:n,value:i})},Np=e=>{const t=parseFloat(e);return isNaN(t)?e:t},qk=e=>{const t=Ti(e)?Number(e):NaN;return isNaN(t)?e:t};let K0;const sA=()=>K0||(K0=typeof globalThis<"u"?globalThis:typeof self<"u"?self:typeof window<"u"?window:typeof global<"u"?global:{});function $e(e){if(yt(e)){const t={};for(let i=0;i<e.length;i++){const n=e[i],a=Ti(n)?Yk(n):$e(n);if(a)for(const s in a)t[s]=a[s]}return t}else if(Ti(e)||Fi(e))return e}const Vk=/;(?![^(]*\))/g,Wk=/:([^]+)/,Kk=/\/\*[^]*?\*\//g;function Yk(e){const t={};return e.replace(Kk,"").split(Vk).forEach(i=>{if(i){const n=i.split(Wk);n.length>1&&(t[n[0].trim()]=n[1].trim())}}),t}function H(e){let t="";if(Ti(e))t=e;else if(yt(e))for(let i=0;i<e.length;i++){const n=H(e[i]);n&&(t+=n+" ")}else if(Fi(e))for(const i in e)e[i]&&(t+=i+" ");return t.trim()}const Xk="itemscope,allowfullscreen,formnovalidate,ismap,nomodule,novalidate,readonly",Jk=q_(Xk);function ty(e){return!!e||e===""}const iy=e=>!!(e&&e.__v_isRef===!0),m=e=>Ti(e)?e:e==null?"":yt(e)||Fi(e)&&(e.toString===Z2||!Bt(e.toString))?iy(e)?m(e.value):JSON.stringify(e,ny,2):String(e),ny=(e,t)=>iy(t)?ny(e,t.value):nl(t)?{[`Map(${t.size})`]:[...t.entries()].reduce((i,[n,a],s)=>(i[yg(n,s)+" =>"]=a,i),{})}:X2(t)?{[`Set(${t.size})`]:[...t.values()].map(i=>yg(i))}:Us(t)?yg(t):Fi(t)&&!yt(t)&&!$2(t)?String(t):t,yg=(e,t="")=>{var i;return Us(e)?`Symbol(${(i=e.description)!=null?i:t})`:e};/**
* @vue/reactivity v3.5.16
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**/let fn;class ay{constructor(t=!1){this.detached=t,this._active=!0,this._on=0,this.effects=[],this.cleanups=[],this._isPaused=!1,this.parent=fn,!t&&fn&&(this.index=(fn.scopes||(fn.scopes=[])).push(this)-1)}get active(){return this._active}pause(){if(this._active){this._isPaused=!0;let t,i;if(this.scopes)for(t=0,i=this.scopes.length;t<i;t++)this.scopes[t].pause();for(t=0,i=this.effects.length;t<i;t++)this.effects[t].pause()}}resume(){if(this._active&&this._isPaused){this._isPaused=!1;let t,i;if(this.scopes)for(t=0,i=this.scopes.length;t<i;t++)this.scopes[t].resume();for(t=0,i=this.effects.length;t<i;t++)this.effects[t].resume()}}run(t){if(this._active){const i=fn;try{return fn=this,t()}finally{fn=i}}}on(){++this._on===1&&(this.prevScope=fn,fn=this)}off(){this._on>0&&--this._on===0&&(fn=this.prevScope,this.prevScope=void 0)}stop(t){if(this._active){this._active=!1;let i,n;for(i=0,n=this.effects.length;i<n;i++)this.effects[i].stop();for(this.effects.length=0,i=0,n=this.cleanups.length;i<n;i++)this.cleanups[i]();if(this.cleanups.length=0,this.scopes){for(i=0,n=this.scopes.length;i<n;i++)this.scopes[i].stop(!0);this.scopes.length=0}if(!this.detached&&this.parent&&!t){const a=this.parent.scopes.pop();a&&a!==this&&(this.parent.scopes[this.index]=a,a.index=this.index)}this.parent=void 0}}}function sy(e){return new ay(e)}function ry(){return fn}function Zk(e,t=!1){fn&&fn.cleanups.push(e)}let hi;const wg=new WeakSet;class oy{constructor(t){this.fn=t,this.deps=void 0,this.depsTail=void 0,this.flags=5,this.next=void 0,this.cleanup=void 0,this.scheduler=void 0,fn&&fn.active&&fn.effects.push(this)}pause(){this.flags|=64}resume(){this.flags&64&&(this.flags&=-65,wg.has(this)&&(wg.delete(this),this.trigger()))}notify(){this.flags&2&&!(this.flags&32)||this.flags&8||uy(this)}run(){if(!(this.flags&1))return this.fn();this.flags|=2,Y0(this),cy(this);const t=hi,i=Ea;hi=this,Ea=!0;try{return this.fn()}finally{dy(this),hi=t,Ea=i,this.flags&=-3}}stop(){if(this.flags&1){for(let t=this.deps;t;t=t.nextDep)J_(t);this.deps=this.depsTail=void 0,Y0(this),this.onStop&&this.onStop(),this.flags&=-2}}trigger(){this.flags&64?wg.add(this):this.scheduler?this.scheduler():this.runIfDirty()}runIfDirty(){Hp(this)&&this.run()}get dirty(){return Hp(this)}}let ly=0,Du,xu;function uy(e,t=!1){if(e.flags|=8,t){e.next=xu,xu=e;return}e.next=Du,Du=e}function Y_(){ly++}function X_(){if(--ly>0)return;if(xu){let t=xu;for(xu=void 0;t;){const i=t.next;t.next=void 0,t.flags&=-9,t=i}}let e;for(;Du;){let t=Du;for(Du=void 0;t;){const i=t.next;if(t.next=void 0,t.flags&=-9,t.flags&1)try{t.trigger()}catch(n){e||(e=n)}t=i}}if(e)throw e}function cy(e){for(let t=e.deps;t;t=t.nextDep)t.version=-1,t.prevActiveLink=t.dep.activeLink,t.dep.activeLink=t}function dy(e){let t,i=e.depsTail,n=i;for(;n;){const a=n.prevDep;n.version===-1?(n===i&&(i=a),J_(n),$k(n)):t=n,n.dep.activeLink=n.prevActiveLink,n.prevActiveLink=void 0,n=a}e.deps=t,e.depsTail=i}function Hp(e){for(let t=e.deps;t;t=t.nextDep)if(t.dep.version!==t.version||t.dep.computed&&(fy(t.dep.computed)||t.dep.version!==t.version))return!0;return!!e._dirty}function fy(e){if(e.flags&4&&!(e.flags&16)||(e.flags&=-17,e.globalVersion===Ku)||(e.globalVersion=Ku,!e.isSSR&&e.flags&128&&(!e.deps&&!e._dirty||!Hp(e))))return;e.flags|=2;const t=e.dep,i=hi,n=Ea;hi=e,Ea=!0;try{cy(e);const a=e.fn(e._value);(t.version===0||ur(a,e._value))&&(e.flags|=128,e._value=a,t.version++)}catch(a){throw t.version++,a}finally{hi=i,Ea=n,dy(e),e.flags&=-3}}function 
J_(e,t=!1){const{dep:i,prevSub:n,nextSub:a}=e;if(n&&(n.nextSub=a,e.prevSub=void 0),a&&(a.prevSub=n,e.nextSub=void 0),i.subs===e&&(i.subs=n,!n&&i.computed)){i.computed.flags&=-5;for(let s=i.computed.deps;s;s=s.nextDep)J_(s,!0)}!t&&!--i.sc&&i.map&&i.map.delete(i.key)}function $k(e){const{prevDep:t,nextDep:i}=e;t&&(t.nextDep=i,e.prevDep=void 0),i&&(i.prevDep=t,e.nextDep=void 0)}let Ea=!0;const hy=[];function Es(){hy.push(Ea),Ea=!1}function ks(){const e=hy.pop();Ea=e===void 0?!0:e}function Y0(e){const{cleanup:t}=e;if(e.cleanup=void 0,t){const i=hi;hi=void 0;try{t()}finally{hi=i}}}let Ku=0;class eB{constructor(t
* @vue/runtime-core v3.5.16
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**/function Jc(e,t,i,n){try{return n?e(...n):e()}catch(a){oA(a,t,i)}}function Ba(e,t,i,n){if(Bt(e)){const a=Jc(e,t,i,n);return a&&J2(a)&&a.catch(s=>{oA(s,t,i)}),a}if(yt(e)){const a=[];for(let s=0;s<e.length;s++)a.push(Ba(e[s],t,i,n));return a}}function oA(e,t,i,n=!0){const a=t?t.vnode:null,{errorHandler:s,throwUnhandledErrorInProduction:r}=t&&t.appContext.config||ri;if(t){let o=t.parent;const l=t.proxy,u=`https://vuejs.org/error-reference/#runtime-${i}`;for(;o;){const f=o.ec;if(f){for(let h=0;h<f.length;h++)if(f[h](e,l,u)===!1)return}o=o.parent}if(s){Es(),Jc(s,null,10,[e,l,u]),ks();return}}kB(e,i,a,n,r)}function kB(e,t,i,n=!0,a=!1){if(a)throw e;console.error(e)}const En=[];let Ha=-1;const al=[];let Js=null,Vo=0;const By=Promise.resolve();let Lf=null;function lA(e){const t=Lf||By;return e?t.then(this?e.bind(this):e):t}function BB(e){let t=Ha+1,i=En.length;for(;t<i;){const n=t+i>>>1,a=En[n],s=Xu(a);s<e||s===e&&a.flags&2?t=n+1:i=n}return t}function n1(e){if(!(e.flags&1)){const t=Xu(e),i=En[En.length-1];!i||!(e.flags&2)&&t>=Xu(i)?En.push(e):En.splice(BB(t),0,e),e.flags|=1,Sy()}}function Sy(){Lf||(Lf=By.then(xy))}function SB(e){yt(e)?al.push(...e):Js&&e.id===-1?Js.splice(Vo+1,0,e):e.flags&1||(al.push(e),e.flags|=1),Sy()}function J0(e,t,i=Ha+1){for(;i<En.length;i++){const n=En[i];if(n&&n.flags&2){if(e&&n.id!==e.uid)continue;En.splice(i,1),i--,n.flags&4&&(n.flags&=-2),n(),n.flags&4||(n.flags&=-2)}}}function Dy(e){if(al.length){const t=[...new Set(al)].sort((i,n)=>Xu(i)-Xu(n));if(al.length=0,Js){Js.push(...t);return}for(Js=t,Vo=0;Vo<Js.length;Vo++){const i=Js[Vo];i.flags&4&&(i.flags&=-2),i.flags&8||i(),i.flags&=-2}Js=null,Vo=0}}const Xu=e=>e.id==null?e.flags&2?-1:1/0:e.id;function xy(e){try{for(Ha=0;Ha<En.length;Ha++){const t=En[Ha];t&&!(t.flags&8)&&(t.flags&4&&(t.flags&=-2),Jc(t,t.i,t.i?15:14),t.flags&4||(t.flags&=-2))}}finally{for(;Ha<En.length;Ha++){const t=En[Ha];t&&(t.flags&=-2)}Ha=-1,En.length=0,Dy(),Lf=null,(En.length||al.length)&&xy()}}let Ki=null,Ty=null;function Rf(e){const t=Ki;return Ki=e,Ty=e&&e.type.__scopeId||null,t}function gt(e,t=Ki,i){if(!t||e._n)return e;const n=(...a)=>{n._d&&lF(-1);const s=Rf(t);let r;try{r=e(...a)}finally{Rf(s),n._d&&lF(1)}return r};return n._n=!0,n._c=!0,n._d=!0,n}function Me(e,t){if(Ki===null)return e;const i=hA(Ki),n=e.dirs||(e.dirs=[]);for(let a=0;a<t.length;a++){let[s,r,o,l=ri]=t[a];s&&(Bt(s)&&(s={mounted:s,updated:s}),s.deep&&vs(r),n.push({dir:s,instance:i,value:r,oldValue:void 0,arg:o,modifiers:l}))}return e}function Nr(e,t,i,n){const a=e.dirs,s=t&&t.dirs;for(let r=0;r<a.length;r++){const o=a[r];s&&(o.oldValue=s[r].value);let l=o.dir[n];l&&(Es(),Ba(l,i,8,[e.el,o,e,t]),ks())}}const DB=Symbol("_vte"),Iy=e=>e.__isTeleport,Zs=Symbol("_leaveCb"),Id=Symbol("_enterCb");function xB(){const e={isMounted:!1,isLeaving:!1,isUnmounting:!1,leavingVNodes:new Map};return Sr(()=>{e.isMounted=!0}),wo(()=>{e.isUnmounting=!0}),e}const sa=[Function,Array],Py={mode:String,appear:Boolean,persisted:Boolean,onBeforeEnter:sa,onEnter:sa,onAfterEnter:sa,onEnterCancelled:sa,onBeforeLeave:sa,onLeave:sa,onAfterLeave:sa,onLeaveCancelled:sa,onBeforeAppear:sa,onAppear:sa,onAfterAppear:sa,onAppearCancelled:sa},My=e=>{const t=e.subTree;return t.component?My(t.component):t},TB={name:"BaseTransition",props:Py,setup(e,{slots:t}){const i=wS(),n=xB();return()=>{const a=t.default&&Uy(t.default(),!0);if(!a||!a.length)return;const s=Ly(a),r=zt(e),{mode:o}=r;if(n.isLeaving)return kg(s);const l=Z0(s);if(!l)return kg(s);let u=Gp(l,r,n,i,h=>u=h);l.type!==gn&&Ju(l,u);let 
f=i.subTree&&Z0(i.subTree);if(f&&f.type!==gn&&!qr(l,f)&&My(i).type!==gn){let h=Gp(f,r,n,i);if(Ju(f,h),o==="out-in"&&l.type!==gn)return n.isLeaving=!0,h.afterLeave=()=>{n.isLeaving=!1,i.job.flags&8||i.update(),delete h.afterLeave,f=void 0},kg(s);o==="in-out"&&l.type!==gn?h.delayLeave=(A,p,F)=>{const y=Ry(n,f);y[String(f.key)]=f,A[Zs]=()=>{p(),A[Zs]=void 0,delete u.delayedLeave,f=void 0},u.delayedLeave=()=>{F(),delete u.delayedLeave,f=void 0}}:f=void 0}else f&&(f=void 0);return s}}};function Ly(e){let t=e[0];if(e.length>1){for(const i of e)if(i.type!==gn){t=i;break}
* @vue/runtime-dom v3.5.16
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**/let Yp;const dF=typeof window<"u"&&window.trustedTypes;if(dF)try{Yp=dF.createPolicy("vue",{createHTML:e=>e})}catch{}const hw=Yp?e=>Yp.createHTML(e):e=>e,TS="http://www.w3.org/2000/svg",IS="http://www.w3.org/1998/Math/MathML",ms=typeof document<"u"?document:null,fF=ms&&ms.createElement("template"),PS={insert:(e,t,i)=>{t.insertBefore(e,i||null)},remove:e=>{const t=e.parentNode;t&&t.removeChild(e)},createElement:(e,t,i,n)=>{const a=t==="svg"?ms.createElementNS(TS,e):t==="mathml"?ms.createElementNS(IS,e):i?ms.createElement(e,{is:i}):ms.createElement(e);return e==="select"&&n&&n.multiple!=null&&a.setAttribute("multiple",n.multiple),a},createText:e=>ms.createTextNode(e),createComment:e=>ms.createComment(e),setText:(e,t)=>{e.nodeValue=t},setElementText:(e,t)=>{e.textContent=t},parentNode:e=>e.parentNode,nextSibling:e=>e.nextSibling,querySelector:e=>ms.querySelector(e),setScopeId(e,t){e.setAttribute(t,"")},insertStaticContent(e,t,i,n,a,s){const r=i?i.previousSibling:t.lastChild;if(a&&(a===s||a.nextSibling))for(;t.insertBefore(a.cloneNode(!0),i),!(a===s||!(a=a.nextSibling)););else{fF.innerHTML=hw(n==="svg"?`<svg>${e}</svg>`:n==="mathml"?`<math>${e}</math>`:e);const o=fF.content;if(n==="svg"||n==="mathml"){const l=o.firstChild;for(;l.firstChild;)o.appendChild(l.firstChild);o.removeChild(l)}t.insertBefore(o,i)}return[r?r.nextSibling:t.firstChild,i?i.previousSibling:t.lastChild]}},qs="transition",ru="animation",tc=Symbol("_vtc"),Aw={name:String,type:String,css:{type:Boolean,default:!0},duration:[String,Number,Object],enterFromClass:String,enterActiveClass:String,enterToClass:String,appearFromClass:String,appearActiveClass:String,appearToClass:String,leaveFromClass:String,leaveActiveClass:String,leaveToClass:String},MS=$i({},Py,Aw),LS=e=>(e.displayName="Transition",e.props=MS,e),fi=LS((e,{slots:t})=>ni(IB,RS(e),t)),Qr=(e,t=[])=>{yt(e)?e.forEach(i=>i(...t)):e&&e(...t)},hF=e=>e?yt(e)?e.some(t=>t.length>1):e.length>1:!1;function RS(e){const t={};for(const de in e)de in Aw||(t[de]=e[de]);if(e.css===!1)return t;const{name:i="v",type:n,duration:a,enterFromClass:s=`${i}-enter-from`,enterActiveClass:r=`${i}-enter-active`,enterToClass:o=`${i}-enter-to`,appearFromClass:l=s,appearActiveClass:u=r,appearToClass:f=o,leaveFromClass:h=`${i}-leave-from`,leaveActiveClass:A=`${i}-leave-active`,leaveToClass:p=`${i}-leave-to`}=e,F=US(a),y=F&&F[0],E=F&&F[1],{onBeforeEnter:w,onEnter:C,onEnterCancelled:B,onLeave:S,onLeaveCancelled:U,onBeforeAppear:N=w,onAppear:z=C,onAppearCancelled:Q=B}=t,R=(de,Fe,ge,le)=>{de._enterCancelled=le,jr(de,Fe?f:o),jr(de,Fe?u:r),ge&&ge()},W=(de,Fe)=>{de._isLeaving=!1,jr(de,h),jr(de,p),jr(de,A),Fe&&Fe()},Z=de=>(Fe,ge)=>{const le=de?z:C,se=()=>R(Fe,de,ge);Qr(le,[Fe,se]),AF(()=>{jr(Fe,de?l:s),ds(Fe,de?f:o),hF(le)||gF(Fe,n,y,se)})};return $i(t,{onBeforeEnter(de){Qr(w,[de]),ds(de,s),ds(de,r)},onBeforeAppear(de){Qr(N,[de]),ds(de,l),ds(de,u)},onEnter:Z(!1),onAppear:Z(!0),onLeave(de,Fe){de._isLeaving=!0;const ge=()=>W(de,Fe);ds(de,h),de._enterCancelled?(ds(de,A),_F()):(_F(),ds(de,A)),AF(()=>{de._isLeaving&&(jr(de,h),ds(de,p),hF(S)||gF(de,n,E,ge))}),Qr(S,[de,ge])},onEnterCancelled(de){R(de,!1,void 0,!0),Qr(B,[de])},onAppearCancelled(de){R(de,!0,void 0,!0),Qr(Q,[de])},onLeaveCancelled(de){W(de),Qr(U,[de])}})}function US(e){if(e==null)return null;if(Fi(e))return[Dg(e.enter),Dg(e.leave)];{const t=Dg(e);return[t,t]}}function Dg(e){return qk(e)}function ds(e,t){t.split(/\s+/).forEach(i=>i&&e.classList.add(i)),(e[tc]||(e[tc]=new Set)).add(t)}function 
jr(e,t){t.split(/\s+/).forEach(n=>n&&e.classList.remove(n));const i=e[tc];i&&(i.delete(t),i.size||(e[tc]=void 0))}function AF(e){requestAnimationFrame(()=>{requestAnimationFrame(e)})}let OS=0;function gF(e,t,i,n){const a=e._endId=++OS,s=()=>{a===e._endId&&n()};if(i!=null)return setTimeout(s,i);const{type:r,timeout:o,propCount:l}=NS(e,t);if(!r)return n();const u=r+"end";let f=0;const h=()=>{e.removeEventListener(u,A),s()},A=p=>{p.target===e&&++f>=l&&h()};setTimeout(()=>{f<l&&h()},o+1),e.addEventListener(u,A)}function NS(e,t){const i=window.getComputedStyle(e),n=F=>(i[F]||"").split(", "),a=n(`${qs
* pinia v2.3.1
* (c) 2025 Eduardo San Martin Morote
* @license MIT
*/let pw;const AA=e=>pw=e,mw=Symbol();function Xp(e){return e&&typeof e=="object"&&Object.prototype.toString.call(e)==="[object Object]"&&typeof e.toJSON!="function"}var Mu;(function(e){e.direct="direct",e.patchObject="patch object",e.patchFunction="patch function"})(Mu||(Mu={}));function c6(){const e=sy(!0),t=e.run(()=>wi({}));let i=[],n=[];const a=i1({install(s){AA(a),a._a=s,s.provide(mw,a),s.config.globalProperties.$pinia=a,n.forEach(r=>i.push(r)),n=[]},use(s){return this._a?i.push(s):n.push(s),this},_p:i,_a:null,_e:e,_s:new Map,state:t});return a}const _w=()=>{};function xF(e,t,i,n=_w){e.push(t);const a=()=>{const s=e.indexOf(t);s>-1&&(e.splice(s,1),n())};return!i&&ry()&&Zk(a),a}function Uo(e,...t){e.slice().forEach(i=>{i(...t)})}const d6=e=>e(),TF=Symbol(),Mg=Symbol();function Jp(e,t){e instanceof Map&&t instanceof Map?t.forEach((i,n)=>e.set(n,i)):e instanceof Set&&t instanceof Set&&t.forEach(e.add,e);for(const i in t){if(!t.hasOwnProperty(i))continue;const n=t[i],a=e[i];Xp(a)&&Xp(n)&&e.hasOwnProperty(i)&&!Ui(n)&&!cr(n)?e[i]=Jp(a,n):e[i]=n}return e}const f6=Symbol();function h6(e){return!Xp(e)||!e.hasOwnProperty(f6)}const{assign:Ks}=Object;function A6(e){return!!(Ui(e)&&e.effect)}function g6(e,t,i,n){const{state:a,actions:s,getters:r}=t,o=i.state.value[e];let l;function u(){o||(i.state.value[e]=a?a():{});const f=ky(i.state.value[e]);return Ks(f,s,Object.keys(r||{}).reduce((h,A)=>(h[A]=i1(Ri(()=>{AA(i);const p=i._s.get(e);return r[A].call(p,p)})),h),{}))}return l=Fw(e,u,t,i,n,!0),l}function Fw(e,t,i={},n,a,s){let r;const o=Ks({actions:{}},i),l={deep:!0};let u,f,h=[],A=[],p;const F=n.state.value[e];!s&&!F&&(n.state.value[e]={}),wi({});let y;function E(Q){let R;u=f=!1,typeof Q=="function"?(Q(n.state.value[e]),R={type:Mu.patchFunction,storeId:e,events:p}):(Jp(n.state.value[e],Q),R={type:Mu.patchObject,payload:Q,storeId:e,events:p});const W=y=Symbol();lA().then(()=>{y===W&&(u=!0)}),f=!0,Uo(h,R,n.state.value[e])}const w=s?function(){const{state:R}=i,W=R?R():{};this.$patch(Z=>{Ks(Z,W)})}:_w;function C(){r.stop(),h=[],A=[],n._s.delete(e)}const B=(Q,R="")=>{if(TF in Q)return Q[Mg]=R,Q;const W=function(){AA(n);const Z=Array.from(arguments),de=[],Fe=[];function ge(me){de.push(me)}function le(me){Fe.push(me)}Uo(A,{args:Z,name:W[Mg],store:U,after:ge,onError:le});let se;try{se=Q.apply(this&&this.$id===e?this:U,Z)}catch(me){throw Uo(Fe,me),me}return se instanceof Promise?se.then(me=>(Uo(de,me),me)).catch(me=>(Uo(Fe,me),Promise.reject(me))):(Uo(de,se),se)};return W[TF]=!0,W[Mg]=R,W},S={_p:n,$id:e,$onAction:xF.bind(null,A),$patch:E,$reset:w,$subscribe(Q,R={}){const W=xF(h,Q,R.detached,()=>Z()),Z=r.run(()=>dr(()=>n.state.value[e],de=>{(R.flush==="sync"?f:u)&&Q({storeId:e,type:Mu.direct,events:p},de)},Ks({},l,R)));return W},$dispose:C},U=Xc(S);n._s.set(e,U);const z=(n._a&&n._a.runWithContext||d6)(()=>n._e.run(()=>(r=sy()).run(()=>t({action:B}))));for(const Q in z){const R=z[Q];if(Ui(R)&&!A6(R)||cr(R))s||(F&&h6(R)&&(Ui(R)?R.value=F[Q]:Jp(R,F[Q])),n.state.value[e][Q]=R);else if(typeof R=="function"){const W=B(R,Q);z[Q]=W,o.actions[Q]=R}}return Ks(U,z),Ks(zt(U),z),Object.defineProperty(U,"$state",{get:()=>n.state.value[e],set:Q=>{E(R=>{Ks(R,Q)})}}),n._p.forEach(Q=>{Ks(U,r.run(()=>Q({store:U,app:n._a,pinia:n,options:o})))}),F&&s&&i.hydrate&&i.hydrate(U.$state,F),u=!0,f=!0,U}/*! 
#__NO_SIDE_EFFECTS__ */function ei(e,t,i){let n,a;const s=typeof t=="function";typeof e=="string"?(n=e,a=s?i:t):(a=e,n=e.id);function r(o,l){const u=XB();return o=o||(u?ka(mw,null):null),o&&AA(o),o=pw,o._s.has(n)||(s?Fw(n,t,a,o):g6(n,a,o)),o._s.get(n)}return r.$id=n,r}function J(e,t){return Array.isArray(t)?t.reduce((i,n)=>(i[n]=function(){return e(this.$pinia)[n]},i),{}):Object.keys(t).reduce((i,n)=>(i[n]=function(){const a=e(this.$pinia),s=t[n];return typeof s=="function"?s.call(this,a):a[s]},i),{})}function Te(e,t){return Array.isArray(t)?t.reduce((i,n)=>(i[n]=function(...a){return e(this.$pinia)[n](...a)},i),{}):Object.keys(t).reduce((i,n)=>(i[n]=function(...a){return e(this.$pinia)[t[n]](...a)},i),{})}const oe=ei("phone",{state:()=>({show:!1,notifyshow:!1,la
`).forEach(function(r){a=r.indexOf(":"),i=r.substring(0,a).trim().toLowerCase(),n=r.substring(a+1).trim(),!(!i||t[i]&&_x[i])&&(i==="set-cookie"?t[i]?t[i].push(n):t[i]=[n]:t[i]=t[i]?t[i]+", "+n:n)}),t},KF=Symbol("internals");function cu(e){return e&&String(e).trim().toLowerCase()}function bf(e){return e===!1||e==null?e:Oe.isArray(e)?e.map(bf):String(e)}function bx(e){const t=Object.create(null),i=/([^\s,;=]+)\s*(?:=\s*([^,;]+))?/g;let n;for(;n=i.exec(e);)t[n[1]]=n[2];return t}const vx=e=>/^[-_a-zA-Z0-9^`|~,!#$%&'*+.]+$/.test(e.trim());function Ug(e,t,i,n,a){if(Oe.isFunction(n))return n.call(this,t,i);if(a&&(t=i),!!Oe.isString(t)){if(Oe.isString(n))return t.indexOf(n)!==-1;if(Oe.isRegExp(n))return n.test(t)}}function yx(e){return e.trim().toLowerCase().replace(/([a-z\d])(\w*)/g,(t,i,n)=>i.toUpperCase()+n)}function wx(e,t){const i=Oe.toCamelCase(" "+t);["get","set","has"].forEach(n=>{Object.defineProperty(e,n+i,{value:function(a,s,r){return this[n].call(this,t,a,s,r)},configurable:!0})})}let Nn=class{constructor(t){t&&this.set(t)}set(t,i,n){const a=this;function s(o,l,u){const f=cu(l);if(!f)throw new Error("header name must be a non-empty string");const h=Oe.findKey(a,f);(!h||a[h]===void 0||u===!0||u===void 0&&a[h]!==!1)&&(a[h||l]=bf(o))}const r=(o,l)=>Oe.forEach(o,(u,f)=>s(u,f,l));if(Oe.isPlainObject(t)||t instanceof this.constructor)r(t,i);else if(Oe.isString(t)&&(t=t.trim())&&!vx(t))r(Fx(t),i);else if(Oe.isObject(t)&&Oe.isIterable(t)){let o={},l,u;for(const f of t){if(!Oe.isArray(f))throw TypeError("Object iterator must return a key-value pair");o[u=f[0]]=(l=o[u])?Oe.isArray(l)?[...l,f[1]]:[l,f[1]]:f[1]}r(o,i)}else t!=null&&s(i,t,n);return this}get(t,i){if(t=cu(t),t){const n=Oe.findKey(this,t);if(n){const a=this[n];if(!i)return a;if(i===!0)return bx(a);if(Oe.isFunction(i))return i.call(this,a,n);if(Oe.isRegExp(i))return i.exec(a);throw new TypeError("parser must be boolean|regexp|function")}}}has(t,i){if(t=cu(t),t){const n=Oe.findKey(this,t);return!!(n&&this[n]!==void 0&&(!i||Ug(this,this[n],n,i)))}return!1}delete(t,i){const n=this;let a=!1;function s(r){if(r=cu(r),r){const o=Oe.findKey(n,r);o&&(!i||Ug(n,n[o],o,i))&&(delete n[o],a=!0)}}return Oe.isArray(t)?t.forEach(s):s(t),a}clear(t){const i=Object.keys(this);let n=i.length,a=!1;for(;n--;){const s=i[n];(!t||Ug(this,this[s],s,t,!0))&&(delete this[s],a=!0)}return a}normalize(t){const i=this,n={};return Oe.forEach(this,(a,s)=>{const r=Oe.findKey(n,s);if(r){i[r]=bf(a),delete i[s];return}const o=t?yx(s):String(s).trim();o!==s&&delete i[s],i[o]=bf(a),n[o]=!0}),this}concat(...t){return this.constructor.concat(this,...t)}toJSON(t){const i=Object.create(null);return Oe.forEach(this,(n,a)=>{n!=null&&n!==!1&&(i[a]=t&&Oe.isArray(n)?n.join(", "):n)}),i}[Symbol.iterator](){return Object.entries(this.toJSON())[Symbol.iterator]()}toString(){return Object.entries(this.toJSON()).map(([t,i])=>t+": "+i).join(`
`)}getSetCookie(){return this.get("set-cookie")||[]}get[Symbol.toStringTag](){return"AxiosHeaders"}static from(t){return t instanceof this?t:new this(t)}static concat(t,...i){const n=new this(t);return i.forEach(a=>n.set(a)),n}static accessor(t){const n=(this[KF]=this[KF]={accessors:{}}).accessors,a=this.prototype;function s(r){const o=cu(r);n[o]||(wx(a,r),n[o]=!0)}return Oe.isArray(t)?t.forEach(s):s(t),this}};Nn.accessor(["Content-Type","Content-Length","Accept","Accept-Encoding","User-Agent","Authorization"]);Oe.reduceDescriptors(Nn.prototype,({value:e},t)=>{let i=t[0].toUpperCase()+t.slice(1);return{get:()=>e,set(n){this[i]=n}}});Oe.freezeMethods(Nn);function Og(e,t){const i=this||ed,n=t||i,a=Nn.from(n.headers);let s=n.data;return Oe.forEach(e,function(o){s=o.call(i,s,a.normalize(),t?t.status:void 0)}),a.normalize(),s}function Ww(e){return!!(e&&e.__CANCEL__)}function Nl(e,t,i){Tt.call(this,e??"canceled",Tt.ERR_CANCELED,t,i),this.name="CanceledError"}Oe.inherits(Nl,Tt,{__CANCEL__:!0});function Kw(e,t,i){const n=i.config.validateStatus;!i.status||!n||n(i.status)?e(i):t(new Tt("Request failed with status code "+i.status,[Tt.ERR_BAD_REQUEST,Tt.ERR_BAD_RESPONSE][Math.floor(i.status/100)-4],i.config,i.request,i))}function Cx(e){const t=/^([-+\w]{1,25})(:?\/\/|:)/.exec(e);return t&&t[1]||""}function Ex(e,t){e=e||10;const i=new Array(e),n=new Array(e);let a=0,s=0,r;return t=t!==void 0?t:1e3,function(l){const u=Date.now(),f=n[s];r||(r=u),i[a]=l,n[a]=u;let h=s,A=0;for(;h!==a;)A+=i[h++],h=h%e;if(a=(a+1)%e,a===s&&(s=(s+1)%e),u-r<t)return;const p=f&&u-f;return p?Math.round(A*1e3/p):void 0}}function kx(e,t){let i=0,n=1e3/t,a,s;const r=(u,f=Date.now())=>{i=f,a=null,s&&(clearTimeout(s),s=null),e.apply(null,u)};return[(...u)=>{const f=Date.now(),h=f-i;h>=n?r(u,f):(a=u,s||(s=setTimeout(()=>{s=null,r(a)},n-h)))},()=>a&&r(a)]}const jf=(e,t,i=3)=>{let n=0;const a=Ex(50,250);return kx(s=>{const r=s.loaded,o=s.lengthComputable?s.total:void 0,l=r-n,u=a(l),f=r<=o;n=r;const h={loaded:r,total:o,progress:o?r/o:void 0,bytes:l,rate:u||void 0,estimated:u&&o&&f?(o-r)/u:void 0,event:s,lengthComputable:o!=null,[t?"download":"upload"]:!0};e(h)},i)},YF=(e,t)=>{const i=e!=null;return[n=>t[0]({lengthComputable:i,total:e,loaded:n}),t[1]]},XF=e=>(...t)=>Oe.asap(()=>e(...t)),Bx=pn.hasStandardBrowserEnv?((e,t)=>i=>(i=new URL(i,pn.origin),e.protocol===i.protocol&&e.host===i.host&&(t||e.port===i.port)))(new URL(pn.origin),pn.navigator&&/(msie|trident)/i.test(pn.navigator.userAgent)):()=>!0,Sx=pn.hasStandardBrowserEnv?{write(e,t,i,n,a,s){const r=[e+"="+encodeURIComponent(t)];Oe.isNumber(i)&&r.push("expires="+new Date(i).toGMTString()),Oe.isString(n)&&r.push("path="+n),Oe.isString(a)&&r.push("domain="+a),s===!0&&r.push("secure"),document.cookie=r.join("; ")},read(e){const t=document.cookie.match(new RegExp("(^|;\\s*)("+e+")=([^;]*)"));return t?decodeURIComponent(t[3]):null},remove(e){this.write(e,"",Date.now()-864e5)}}:{write(){},read(){return null},remove(){}};function Dx(e){return/^([a-z][a-z\d+\-.]*:)?\/\//i.test(e)}function xx(e,t){return t?e.replace(/\/?\/$/,"")+"/"+t.replace(/^\/+/,""):e}function Yw(e,t,i){let n=!Dx(t);return e&&(n||i==!1)?xx(e,t):t}const JF=e=>e instanceof Nn?{...e}:e;function lo(e,t){t=t||{};const i={};function n(u,f,h,A){return Oe.isPlainObject(u)&&Oe.isPlainObject(f)?Oe.merge.call({caseless:A},u,f):Oe.isPlainObject(f)?Oe.merge({},f):Oe.isArray(f)?f.slice():f}function a(u,f,h,A){if(Oe.isUndefined(f)){if(!Oe.isUndefined(u))return n(void 0,u,h,A)}else return n(u,f,h,A)}function 
s(u,f){if(!Oe.isUndefined(f))return n(void 0,f)}function r(u,f){if(Oe.isUndefined(f)){if(!Oe.isUndefined(u))return n(void 0,u)}else return n(void 0,f)}function o(u,f,h){if(h in t)return n(u,f);if(h in e)return n(void 0,u)}const l={url:s,method:s,data:s,baseURL:r,transformRequest:r,transformResponse:r,paramsSerializer:r,timeout:r,timeoutMessage:r,withCredentials:r,withXSRFToken:r,adapter:r,responseType:r,xsrfCookieName:r,xsrfHeaderName:r,onUploadProgress:r,onDownloadProgress:r,decompress:r,maxContentLength:r,maxBodyLength:r,beforeRedirect:r,transport:r,httpAgent:r,h
`+s.map(eb).join(`
`):" "+eb(s[0]):"as no adapter specified";throw new Tt("There is no suitable adapter to dispatch the request "+r,"ERR_NOT_SUPPORT")}return n},adapters:rm};function Ng(e){if(e.cancelToken&&e.cancelToken.throwIfRequested(),e.signal&&e.signal.aborted)throw new Nl(null,e)}function tb(e){return Ng(e),e.headers=Nn.from(e.headers),e.data=Og.call(e,e.transformRequest),["post","put","patch"].indexOf(e.method)!==-1&&e.headers.setContentType("application/x-www-form-urlencoded",!1),$w.getAdapter(e.adapter||ed.adapter)(e).then(function(n){return Ng(e),n.data=Og.call(e,e.transformResponse,n),n.headers=Nn.from(n.headers),n},function(n){return Ww(n)||(Ng(e),n&&n.response&&(n.response.data=Og.call(e,e.transformResponse,n.response),n.response.headers=Nn.from(n.response.headers))),Promise.reject(n)})}const e3="1.9.0",wA={};["object","boolean","number","function","string","symbol"].forEach((e,t)=>{wA[e]=function(n){return typeof n===e||"a"+(t<1?"n ":" ")+e}});const ib={};wA.transitional=function(t,i,n){function a(s,r){return"[Axios v"+e3+"] Transitional option '"+s+"'"+r+(n?". "+n:"")}return(s,r,o)=>{if(t===!1)throw new Tt(a(r," has been removed"+(i?" in "+i:"")),Tt.ERR_DEPRECATED);return i&&!ib[r]&&(ib[r]=!0,console.warn(a(r," has been deprecated since v"+i+" and will be removed in the near future"))),t?t(s,r,o):!0}};wA.spelling=function(t){return(i,n)=>(console.warn(`${n} is likely a misspelling of ${t}`),!0)};function Gx(e,t,i){if(typeof e!="object")throw new Tt("options must be an object",Tt.ERR_BAD_OPTION_VALUE);const n=Object.keys(e);let a=n.length;for(;a-- >0;){const s=n[a],r=t[s];if(r){const o=e[s],l=o===void 0||r(o,s,e);if(l!==!0)throw new Tt("option "+s+" must be "+l,Tt.ERR_BAD_OPTION_VALUE);continue}if(i!==!0)throw new Tt("Unknown option "+s,Tt.ERR_BAD_OPTION)}}const vf={assertOptions:Gx,validators:wA},Oa=vf.validators;let no=class{constructor(t){this.defaults=t||{},this.interceptors={request:new WF,response:new WF}}async request(t,i){try{return await this._request(t,i)}catch(n){if(n instanceof Error){let a={};Error.captureStackTrace?Error.captureStackTrace(a):a=new Error;const s=a.stack?a.stack.replace(/^.+\n/,""):"";try{n.stack?s&&!String(n.stack).endsWith(s.replace(/^.+\n.+\n/,""))&&(n.stack+=`
`+s):n.stack=s}catch{}}throw n}}_request(t,i){typeof t=="string"?(i=i||{},i.url=t):i=t||{},i=lo(this.defaults,i);const{transitional:n,paramsSerializer:a,headers:s}=i;n!==void 0&&vf.assertOptions(n,{silentJSONParsing:Oa.transitional(Oa.boolean),forcedJSONParsing:Oa.transitional(Oa.boolean),clarifyTimeoutError:Oa.transitional(Oa.boolean)},!1),a!=null&&(Oe.isFunction(a)?i.paramsSerializer={serialize:a}:vf.assertOptions(a,{encode:Oa.function,serialize:Oa.function},!0)),i.allowAbsoluteUrls!==void 0||(this.defaults.allowAbsoluteUrls!==void 0?i.allowAbsoluteUrls=this.defaults.allowAbsoluteUrls:i.allowAbsoluteUrls=!0),vf.assertOptions(i,{baseUrl:Oa.spelling("baseURL"),withXsrfToken:Oa.spelling("withXSRFToken")},!0),i.method=(i.method||this.defaults.method||"get").toLowerCase();let r=s&&Oe.merge(s.common,s[i.method]);s&&Oe.forEach(["delete","get","head","post","put","patch","common"],F=>{delete s[F]}),i.headers=Nn.concat(r,s);const o=[];let l=!0;this.interceptors.request.forEach(function(y){typeof y.runWhen=="function"&&y.runWhen(i)===!1||(l=l&&y.synchronous,o.unshift(y.fulfilled,y.rejected))});const u=[];this.interceptors.response.forEach(function(y){u.push(y.fulfilled,y.rejected)});let f,h=0,A;if(!l){const F=[tb.bind(this),void 0];for(F.unshift.apply(F,o),F.push.apply(F,u),A=F.length,f=Promise.resolve(i);h<A;)f=f.then(F[h++],F[h++]);return f}A=o.length;let p=i;for(h=0;h<A;){const F=o[h++],y=o[h++];try{p=F(p)}catch(E){y.call(this,E);break}}try{f=tb.call(this,p)}catch(F){return Promise.reject(F)}for(h=0,A=u.length;h<A;)f=f.then(u[h++],u[h++]);return f}getUri(t){t=lo(this.defaults,t);const i=Yw(t.baseURL,t.url,t.allowAbsoluteUrls);return zw(i,t.params,t.paramsSerializer)}};Oe.forEach(["delete","get","head","options"],function(t){no.prototype[t]=function(i,n){return this.request(lo(n||{},{method:t,url:i,data:(n||{}).data}))}});Oe.forEach(["post","put","patch"],function(t){function i(n){return function(s,r,o){return this.request(lo(o||{},{method:t,headers:n?{"Content-Type":"multipart/form-data"}:{},url:s,data:r}))}}no.prototype[t]=i(),no.prototype[t+"Form"]=i(!0)});let zx=class t3{constructor(t){if(typeof t!="function")throw new TypeError("executor must be a function.");let i;this.promise=new Promise(function(s){i=s});const n=this;this.promise.then(a=>{if(!n._listeners)return;let s=n._listeners.length;for(;s-- >0;)n._listeners[s](a);n._listeners=null}),this.promise.then=a=>{let s;const r=new Promise(o=>{n.subscribe(o),s=o}).then(a);return r.cancel=function(){n.unsubscribe(s)},r},t(function(s,r,o){n.reason||(n.reason=new Nl(s,r,o),i(n.reason))})}throwIfRequested(){if(this.reason)throw this.reason}subscribe(t){if(this.reason){t(this.reason);return}this._listeners?this._listeners.push(t):this._listeners=[t]}unsubscribe(t){if(!this._listeners)return;const i=this._listeners.indexOf(t);i!==-1&&this._listeners.splice(i,1)}toAbortSignal(){const t=new AbortController,i=n=>{t.abort(n)};return this.subscribe(i),t.signal.unsubscribe=()=>this.unsubscribe(i),t.signal}static source(){let t;return{token:new t3(function(a){t=a}),cancel:t}}};function qx(e){return function(i){return e.apply(null,i)}}function Vx(e){return Oe.isObject(e)&&e.isAxiosError===!0}const 
om={Continue:100,SwitchingProtocols:101,Processing:102,EarlyHints:103,Ok:200,Created:201,Accepted:202,NonAuthoritativeInformation:203,NoContent:204,ResetContent:205,PartialContent:206,MultiStatus:207,AlreadyReported:208,ImUsed:226,MultipleChoices:300,MovedPermanently:301,Found:302,SeeOther:303,NotModified:304,UseProxy:305,Unused:306,TemporaryRedirect:307,PermanentRedirect:308,BadRequest:400,Unauthorized:401,PaymentRequired:402,Forbidden:403,NotFound:404,MethodNotAllowed:405,NotAcceptable:406,ProxyAuthenticationRequired:407,RequestTimeout:408,Conflict:409,Gone:410,LengthRequired:411,PreconditionFailed:412,PayloadTooLarge:413,UriTooLong:414,UnsupportedMediaType:415,RangeNotSatisfiable:416,ExpectationFailed:417,ImATeapot:418,MisdirectedRequest:421,UnprocessableEntity:422,Locked:423,FailedDependency:424,TooEarly:425,UpgradeRequired:426,PreconditionRequired:428,TooManyRequests:429,RequestHeaderFie
* vue-router v4.5.1
* (c) 2025 Eduardo San Martin Morote
* @license MIT
*/const Ko=typeof document<"u";function n3(e){return typeof e=="object"||"displayName"in e||"props"in e||"__vccOpts"in e}function Wx(e){return e.__esModule||e[Symbol.toStringTag]==="Module"||e.default&&n3(e.default)}const Jt=Object.assign;function Hg(e,t){const i={};for(const n in t){const a=t[n];i[n]=Sa(a)?a.map(e):e(a)}return i}const Lu=()=>{},Sa=Array.isArray,a3=/#/g,Kx=/&/g,Yx=/\//g,Xx=/=/g,Jx=/\?/g,s3=/\+/g,Zx=/%5B/g,$x=/%5D/g,r3=/%5E/g,eT=/%60/g,o3=/%7B/g,tT=/%7C/g,l3=/%7D/g,iT=/%20/g;function g1(e){return encodeURI(""+e).replace(tT,"|").replace(Zx,"[").replace($x,"]")}function nT(e){return g1(e).replace(o3,"{").replace(l3,"}").replace(r3,"^")}function lm(e){return g1(e).replace(s3,"%2B").replace(iT,"+").replace(a3,"%23").replace(Kx,"%26").replace(eT,"`").replace(o3,"{").replace(l3,"}").replace(r3,"^")}function aT(e){return lm(e).replace(Xx,"%3D")}function sT(e){return g1(e).replace(a3,"%23").replace(Jx,"%3F")}function rT(e){return e==null?"":sT(e).replace(Yx,"%2F")}function ac(e){try{return decodeURIComponent(""+e)}catch{}return""+e}const oT=/\/$/,lT=e=>e.replace(oT,"");function Qg(e,t,i="/"){let n,a={},s="",r="";const o=t.indexOf("#");let l=t.indexOf("?");return o<l&&o>=0&&(l=-1),l>-1&&(n=t.slice(0,l),s=t.slice(l+1,o>-1?o:t.length),a=e(s)),o>-1&&(n=n||t.slice(0,o),r=t.slice(o,t.length)),n=fT(n??t,i),{fullPath:n+(s&&"?")+s+r,path:n,query:a,hash:ac(r)}}function uT(e,t){const i=t.query?e(t.query):"";return t.path+(i&&"?")+i+(t.hash||"")}function nb(e,t){return!t||!e.toLowerCase().startsWith(t.toLowerCase())?e:e.slice(t.length)||"/"}function cT(e,t,i){const n=t.matched.length-1,a=i.matched.length-1;return n>-1&&n===a&&hl(t.matched[n],i.matched[a])&&u3(t.params,i.params)&&e(t.query)===e(i.query)&&t.hash===i.hash}function hl(e,t){return(e.aliasOf||e)===(t.aliasOf||t)}function u3(e,t){if(Object.keys(e).length!==Object.keys(t).length)return!1;for(const i in e)if(!dT(e[i],t[i]))return!1;return!0}function dT(e,t){return Sa(e)?ab(e,t):Sa(t)?ab(t,e):e===t}function ab(e,t){return Sa(t)?e.length===t.length&&e.every((i,n)=>i===t[n]):e.length===1&&e[0]===t}function fT(e,t){if(e.startsWith("/"))return e;if(!e)return t;const i=t.split("/"),n=e.split("/"),a=n[n.length-1];(a===".."||a===".")&&n.push("");let s=i.length-1,r,o;for(r=0;r<n.length;r++)if(o=n[r],o!==".")if(o==="..")s>1&&s--;else break;return i.slice(0,s).join("/")+"/"+n.slice(r).join("/")}const Vs={path:"/",name:void 0,params:{},query:{},hash:"",fullPath:"/",matched:[],meta:{},redirectedFrom:void 0};var sc;(function(e){e.pop="pop",e.push="push"})(sc||(sc={}));var Ru;(function(e){e.back="back",e.forward="forward",e.unknown=""})(Ru||(Ru={}));function hT(e){if(!e)if(Ko){const t=document.querySelector("base");e=t&&t.getAttribute("href")||"/",e=e.replace(/^\w+:\/\/[^\/]+/,"")}else e="/";return e[0]!=="/"&&e[0]!=="#"&&(e="/"+e),lT(e)}const AT=/^[^#]+#/;function gT(e,t){return e.replace(AT,"#")+t}function pT(e,t){const i=document.documentElement.getBoundingClientRect(),n=e.getBoundingClientRect();return{behavior:t.behavior,left:n.left-i.left-(t.left||0),top:n.top-i.top-(t.top||0)}}const CA=()=>({left:window.scrollX,top:window.scrollY});function mT(e){let t;if("el"in e){const i=e.el,n=typeof i=="string"&&i.startsWith("#"),a=typeof i=="string"?n?document.getElementById(i.slice(1)):document.querySelector(i):i;if(!a)return;t=pT(a,e)}else t=e;"scrollBehavior"in document.documentElement.style?window.scrollTo(t):window.scrollTo(t.left!=null?t.left:window.scrollX,t.top!=null?t.top:window.scrollY)}function 
sb(e,t){return(history.state?history.state.position-t:-1)+e}const um=new Map;function _T(e,t){um.set(e,t)}function FT(e){const t=um.get(e);return um.delete(e),t}let bT=()=>location.protocol+"//"+location.host;function c3(e,t){const{pathname:i,search:n,hash:a}=t,s=e.indexOf("#");if(s>-1){let o=a.includes(e.slice(s))?e.slice(s).length:1,l=a.slice(o);return l[0]!=="/"&&(l="/"+l),nb(l,"")}return nb(i,e)+n+a}function vT(e,t,i,n){let a=[],s=[],r=null;const o=({state:A})=>{const p=c3(e,location),F=i.value,y=t.value;let E=0;if(A){if(i.value=p,t.value=A,r&&r===F){r=null;return}E=y?A.position
#ifdef USE_ALPHAMAP

diffuseColor.a *= texture2D( alphaMap, vUv ).g;

#endif
`,BQ=`
#ifdef USE_ALPHAMAP

uniform sampler2D alphaMap;

#endif
`,SQ=`
#ifdef ALPHATEST

if ( diffuseColor.a < ALPHATEST ) discard;

#endif
`,DQ=`
#ifdef USE_AOMAP

// reads channel R, compatible with a combined OcclusionRoughnessMetallic (RGB) texture
float ambientOcclusion = ( texture2D( aoMap, vUv2 ).r - 1.0 ) * aoMapIntensity + 1.0;

reflectedLight.indirectDiffuse *= ambientOcclusion;

#if defined( USE_ENVMAP ) && defined( PHYSICAL )

float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );

reflectedLight.indirectSpecular *= computeSpecularOcclusion( dotNV, ambientOcclusion, material.specularRoughness );

#endif

#endif
`,xQ=`
#ifdef USE_AOMAP

uniform sampler2D aoMap;
uniform float aoMapIntensity;

#endif
`,TQ=`
vec3 transformed = vec3( position );
`,IQ=`
vec3 objectNormal = vec3( normal );
`,PQ=`
float punctualLightIntensityToIrradianceFactor( const in float lightDistance, const in float cutoffDistance, const in float decayExponent ) {

#if defined ( PHYSICALLY_CORRECT_LIGHTS )

// based upon Frostbite 3 Moving to Physically-based Rendering
// page 32, equation 26: E[window1]
// https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
// this is intended to be used on spot and point lights who are represented as luminous intensity
// but who must be converted to luminous irradiance for surface lighting calculation
float distanceFalloff = 1.0 / max( pow( lightDistance, decayExponent ), 0.01 );

if( cutoffDistance > 0.0 ) {

distanceFalloff *= pow2( saturate( 1.0 - pow4( lightDistance / cutoffDistance ) ) );

}

return distanceFalloff;

#else

if( cutoffDistance > 0.0 && decayExponent > 0.0 ) {

return pow( saturate( -lightDistance / cutoffDistance + 1.0 ), decayExponent );

}

return 1.0;

#endif

}

vec3 BRDF_Diffuse_Lambert( const in vec3 diffuseColor ) {

return RECIPROCAL_PI * diffuseColor;

} // validated

vec3 F_Schlick( const in vec3 specularColor, const in float dotLH ) {

// Original approximation by Christophe Schlick '94
// float fresnel = pow( 1.0 - dotLH, 5.0 );

// Optimized variant (presented by Epic at SIGGRAPH '13)
// https://cdn2.unrealengine.com/Resources/files/2013SiggraphPresentationsNotes-26915738.pdf
float fresnel = exp2( ( -5.55473 * dotLH - 6.98316 ) * dotLH );

return ( 1.0 - specularColor ) * fresnel + specularColor;

} // validated

// Microfacet Models for Refraction through Rough Surfaces - equation (34)
// http://graphicrants.blogspot.com/2013/08/specular-brdf-reference.html
// alpha is "roughness squared" in Disney’s reparameterization
float G_GGX_Smith( const in float alpha, const in float dotNL, const in float dotNV ) {

// geometry term (normalized) = G(l)⋅G(v) / 4(n⋅l)(n⋅v)
// also see #12151

float a2 = pow2( alpha );

float gl = dotNL + sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) );
float gv = dotNV + sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) );

return 1.0 / ( gl * gv );

} // validated

// Moving Frostbite to Physically Based Rendering 3.0 - page 12, listing 2
// https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
float G_GGX_SmithCorrelated( const in float alpha, const in float dotNL, const in float dotNV ) {

float a2 = pow2( alpha );

// dotNL and dotNV are explicitly swapped. This is not a mistake.
float gv = dotNL * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) );
float gl = dotNV * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) );

return 0.5 / max( gv + gl, EPSILON );

}

// Microfacet Models for Refraction through Rough Surfaces - equation (33)
// http://graphicrants.blogspot.com/2013/08/specular-brdf-reference.html
// alpha is "roughness squared" in Disney’s reparameterization
float D_GGX( const in float alpha, const in float dotNH ) {

float a2 = pow2( alpha );

float denom = pow2( dotNH ) * ( a2 - 1.0 ) + 1.0; // avoid alpha = 0 with dotNH = 1

return RECIPROCAL_PI * a2 / pow2( denom );

}
// GGX Distribution, Schlick Fresnel, GGX-Smith Visibility
vec3 BRDF_Specular_GGX( const in IncidentLight incidentLight, const in GeometricContext geometry, const in vec3 specularColor, const in float roughness ) {

float alpha = pow2( roughness ); // UE4's roughness

vec3 halfDir = normalize( incidentLight.direction + geometry.viewDir );

float dotNL = saturate( dot( geometry.normal, incidentLight.direction ) );
float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
float dotNH = saturate( dot( geometry.normal, halfDir ) );
float dotLH = saturate( dot( incidentLight.direction, halfDir ) );

vec3 F = F_Schlick( specularColor, dotLH );

float G = G_GGX_SmithCorrelated( alpha, dotNL, dotNV );

float D = D_GGX( alpha, dotNH );

return F * ( G * D );

} // validated

// Rect Area Light

// Real-Time Polygonal-Light Shading with Linearly Transformed Cosines
// by Eric Heitz, Jonathan Dupuy, Stephen Hill and David Neubelt
// code: https://github.com/selfshadow/ltc_code/

vec2 LTC_Uv( const in vec3 N, const in vec3 V, const in float roughness ) {

const float LUT_SIZE = 64.0;
const float LUT_SCALE = ( LUT_SIZE - 1.0 ) / LUT_SIZE;
const float LUT_BIAS = 0.5 / LUT_SIZE;

float dotNV = saturate( dot( N, V ) );

// texture parameterized by sqrt( GGX alpha ) and sqrt( 1 - cos( theta ) )
vec2 uv = vec2( roughness, sqrt( 1.0 - dotNV ) );

uv = uv * LUT_SCALE + LUT_BIAS;

return uv;

}

float LTC_ClippedSphereFormFactor( const in vec3 f ) {

// Real-Time Area Lighting: a Journey from Research to Production (p.102)
// An approximation of the form factor of a horizon-clipped rectangle.

float l = length( f );

return max( ( l * l + f.z ) / ( l + 1.0 ), 0.0 );

}

vec3 LTC_EdgeVectorFormFactor( const in vec3 v1, const in vec3 v2 ) {

float x = dot( v1, v2 );

float y = abs( x );

// rational polynomial approximation to theta / sin( theta ) / 2PI
float a = 0.8543985 + ( 0.4965155 + 0.0145206 * y ) * y;
float b = 3.4175940 + ( 4.1616724 + y ) * y;
float v = a / b;

float theta_sintheta = ( x > 0.0 ) ? v : 0.5 * inversesqrt( max( 1.0 - x * x, 1e-7 ) ) - v;

return cross( v1, v2 ) * theta_sintheta;

}
vec3 LTC_Evaluate( const in vec3 N, const in vec3 V, const in vec3 P, const in mat3 mInv, const in vec3 rectCoords[ 4 ] ) {

// bail if point is on back side of plane of light
// assumes ccw winding order of light vertices
vec3 v1 = rectCoords[ 1 ] - rectCoords[ 0 ];
vec3 v2 = rectCoords[ 3 ] - rectCoords[ 0 ];
vec3 lightNormal = cross( v1, v2 );

if( dot( lightNormal, P - rectCoords[ 0 ] ) < 0.0 ) return vec3( 0.0 );

// construct orthonormal basis around N
vec3 T1, T2;
T1 = normalize( V - N * dot( V, N ) );
T2 = - cross( N, T1 ); // negated from paper; possibly due to a different handedness of world coordinate system

// compute transform
mat3 mat = mInv * transposeMat3( mat3( T1, T2, N ) );

// transform rect
vec3 coords[ 4 ];
coords[ 0 ] = mat * ( rectCoords[ 0 ] - P );
coords[ 1 ] = mat * ( rectCoords[ 1 ] - P );
coords[ 2 ] = mat * ( rectCoords[ 2 ] - P );
coords[ 3 ] = mat * ( rectCoords[ 3 ] - P );

// project rect onto sphere
coords[ 0 ] = normalize( coords[ 0 ] );
coords[ 1 ] = normalize( coords[ 1 ] );
coords[ 2 ] = normalize( coords[ 2 ] );
coords[ 3 ] = normalize( coords[ 3 ] );

// calculate vector form factor
vec3 vectorFormFactor = vec3( 0.0 );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 0 ], coords[ 1 ] );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 1 ], coords[ 2 ] );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 2 ], coords[ 3 ] );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 3 ], coords[ 0 ] );

// adjust for horizon clipping
float result = LTC_ClippedSphereFormFactor( vectorFormFactor );

/*
// alternate method of adjusting for horizon clipping (see referece)
// refactoring required
float len = length( vectorFormFactor );
float z = vectorFormFactor.z / len;

const float LUT_SIZE = 64.0;
const float LUT_SCALE = ( LUT_SIZE - 1.0 ) / LUT_SIZE;
const float LUT_BIAS = 0.5 / LUT_SIZE;

// tabulated horizon-clipped sphere, apparently...
vec2 uv = vec2( z * 0.5 + 0.5, len );
uv = uv * LUT_SCALE + LUT_BIAS;

float scale = texture2D( ltc_2, uv ).w;

float result = len * scale;
*/

return vec3( result );

}

// End Rect Area Light

// ref: https://www.unrealengine.com/blog/physically-based-shading-on-mobile - environmentBRDF for GGX on mobile
vec3 BRDF_Specular_GGX_Environment( const in GeometricContext geometry, const in vec3 specularColor, const in float roughness ) {

float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );

const vec4 c0 = vec4( - 1, - 0.0275, - 0.572, 0.022 );

const vec4 c1 = vec4( 1, 0.0425, 1.04, - 0.04 );

vec4 r = roughness * c0 + c1;

float a004 = min( r.x * r.x, exp2( - 9.28 * dotNV ) ) * r.x + r.y;

vec2 AB = vec2( -1.04, 1.04 ) * a004 + r.zw;

return specularColor * AB.x + AB.y;

} // validated

float G_BlinnPhong_Implicit( /* const in float dotNL, const in float dotNV */ ) {

// geometry term is (n dot l)(n dot v) / 4(n dot l)(n dot v)
return 0.25;

}

float D_BlinnPhong( const in float shininess, const in float dotNH ) {

return RECIPROCAL_PI * ( shininess * 0.5 + 1.0 ) * pow( dotNH, shininess );

}

vec3 BRDF_Specular_BlinnPhong( const in IncidentLight incidentLight, const in GeometricContext geometry, const in vec3 specularColor, const in float shininess ) {

vec3 halfDir = normalize( incidentLight.direction + geometry.viewDir );

//float dotNL = saturate( dot( geometry.normal, incidentLight.direction ) );
//float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
float dotNH = saturate( dot( geometry.normal, halfDir ) );
float dotLH = saturate( dot( incidentLight.direction, halfDir ) );

vec3 F = F_Schlick( specularColor, dotLH );

float G = G_BlinnPhong_Implicit( /* dotNL, dotNV */ );

float D = D_BlinnPhong( shininess, dotNH );

return F * ( G * D );

} // validated

// source: http://simonstechblog.blogspot.ca/2011/12/microfacet-brdf.html
float GGXRoughnessToBlinnExponent( const in float ggxRoughness ) {
return ( 2.0 / pow2( ggxRoughness + 0.0001 ) - 2.0 );
}

float BlinnExponentToGGXRoughness( const in float blinnExponent ) {
return sqrt( 2.0 / ( blinnExponent + 2.0 ) );
}
`,MQ=`
#ifdef USE_BUMPMAP

uniform sampler2D bumpMap;
uniform float bumpScale;

// Bump Mapping Unparametrized Surfaces on the GPU by Morten S. Mikkelsen
// http://api.unrealengine.com/attachments/Engine/Rendering/LightingAndShadows/BumpMappingWithoutTangentSpace/mm_sfgrad_bump.pdf

// Evaluate the derivative of the height w.r.t. screen-space using forward differencing (listing 2)

vec2 dHdxy_fwd() {

vec2 dSTdx = dFdx( vUv );
vec2 dSTdy = dFdy( vUv );

float Hll = bumpScale * texture2D( bumpMap, vUv ).x;
float dBx = bumpScale * texture2D( bumpMap, vUv + dSTdx ).x - Hll;
float dBy = bumpScale * texture2D( bumpMap, vUv + dSTdy ).x - Hll;

return vec2( dBx, dBy );

}

vec3 perturbNormalArb( vec3 surf_pos, vec3 surf_norm, vec2 dHdxy ) {

// Workaround for Adreno 3XX dFd*( vec3 ) bug. See #9988

vec3 vSigmaX = vec3( dFdx( surf_pos.x ), dFdx( surf_pos.y ), dFdx( surf_pos.z ) );
vec3 vSigmaY = vec3( dFdy( surf_pos.x ), dFdy( surf_pos.y ), dFdy( surf_pos.z ) );
vec3 vN = surf_norm; // normalized

vec3 R1 = cross( vSigmaY, vN );
vec3 R2 = cross( vN, vSigmaX );

float fDet = dot( vSigmaX, R1 );

fDet *= ( float( gl_FrontFacing ) * 2.0 - 1.0 );

vec3 vGrad = sign( fDet ) * ( dHdxy.x * R1 + dHdxy.y * R2 );
return normalize( abs( fDet ) * surf_norm - vGrad );

}

#endif
`,LQ=`
#if NUM_CLIPPING_PLANES > 0

vec4 plane;

#pragma unroll_loop
for ( int i = 0; i < UNION_CLIPPING_PLANES; i ++ ) {

plane = clippingPlanes[ i ];
if ( dot( vViewPosition, plane.xyz ) > plane.w ) discard;

}

#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES

bool clipped = true;

#pragma unroll_loop
for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {

plane = clippingPlanes[ i ];
clipped = ( dot( vViewPosition, plane.xyz ) > plane.w ) && clipped;

}

if ( clipped ) discard;

#endif

#endif
`,RQ=`
#if NUM_CLIPPING_PLANES > 0

#if ! defined( PHYSICAL ) && ! defined( PHONG ) && ! defined( MATCAP )
varying vec3 vViewPosition;
#endif

uniform vec4 clippingPlanes[ NUM_CLIPPING_PLANES ];

#endif
`,UQ=`
#if NUM_CLIPPING_PLANES > 0 && ! defined( PHYSICAL ) && ! defined( PHONG ) && ! defined( MATCAP )
varying vec3 vViewPosition;
#endif
`,OQ=`
#if NUM_CLIPPING_PLANES > 0 && ! defined( PHYSICAL ) && ! defined( PHONG ) && ! defined( MATCAP )
vViewPosition = - mvPosition.xyz;
#endif
`,NQ=`
#ifdef USE_COLOR

diffuseColor.rgb *= vColor;

#endif
`,HQ=`
#ifdef USE_COLOR

varying vec3 vColor;

#endif
`,QQ=`
#ifdef USE_COLOR

varying vec3 vColor;

#endif
`,jQ=`
#ifdef USE_COLOR

vColor.xyz = color.xyz;

#endif
`,GQ=`
#define PI 3.14159265359
|
|||
|
#define PI2 6.28318530718
|
|||
|
#define PI_HALF 1.5707963267949
|
|||
|
#define RECIPROCAL_PI 0.31830988618
|
|||
|
#define RECIPROCAL_PI2 0.15915494
|
|||
|
#define LOG2 1.442695
|
|||
|
#define EPSILON 1e-6
|
|||
|
|
|||
|
#define saturate(a) clamp( a, 0.0, 1.0 )
|
|||
|
#define whiteCompliment(a) ( 1.0 - saturate( a ) )
|
|||
|
|
|||
|
float pow2( const in float x ) { return x*x; }
|
|||
|
float pow3( const in float x ) { return x*x*x; }
|
|||
|
float pow4( const in float x ) { float x2 = x*x; return x2*x2; }
|
|||
|
float average( const in vec3 color ) { return dot( color, vec3( 0.3333 ) ); }
|
|||
|
// expects values in the range of [0,1]x[0,1], returns values in the [0,1] range.
|
|||
|
// do not collapse into a single function per: http://byteblacksmith.com/improvements-to-the-canonical-one-liner-glsl-rand-for-opengl-es-2-0/
|
|||
|
highp float rand( const in vec2 uv ) {
|
|||
|
const highp float a = 12.9898, b = 78.233, c = 43758.5453;
|
|||
|
highp float dt = dot( uv.xy, vec2( a,b ) ), sn = mod( dt, PI );
|
|||
|
return fract(sin(sn) * c);
|
|||
|
}
|
|||
|
|
|||
|
struct IncidentLight {
|
|||
|
vec3 color;
|
|||
|
vec3 direction;
|
|||
|
bool visible;
|
|||
|
};
|
|||
|
|
|||
|
struct ReflectedLight {
|
|||
|
vec3 directDiffuse;
|
|||
|
vec3 directSpecular;
|
|||
|
vec3 indirectDiffuse;
|
|||
|
vec3 indirectSpecular;
|
|||
|
};
|
|||
|
|
|||
|
struct GeometricContext {
|
|||
|
vec3 position;
|
|||
|
vec3 normal;
|
|||
|
vec3 viewDir;
|
|||
|
};
|
|||
|
|
|||
|
vec3 transformDirection( in vec3 dir, in mat4 matrix ) {
|
|||
|
|
|||
|
return normalize( ( matrix * vec4( dir, 0.0 ) ).xyz );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// http://en.wikibooks.org/wiki/GLSL_Programming/Applying_Matrix_Transformations
|
|||
|
vec3 inverseTransformDirection( in vec3 dir, in mat4 matrix ) {
|
|||
|
|
|||
|
return normalize( ( vec4( dir, 0.0 ) * matrix ).xyz );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
vec3 projectOnPlane(in vec3 point, in vec3 pointOnPlane, in vec3 planeNormal ) {
|
|||
|
|
|||
|
float distance = dot( planeNormal, point - pointOnPlane );
|
|||
|
|
|||
|
return - distance * planeNormal + point;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
float sideOfPlane( in vec3 point, in vec3 pointOnPlane, in vec3 planeNormal ) {
|
|||
|
|
|||
|
return sign( dot( point - pointOnPlane, planeNormal ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
vec3 linePlaneIntersect( in vec3 pointOnLine, in vec3 lineDirection, in vec3 pointOnPlane, in vec3 planeNormal ) {
|
|||
|
|
|||
|
return lineDirection * ( dot( planeNormal, pointOnPlane - pointOnLine ) / dot( planeNormal, lineDirection ) ) + pointOnLine;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
mat3 transposeMat3( const in mat3 m ) {
|
|||
|
|
|||
|
mat3 tmp;
|
|||
|
|
|||
|
tmp[ 0 ] = vec3( m[ 0 ].x, m[ 1 ].x, m[ 2 ].x );
|
|||
|
tmp[ 1 ] = vec3( m[ 0 ].y, m[ 1 ].y, m[ 2 ].y );
|
|||
|
tmp[ 2 ] = vec3( m[ 0 ].z, m[ 1 ].z, m[ 2 ].z );
|
|||
|
|
|||
|
return tmp;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// https://en.wikipedia.org/wiki/Relative_luminance
|
|||
|
float linearToRelativeLuminance( const in vec3 color ) {
|
|||
|
|
|||
|
vec3 weights = vec3( 0.2126, 0.7152, 0.0722 );
|
|||
|
|
|||
|
return dot( weights, color.rgb );
|
|||
|
|
|||
|
}
|
|||
|
`,zQ=`
#ifdef ENVMAP_TYPE_CUBE_UV
#define cubeUV_textureSize (1024.0)
int getFaceFromDirection(vec3 direction) {
vec3 absDirection = abs(direction);
int face = -1;
if( absDirection.x > absDirection.z ) {
if(absDirection.x > absDirection.y )
face = direction.x > 0.0 ? 0 : 3;
else
face = direction.y > 0.0 ? 1 : 4;
}
else {
if(absDirection.z > absDirection.y )
face = direction.z > 0.0 ? 2 : 5;
else
face = direction.y > 0.0 ? 1 : 4;
}
return face;
}
#define cubeUV_maxLods1 (log2(cubeUV_textureSize*0.25) - 1.0)
#define cubeUV_rangeClamp (exp2((6.0 - 1.0) * 2.0))
vec2 MipLevelInfo( vec3 vec, float roughnessLevel, float roughness ) {
float scale = exp2(cubeUV_maxLods1 - roughnessLevel);
float dxRoughness = dFdx(roughness);
float dyRoughness = dFdy(roughness);
vec3 dx = dFdx( vec * scale * dxRoughness );
vec3 dy = dFdy( vec * scale * dyRoughness );
float d = max( dot( dx, dx ), dot( dy, dy ) );
// Clamp the value to the max mip level counts. hard coded to 6 mips
d = clamp(d, 1.0, cubeUV_rangeClamp);
float mipLevel = 0.5 * log2(d);
return vec2(floor(mipLevel), fract(mipLevel));
}
#define cubeUV_maxLods2 (log2(cubeUV_textureSize*0.25) - 2.0)
#define cubeUV_rcpTextureSize (1.0 / cubeUV_textureSize)
vec2 getCubeUV(vec3 direction, float roughnessLevel, float mipLevel) {
mipLevel = roughnessLevel > cubeUV_maxLods2 - 3.0 ? 0.0 : mipLevel;
float a = 16.0 * cubeUV_rcpTextureSize;
vec2 exp2_packed = exp2( vec2( roughnessLevel, mipLevel ) );
vec2 rcp_exp2_packed = vec2( 1.0 ) / exp2_packed;
// float powScale = exp2(roughnessLevel + mipLevel);
float powScale = exp2_packed.x * exp2_packed.y;
// float scale = 1.0 / exp2(roughnessLevel + 2.0 + mipLevel);
float scale = rcp_exp2_packed.x * rcp_exp2_packed.y * 0.25;
// float mipOffset = 0.75*(1.0 - 1.0/exp2(mipLevel))/exp2(roughnessLevel);
float mipOffset = 0.75*(1.0 - rcp_exp2_packed.y) * rcp_exp2_packed.x;
bool bRes = mipLevel == 0.0;
scale = bRes && (scale < a) ? a : scale;
vec3 r;
vec2 offset;
int face = getFaceFromDirection(direction);
float rcpPowScale = 1.0 / powScale;
if( face == 0) {
r = vec3(direction.x, -direction.z, direction.y);
offset = vec2(0.0+mipOffset,0.75 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? a : offset.y;
}
else if( face == 1) {
r = vec3(direction.y, direction.x, direction.z);
offset = vec2(scale+mipOffset, 0.75 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? a : offset.y;
}
else if( face == 2) {
r = vec3(direction.z, direction.x, direction.y);
offset = vec2(2.0*scale+mipOffset, 0.75 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? a : offset.y;
}
else if( face == 3) {
r = vec3(direction.x, direction.z, direction.y);
offset = vec2(0.0+mipOffset,0.5 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? 0.0 : offset.y;
}
else if( face == 4) {
r = vec3(direction.y, direction.x, -direction.z);
offset = vec2(scale+mipOffset, 0.5 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? 0.0 : offset.y;
}
else {
r = vec3(direction.z, -direction.x, direction.y);
offset = vec2(2.0*scale+mipOffset, 0.5 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? 0.0 : offset.y;
}
r = normalize(r);
float texelOffset = 0.5 * cubeUV_rcpTextureSize;
vec2 s = ( r.yz / abs( r.x ) + vec2( 1.0 ) ) * 0.5;
vec2 base = offset + vec2( texelOffset );
return base + s * ( scale - 2.0 * texelOffset );
}
#define cubeUV_maxLods3 (log2(cubeUV_textureSize*0.25) - 3.0)
vec4 textureCubeUV( sampler2D envMap, vec3 reflectedDirection, float roughness ) {
float roughnessVal = roughness* cubeUV_maxLods3;
float r1 = floor(roughnessVal);
float r2 = r1 + 1.0;
float t = fract(roughnessVal);
vec2 mipInfo = MipLevelInfo(reflectedDirection, r1, roughness);
float s = mipInfo.y;
float level0 = mipInfo.x;
float level1 = level0 + 1.0;
level1 = level1 > 5.0 ? 5.0 : level1;
// round to nearest mipmap if we are not interpolating.
level0 += min( floor( s + 0.5 ), 5.0 );
// Tri linear interpolation.
vec2 uv_10 = getCubeUV(reflectedDirection, r1, level0);
vec4 color10 = envMapTexelToLinear(texture2D(envMap, uv_10));
vec2 uv_20 = getCubeUV(reflectedDirection, r2, level0);
vec4 color20 = envMapTexelToLinear(texture2D(envMap, uv_20));
vec4 result = mix(color10, color20, t);
return vec4(result.rgb, 1.0);
}
#endif
`,qQ=`
vec3 transformedNormal = normalMatrix * objectNormal;
#ifdef FLIP_SIDED
transformedNormal = - transformedNormal;
#endif
`,VQ=`
#ifdef USE_DISPLACEMENTMAP
uniform sampler2D displacementMap;
uniform float displacementScale;
uniform float displacementBias;
#endif
`,WQ=`
#ifdef USE_DISPLACEMENTMAP
transformed += normalize( objectNormal ) * ( texture2D( displacementMap, uv ).x * displacementScale + displacementBias );
#endif
`,KQ=`
#ifdef USE_EMISSIVEMAP
vec4 emissiveColor = texture2D( emissiveMap, vUv );
emissiveColor.rgb = emissiveMapTexelToLinear( emissiveColor ).rgb;
totalEmissiveRadiance *= emissiveColor.rgb;
#endif
`,YQ=`
#ifdef USE_EMISSIVEMAP
uniform sampler2D emissiveMap;
#endif
`,XQ=`
gl_FragColor = linearToOutputTexel( gl_FragColor );
`,JQ=`
// For a discussion of what this is, please read this: http://lousodrome.net/blog/light/2013/05/26/gamma-correct-and-hdr-rendering-in-a-32-bits-buffer/
vec4 LinearToLinear( in vec4 value ) {
return value;
}
vec4 GammaToLinear( in vec4 value, in float gammaFactor ) {
return vec4( pow( value.rgb, vec3( gammaFactor ) ), value.a );
}
vec4 LinearToGamma( in vec4 value, in float gammaFactor ) {
return vec4( pow( value.rgb, vec3( 1.0 / gammaFactor ) ), value.a );
}
vec4 sRGBToLinear( in vec4 value ) {
return vec4( mix( pow( value.rgb * 0.9478672986 + vec3( 0.0521327014 ), vec3( 2.4 ) ), value.rgb * 0.0773993808, vec3( lessThanEqual( value.rgb, vec3( 0.04045 ) ) ) ), value.a );
}
vec4 LinearTosRGB( in vec4 value ) {
return vec4( mix( pow( value.rgb, vec3( 0.41666 ) ) * 1.055 - vec3( 0.055 ), value.rgb * 12.92, vec3( lessThanEqual( value.rgb, vec3( 0.0031308 ) ) ) ), value.a );
}
vec4 RGBEToLinear( in vec4 value ) {
return vec4( value.rgb * exp2( value.a * 255.0 - 128.0 ), 1.0 );
}
vec4 LinearToRGBE( in vec4 value ) {
float maxComponent = max( max( value.r, value.g ), value.b );
float fExp = clamp( ceil( log2( maxComponent ) ), -128.0, 127.0 );
return vec4( value.rgb / exp2( fExp ), ( fExp + 128.0 ) / 255.0 );
// return vec4( value.brg, ( 3.0 + 128.0 ) / 256.0 );
}
// reference: http://iwasbeingirony.blogspot.ca/2010/06/difference-between-rgbm-and-rgbd.html
vec4 RGBMToLinear( in vec4 value, in float maxRange ) {
return vec4( value.rgb * value.a * maxRange, 1.0 );
}
vec4 LinearToRGBM( in vec4 value, in float maxRange ) {
float maxRGB = max( value.r, max( value.g, value.b ) );
float M = clamp( maxRGB / maxRange, 0.0, 1.0 );
M = ceil( M * 255.0 ) / 255.0;
return vec4( value.rgb / ( M * maxRange ), M );
}
// reference: http://iwasbeingirony.blogspot.ca/2010/06/difference-between-rgbm-and-rgbd.html
vec4 RGBDToLinear( in vec4 value, in float maxRange ) {
return vec4( value.rgb * ( ( maxRange / 255.0 ) / value.a ), 1.0 );
}
vec4 LinearToRGBD( in vec4 value, in float maxRange ) {
float maxRGB = max( value.r, max( value.g, value.b ) );
float D = max( maxRange / maxRGB, 1.0 );
D = min( floor( D ) / 255.0, 1.0 );
return vec4( value.rgb * ( D * ( 255.0 / maxRange ) ), D );
}
// LogLuv reference: http://graphicrants.blogspot.ca/2009/04/rgbm-color-encoding.html
// M matrix, for encoding
const mat3 cLogLuvM = mat3( 0.2209, 0.3390, 0.4184, 0.1138, 0.6780, 0.7319, 0.0102, 0.1130, 0.2969 );
vec4 LinearToLogLuv( in vec4 value ) {
vec3 Xp_Y_XYZp = value.rgb * cLogLuvM;
Xp_Y_XYZp = max( Xp_Y_XYZp, vec3( 1e-6, 1e-6, 1e-6 ) );
vec4 vResult;
vResult.xy = Xp_Y_XYZp.xy / Xp_Y_XYZp.z;
float Le = 2.0 * log2(Xp_Y_XYZp.y) + 127.0;
vResult.w = fract( Le );
vResult.z = ( Le - ( floor( vResult.w * 255.0 ) ) / 255.0 ) / 255.0;
return vResult;
}
// Inverse M matrix, for decoding
const mat3 cLogLuvInverseM = mat3( 6.0014, -2.7008, -1.7996, -1.3320, 3.1029, -5.7721, 0.3008, -1.0882, 5.6268 );
vec4 LogLuvToLinear( in vec4 value ) {
float Le = value.z * 255.0 + value.w;
vec3 Xp_Y_XYZp;
Xp_Y_XYZp.y = exp2( ( Le - 127.0 ) / 2.0 );
Xp_Y_XYZp.z = Xp_Y_XYZp.y / value.y;
Xp_Y_XYZp.x = value.x * Xp_Y_XYZp.z;
vec3 vRGB = Xp_Y_XYZp.rgb * cLogLuvInverseM;
return vec4( max( vRGB, 0.0 ), 1.0 );
}
`,ZQ=`
#ifdef USE_ENVMAP
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )
vec3 cameraToVertex = normalize( vWorldPosition - cameraPosition );
// Transforming Normal Vectors with the Inverse Transformation
vec3 worldNormal = inverseTransformDirection( normal, viewMatrix );
#ifdef ENVMAP_MODE_REFLECTION
vec3 reflectVec = reflect( cameraToVertex, worldNormal );
#else
vec3 reflectVec = refract( cameraToVertex, worldNormal, refractionRatio );
#endif
#else
vec3 reflectVec = vReflect;
#endif
#ifdef ENVMAP_TYPE_CUBE
vec4 envColor = textureCube( envMap, vec3( flipEnvMap * reflectVec.x, reflectVec.yz ) );
#elif defined( ENVMAP_TYPE_EQUIREC )
vec2 sampleUV;
reflectVec = normalize( reflectVec );
sampleUV.y = asin( clamp( reflectVec.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
sampleUV.x = atan( reflectVec.z, reflectVec.x ) * RECIPROCAL_PI2 + 0.5;
vec4 envColor = texture2D( envMap, sampleUV );
#elif defined( ENVMAP_TYPE_SPHERE )
reflectVec = normalize( reflectVec );
vec3 reflectView = normalize( ( viewMatrix * vec4( reflectVec, 0.0 ) ).xyz + vec3( 0.0, 0.0, 1.0 ) );
vec4 envColor = texture2D( envMap, reflectView.xy * 0.5 + 0.5 );
#else
vec4 envColor = vec4( 0.0 );
#endif
envColor = envMapTexelToLinear( envColor );
#ifdef ENVMAP_BLENDING_MULTIPLY
outgoingLight = mix( outgoingLight, outgoingLight * envColor.xyz, specularStrength * reflectivity );
#elif defined( ENVMAP_BLENDING_MIX )
outgoingLight = mix( outgoingLight, envColor.xyz, specularStrength * reflectivity );
#elif defined( ENVMAP_BLENDING_ADD )
outgoingLight += envColor.xyz * specularStrength * reflectivity;
#endif
#endif
`,$Q=`
#if defined( USE_ENVMAP ) || defined( PHYSICAL )
uniform float reflectivity;
uniform float envMapIntensity;
#endif
#ifdef USE_ENVMAP
#if ! defined( PHYSICAL ) && ( defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG ) )
varying vec3 vWorldPosition;
#endif
#ifdef ENVMAP_TYPE_CUBE
uniform samplerCube envMap;
#else
uniform sampler2D envMap;
#endif
uniform float flipEnvMap;
uniform int maxMipLevel;
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG ) || defined( PHYSICAL )
uniform float refractionRatio;
#else
varying vec3 vReflect;
#endif
#endif
`,ej=`
#ifdef USE_ENVMAP
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )
varying vec3 vWorldPosition;
#else
varying vec3 vReflect;
uniform float refractionRatio;
#endif
#endif
`,tj=`
#ifdef USE_ENVMAP
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )
vWorldPosition = worldPosition.xyz;
#else
vec3 cameraToVertex = normalize( worldPosition.xyz - cameraPosition );
vec3 worldNormal = inverseTransformDirection( transformedNormal, viewMatrix );
#ifdef ENVMAP_MODE_REFLECTION
vReflect = reflect( cameraToVertex, worldNormal );
#else
vReflect = refract( cameraToVertex, worldNormal, refractionRatio );
#endif
#endif
#endif
`,ij=`
#ifdef USE_FOG
fogDepth = -mvPosition.z;
#endif
`,nj=`
#ifdef USE_FOG
varying float fogDepth;
#endif
`,aj=`
#ifdef USE_FOG
#ifdef FOG_EXP2
float fogFactor = whiteCompliment( exp2( - fogDensity * fogDensity * fogDepth * fogDepth * LOG2 ) );
#else
float fogFactor = smoothstep( fogNear, fogFar, fogDepth );
#endif
gl_FragColor.rgb = mix( gl_FragColor.rgb, fogColor, fogFactor );
#endif
`,sj=`
#ifdef USE_FOG
uniform vec3 fogColor;
varying float fogDepth;
#ifdef FOG_EXP2
uniform float fogDensity;
#else
uniform float fogNear;
uniform float fogFar;
#endif
#endif
`,rj=`
#ifdef TOON
uniform sampler2D gradientMap;
vec3 getGradientIrradiance( vec3 normal, vec3 lightDirection ) {
// dotNL will be from -1.0 to 1.0
float dotNL = dot( normal, lightDirection );
vec2 coord = vec2( dotNL * 0.5 + 0.5, 0.0 );
#ifdef USE_GRADIENTMAP
return texture2D( gradientMap, coord ).rgb;
#else
return ( coord.x < 0.7 ) ? vec3( 0.7 ) : vec3( 1.0 );
#endif
}
#endif
`,oj=`
#ifdef USE_LIGHTMAP
reflectedLight.indirectDiffuse += PI * texture2D( lightMap, vUv2 ).xyz * lightMapIntensity; // factor of PI should not be present; included here to prevent breakage
#endif
`,lj=`
#ifdef USE_LIGHTMAP
uniform sampler2D lightMap;
uniform float lightMapIntensity;
#endif
`,uj=`
|
|||
|
vec3 diffuse = vec3( 1.0 );
|
|||
|
|
|||
|
GeometricContext geometry;
|
|||
|
geometry.position = mvPosition.xyz;
|
|||
|
geometry.normal = normalize( transformedNormal );
|
|||
|
geometry.viewDir = normalize( -mvPosition.xyz );
|
|||
|
|
|||
|
GeometricContext backGeometry;
|
|||
|
backGeometry.position = geometry.position;
|
|||
|
backGeometry.normal = -geometry.normal;
|
|||
|
backGeometry.viewDir = geometry.viewDir;
|
|||
|
|
|||
|
vLightFront = vec3( 0.0 );
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
vLightBack = vec3( 0.0 );
|
|||
|
#endif
|
|||
|
|
|||
|
IncidentLight directLight;
|
|||
|
float dotNL;
|
|||
|
vec3 directLightColor_Diffuse;
|
|||
|
|
|||
|
#if NUM_POINT_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
getPointDirectLightIrradiance( pointLights[ i ], geometry, directLight );
|
|||
|
|
|||
|
dotNL = dot( geometry.normal, directLight.direction );
|
|||
|
directLightColor_Diffuse = PI * directLight.color;
|
|||
|
|
|||
|
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
vLightBack += saturate( -dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_SPOT_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
getSpotDirectLightIrradiance( spotLights[ i ], geometry, directLight );
|
|||
|
|
|||
|
dotNL = dot( geometry.normal, directLight.direction );
|
|||
|
directLightColor_Diffuse = PI * directLight.color;
|
|||
|
|
|||
|
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
vLightBack += saturate( -dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#endif
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
/*
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
for ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
// TODO (abelnation): implement
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
*/
|
|||
|
|
|||
|
#if NUM_DIR_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
getDirectionalDirectLightIrradiance( directionalLights[ i ], geometry, directLight );
|
|||
|
|
|||
|
dotNL = dot( geometry.normal, directLight.direction );
|
|||
|
directLightColor_Diffuse = PI * directLight.color;
|
|||
|
|
|||
|
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
vLightBack += saturate( -dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_HEMI_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
vLightFront += getHemisphereLightIrradiance( hemisphereLights[ i ], geometry );
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
vLightBack += getHemisphereLightIrradiance( hemisphereLights[ i ], backGeometry );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
`,cj=`
|
|||
|
uniform vec3 ambientLightColor;
|
|||
|
|
|||
|
vec3 getAmbientLightIrradiance( const in vec3 ambientLightColor ) {
|
|||
|
|
|||
|
vec3 irradiance = ambientLightColor;
|
|||
|
|
|||
|
#ifndef PHYSICALLY_CORRECT_LIGHTS
|
|||
|
|
|||
|
irradiance *= PI;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
return irradiance;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#if NUM_DIR_LIGHTS > 0
|
|||
|
|
|||
|
struct DirectionalLight {
|
|||
|
vec3 direction;
|
|||
|
vec3 color;
|
|||
|
|
|||
|
int shadow;
|
|||
|
float shadowBias;
|
|||
|
float shadowRadius;
|
|||
|
vec2 shadowMapSize;
|
|||
|
};
|
|||
|
|
|||
|
uniform DirectionalLight directionalLights[ NUM_DIR_LIGHTS ];
|
|||
|
|
|||
|
void getDirectionalDirectLightIrradiance( const in DirectionalLight directionalLight, const in GeometricContext geometry, out IncidentLight directLight ) {
|
|||
|
|
|||
|
directLight.color = directionalLight.color;
|
|||
|
directLight.direction = directionalLight.direction;
|
|||
|
directLight.visible = true;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
|
|||
|
#if NUM_POINT_LIGHTS > 0
|
|||
|
|
|||
|
struct PointLight {
|
|||
|
vec3 position;
|
|||
|
vec3 color;
|
|||
|
float distance;
|
|||
|
float decay;
|
|||
|
|
|||
|
int shadow;
|
|||
|
float shadowBias;
|
|||
|
float shadowRadius;
|
|||
|
vec2 shadowMapSize;
|
|||
|
float shadowCameraNear;
|
|||
|
float shadowCameraFar;
|
|||
|
};
|
|||
|
|
|||
|
uniform PointLight pointLights[ NUM_POINT_LIGHTS ];
|
|||
|
|
|||
|
// directLight is an out parameter as having it as a return value caused compiler errors on some devices
|
|||
|
void getPointDirectLightIrradiance( const in PointLight pointLight, const in GeometricContext geometry, out IncidentLight directLight ) {
|
|||
|
|
|||
|
vec3 lVector = pointLight.position - geometry.position;
|
|||
|
directLight.direction = normalize( lVector );
|
|||
|
|
|||
|
float lightDistance = length( lVector );
|
|||
|
|
|||
|
directLight.color = pointLight.color;
|
|||
|
directLight.color *= punctualLightIntensityToIrradianceFactor( lightDistance, pointLight.distance, pointLight.decay );
|
|||
|
directLight.visible = ( directLight.color != vec3( 0.0 ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
|
|||
|
#if NUM_SPOT_LIGHTS > 0
|
|||
|
|
|||
|
struct SpotLight {
|
|||
|
vec3 position;
|
|||
|
vec3 direction;
|
|||
|
vec3 color;
|
|||
|
float distance;
|
|||
|
float decay;
|
|||
|
float coneCos;
|
|||
|
float penumbraCos;
|
|||
|
|
|||
|
int shadow;
|
|||
|
float shadowBias;
|
|||
|
float shadowRadius;
|
|||
|
vec2 shadowMapSize;
|
|||
|
};
|
|||
|
|
|||
|
uniform SpotLight spotLights[ NUM_SPOT_LIGHTS ];
|
|||
|
|
|||
|
// directLight is an out parameter as having it as a return value caused compiler errors on some devices
|
|||
|
void getSpotDirectLightIrradiance( const in SpotLight spotLight, const in GeometricContext geometry, out IncidentLight directLight ) {
|
|||
|
|
|||
|
vec3 lVector = spotLight.position - geometry.position;
|
|||
|
directLight.direction = normalize( lVector );
|
|||
|
|
|||
|
float lightDistance = length( lVector );
|
|||
|
float angleCos = dot( directLight.direction, spotLight.direction );
|
|||
|
|
|||
|
if ( angleCos > spotLight.coneCos ) {
|
|||
|
|
|||
|
float spotEffect = smoothstep( spotLight.coneCos, spotLight.penumbraCos, angleCos );
|
|||
|
|
|||
|
directLight.color = spotLight.color;
|
|||
|
directLight.color *= spotEffect * punctualLightIntensityToIrradianceFactor( lightDistance, spotLight.distance, spotLight.decay );
|
|||
|
directLight.visible = true;
|
|||
|
|
|||
|
} else {
|
|||
|
|
|||
|
directLight.color = vec3( 0.0 );
|
|||
|
directLight.visible = false;
|
|||
|
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
struct RectAreaLight {
|
|||
|
vec3 color;
|
|||
|
vec3 position;
|
|||
|
vec3 halfWidth;
|
|||
|
vec3 halfHeight;
|
|||
|
};
|
|||
|
|
|||
|
// Pre-computed values of LinearTransformedCosine approximation of BRDF
|
|||
|
// BRDF approximation Texture is 64x64
|
|||
|
uniform sampler2D ltc_1; // RGBA Float
|
|||
|
uniform sampler2D ltc_2; // RGBA Float
|
|||
|
|
|||
|
uniform RectAreaLight rectAreaLights[ NUM_RECT_AREA_LIGHTS ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
|
|||
|
#if NUM_HEMI_LIGHTS > 0
|
|||
|
|
|||
|
struct HemisphereLight {
|
|||
|
vec3 direction;
|
|||
|
vec3 skyColor;
|
|||
|
vec3 groundColor;
|
|||
|
};
|
|||
|
|
|||
|
uniform HemisphereLight hemisphereLights[ NUM_HEMI_LIGHTS ];
|
|||
|
|
|||
|
vec3 getHemisphereLightIrradiance( const in HemisphereLight hemiLight, const in GeometricContext geometry ) {
|
|||
|
|
|||
|
float dotNL = dot( geometry.normal, hemiLight.direction );
|
|||
|
float hemiDiffuseWeight = 0.5 * dotNL + 0.5;
|
|||
|
|
|||
|
vec3 irradiance = mix( hemiLight.groundColor, hemiLight.skyColor, hemiDiffuseWeight );
|
|||
|
|
|||
|
#ifndef PHYSICALLY_CORRECT_LIGHTS
|
|||
|
|
|||
|
irradiance *= PI;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
return irradiance;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
`,dj=`
|
|||
|
#if defined( USE_ENVMAP ) && defined( PHYSICAL )
|
|||
|
|
|||
|
vec3 getLightProbeIndirectIrradiance( /*const in SpecularLightProbe specularLightProbe,*/ const in GeometricContext geometry, const in int maxMIPLevel ) {
|
|||
|
|
|||
|
vec3 worldNormal = inverseTransformDirection( geometry.normal, viewMatrix );
|
|||
|
|
|||
|
#ifdef ENVMAP_TYPE_CUBE
|
|||
|
|
|||
|
vec3 queryVec = vec3( flipEnvMap * worldNormal.x, worldNormal.yz );
|
|||
|
|
|||
|
// TODO: replace with properly filtered cubemaps and access the irradiance LOD level, be it the last LOD level
|
|||
|
// of a specular cubemap, or just the default level of a specially created irradiance cubemap.
|
|||
|
|
|||
|
#ifdef TEXTURE_LOD_EXT
|
|||
|
|
|||
|
vec4 envMapColor = textureCubeLodEXT( envMap, queryVec, float( maxMIPLevel ) );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
// force the bias high to get the last LOD level as it is the most blurred.
|
|||
|
vec4 envMapColor = textureCube( envMap, queryVec, float( maxMIPLevel ) );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
|
|||
|
|
|||
|
#elif defined( ENVMAP_TYPE_CUBE_UV )
|
|||
|
|
|||
|
vec3 queryVec = vec3( flipEnvMap * worldNormal.x, worldNormal.yz );
|
|||
|
vec4 envMapColor = textureCubeUV( envMap, queryVec, 1.0 );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec4 envMapColor = vec4( 0.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
return PI * envMapColor.rgb * envMapIntensity;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// taken from here: http://casual-effects.blogspot.ca/2011/08/plausible-environment-lighting-in-two.html
|
|||
|
float getSpecularMIPLevel( const in float blinnShininessExponent, const in int maxMIPLevel ) {
|
|||
|
|
|||
|
//float envMapWidth = pow( 2.0, maxMIPLevelScalar );
|
|||
|
//float desiredMIPLevel = log2( envMapWidth * sqrt( 3.0 ) ) - 0.5 * log2( pow2( blinnShininessExponent ) + 1.0 );
|
|||
|
|
|||
|
float maxMIPLevelScalar = float( maxMIPLevel );
|
|||
|
float desiredMIPLevel = maxMIPLevelScalar + 0.79248 - 0.5 * log2( pow2( blinnShininessExponent ) + 1.0 );
|
|||
|
|
|||
|
// clamp to allowable LOD ranges.
|
|||
|
return clamp( desiredMIPLevel, 0.0, maxMIPLevelScalar );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
vec3 getLightProbeIndirectRadiance( /*const in SpecularLightProbe specularLightProbe,*/ const in GeometricContext geometry, const in float blinnShininessExponent, const in int maxMIPLevel ) {
|
|||
|
|
|||
|
#ifdef ENVMAP_MODE_REFLECTION
|
|||
|
|
|||
|
vec3 reflectVec = reflect( -geometry.viewDir, geometry.normal );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec3 reflectVec = refract( -geometry.viewDir, geometry.normal, refractionRatio );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
reflectVec = inverseTransformDirection( reflectVec, viewMatrix );
|
|||
|
|
|||
|
float specularMIPLevel = getSpecularMIPLevel( blinnShininessExponent, maxMIPLevel );
|
|||
|
|
|||
|
#ifdef ENVMAP_TYPE_CUBE
|
|||
|
|
|||
|
vec3 queryReflectVec = vec3( flipEnvMap * reflectVec.x, reflectVec.yz );
|
|||
|
|
|||
|
#ifdef TEXTURE_LOD_EXT
|
|||
|
|
|||
|
vec4 envMapColor = textureCubeLodEXT( envMap, queryReflectVec, specularMIPLevel );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec4 envMapColor = textureCube( envMap, queryReflectVec, specularMIPLevel );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
|
|||
|
|
|||
|
#elif defined( ENVMAP_TYPE_CUBE_UV )
|
|||
|
|
|||
|
vec3 queryReflectVec = vec3( flipEnvMap * reflectVec.x, reflectVec.yz );
|
|||
|
vec4 envMapColor = textureCubeUV( envMap, queryReflectVec, BlinnExponentToGGXRoughness(blinnShininessExponent ));
|
|||
|
|
|||
|
#elif defined( ENVMAP_TYPE_EQUIREC )
|
|||
|
|
|||
|
vec2 sampleUV;
|
|||
|
sampleUV.y = asin( clamp( reflectVec.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
|
|||
|
sampleUV.x = atan( reflectVec.z, reflectVec.x ) * RECIPROCAL_PI2 + 0.5;
|
|||
|
|
|||
|
#ifdef TEXTURE_LOD_EXT
|
|||
|
|
|||
|
vec4 envMapColor = texture2DLodEXT( envMap, sampleUV, specularMIPLevel );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec4 envMapColor = texture2D( envMap, sampleUV, specularMIPLevel );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
|
|||
|
|
|||
|
#elif defined( ENVMAP_TYPE_SPHERE )
|
|||
|
|
|||
|
vec3 reflectView = normalize( ( viewMatrix * vec4( reflectVec, 0.0 ) ).xyz + vec3( 0.0,0.0,1.0 ) );
|
|||
|
|
|||
|
#ifdef TEXTURE_LOD_EXT
|
|||
|
|
|||
|
vec4 envMapColor = texture2DLodEXT( envMap, reflectView.xy * 0.5 + 0.5, specularMIPLevel );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec4 envMapColor = texture2D( envMap, reflectView.xy * 0.5 + 0.5, specularMIPLevel );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
return envMapColor.rgb * envMapIntensity;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
`,fj=`
|
|||
|
BlinnPhongMaterial material;
|
|||
|
material.diffuseColor = diffuseColor.rgb;
|
|||
|
material.specularColor = specular;
|
|||
|
material.specularShininess = shininess;
|
|||
|
material.specularStrength = specularStrength;
|
|||
|
`,hj=`
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
|
|||
|
struct BlinnPhongMaterial {
|
|||
|
|
|||
|
vec3 diffuseColor;
|
|||
|
vec3 specularColor;
|
|||
|
float specularShininess;
|
|||
|
float specularStrength;
|
|||
|
|
|||
|
};
|
|||
|
|
|||
|
void RE_Direct_BlinnPhong( const in IncidentLight directLight, const in GeometricContext geometry, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {
|
|||
|
|
|||
|
#ifdef TOON
|
|||
|
|
|||
|
vec3 irradiance = getGradientIrradiance( geometry.normal, directLight.direction ) * directLight.color;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
float dotNL = saturate( dot( geometry.normal, directLight.direction ) );
|
|||
|
vec3 irradiance = dotNL * directLight.color;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#ifndef PHYSICALLY_CORRECT_LIGHTS
|
|||
|
|
|||
|
irradiance *= PI; // punctual light
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
reflectedLight.directDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
|
|||
|
|
|||
|
reflectedLight.directSpecular += irradiance * BRDF_Specular_BlinnPhong( directLight, geometry, material.specularColor, material.specularShininess ) * material.specularStrength;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
void RE_IndirectDiffuse_BlinnPhong( const in vec3 irradiance, const in GeometricContext geometry, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#define RE_Direct RE_Direct_BlinnPhong
|
|||
|
#define RE_IndirectDiffuse RE_IndirectDiffuse_BlinnPhong
|
|||
|
|
|||
|
#define Material_LightProbeLOD( material ) (0)
|
|||
|
`,Aj=`
|
|||
|
PhysicalMaterial material;
|
|||
|
material.diffuseColor = diffuseColor.rgb * ( 1.0 - metalnessFactor );
|
|||
|
material.specularRoughness = clamp( roughnessFactor, 0.04, 1.0 );
|
|||
|
#ifdef STANDARD
|
|||
|
material.specularColor = mix( vec3( DEFAULT_SPECULAR_COEFFICIENT ), diffuseColor.rgb, metalnessFactor );
|
|||
|
#else
|
|||
|
material.specularColor = mix( vec3( MAXIMUM_SPECULAR_COEFFICIENT * pow2( reflectivity ) ), diffuseColor.rgb, metalnessFactor );
|
|||
|
material.clearCoat = saturate( clearCoat ); // Burley clearcoat model
|
|||
|
material.clearCoatRoughness = clamp( clearCoatRoughness, 0.04, 1.0 );
|
|||
|
#endif
|
|||
|
`,gj=`
|
|||
|
struct PhysicalMaterial {
|
|||
|
|
|||
|
vec3 diffuseColor;
|
|||
|
float specularRoughness;
|
|||
|
vec3 specularColor;
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
float clearCoat;
|
|||
|
float clearCoatRoughness;
|
|||
|
#endif
|
|||
|
|
|||
|
};
|
|||
|
|
|||
|
#define MAXIMUM_SPECULAR_COEFFICIENT 0.16
|
|||
|
#define DEFAULT_SPECULAR_COEFFICIENT 0.04
|
|||
|
|
|||
|
// Clear coat directional hemishperical reflectance (this approximation should be improved)
|
|||
|
float clearCoatDHRApprox( const in float roughness, const in float dotNL ) {
|
|||
|
|
|||
|
return DEFAULT_SPECULAR_COEFFICIENT + ( 1.0 - DEFAULT_SPECULAR_COEFFICIENT ) * ( pow( 1.0 - dotNL, 5.0 ) * pow( 1.0 - roughness, 2.0 ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
void RE_Direct_RectArea_Physical( const in RectAreaLight rectAreaLight, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
|
|||
|
|
|||
|
vec3 normal = geometry.normal;
|
|||
|
vec3 viewDir = geometry.viewDir;
|
|||
|
vec3 position = geometry.position;
|
|||
|
vec3 lightPos = rectAreaLight.position;
|
|||
|
vec3 halfWidth = rectAreaLight.halfWidth;
|
|||
|
vec3 halfHeight = rectAreaLight.halfHeight;
|
|||
|
vec3 lightColor = rectAreaLight.color;
|
|||
|
float roughness = material.specularRoughness;
|
|||
|
|
|||
|
vec3 rectCoords[ 4 ];
|
|||
|
rectCoords[ 0 ] = lightPos + halfWidth - halfHeight; // counterclockwise; light shines in local neg z direction
|
|||
|
rectCoords[ 1 ] = lightPos - halfWidth - halfHeight;
|
|||
|
rectCoords[ 2 ] = lightPos - halfWidth + halfHeight;
|
|||
|
rectCoords[ 3 ] = lightPos + halfWidth + halfHeight;
|
|||
|
|
|||
|
vec2 uv = LTC_Uv( normal, viewDir, roughness );
|
|||
|
|
|||
|
vec4 t1 = texture2D( ltc_1, uv );
|
|||
|
vec4 t2 = texture2D( ltc_2, uv );
|
|||
|
|
|||
|
mat3 mInv = mat3(
|
|||
|
vec3( t1.x, 0, t1.y ),
|
|||
|
vec3( 0, 1, 0 ),
|
|||
|
vec3( t1.z, 0, t1.w )
|
|||
|
);
|
|||
|
|
|||
|
// LTC Fresnel Approximation by Stephen Hill
|
|||
|
// http://blog.selfshadow.com/publications/s2016-advances/s2016_ltc_fresnel.pdf
|
|||
|
vec3 fresnel = ( material.specularColor * t2.x + ( vec3( 1.0 ) - material.specularColor ) * t2.y );
|
|||
|
|
|||
|
reflectedLight.directSpecular += lightColor * fresnel * LTC_Evaluate( normal, viewDir, position, mInv, rectCoords );
|
|||
|
|
|||
|
reflectedLight.directDiffuse += lightColor * material.diffuseColor * LTC_Evaluate( normal, viewDir, position, mat3( 1.0 ), rectCoords );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
void RE_Direct_Physical( const in IncidentLight directLight, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
|
|||
|
|
|||
|
float dotNL = saturate( dot( geometry.normal, directLight.direction ) );
|
|||
|
|
|||
|
vec3 irradiance = dotNL * directLight.color;
|
|||
|
|
|||
|
#ifndef PHYSICALLY_CORRECT_LIGHTS
|
|||
|
|
|||
|
irradiance *= PI; // punctual light
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
float clearCoatDHR = material.clearCoat * clearCoatDHRApprox( material.clearCoatRoughness, dotNL );
|
|||
|
#else
|
|||
|
float clearCoatDHR = 0.0;
|
|||
|
#endif
|
|||
|
|
|||
|
reflectedLight.directSpecular += ( 1.0 - clearCoatDHR ) * irradiance * BRDF_Specular_GGX( directLight, geometry, material.specularColor, material.specularRoughness );
|
|||
|
|
|||
|
reflectedLight.directDiffuse += ( 1.0 - clearCoatDHR ) * irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
|
|||
|
reflectedLight.directSpecular += irradiance * material.clearCoat * BRDF_Specular_GGX( directLight, geometry, vec3( DEFAULT_SPECULAR_COEFFICIENT ), material.clearCoatRoughness );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
void RE_IndirectDiffuse_Physical( const in vec3 irradiance, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
void RE_IndirectSpecular_Physical( const in vec3 radiance, const in vec3 clearCoatRadiance, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
|
|||
|
float dotNL = dotNV;
|
|||
|
float clearCoatDHR = material.clearCoat * clearCoatDHRApprox( material.clearCoatRoughness, dotNL );
|
|||
|
#else
|
|||
|
float clearCoatDHR = 0.0;
|
|||
|
#endif
|
|||
|
|
|||
|
reflectedLight.indirectSpecular += ( 1.0 - clearCoatDHR ) * radiance * BRDF_Specular_GGX_Environment( geometry, material.specularColor, material.specularRoughness );
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
|
|||
|
reflectedLight.indirectSpecular += clearCoatRadiance * material.clearCoat * BRDF_Specular_GGX_Environment( geometry, vec3( DEFAULT_SPECULAR_COEFFICIENT ), material.clearCoatRoughness );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#define RE_Direct RE_Direct_Physical
|
|||
|
#define RE_Direct_RectArea RE_Direct_RectArea_Physical
|
|||
|
#define RE_IndirectDiffuse RE_IndirectDiffuse_Physical
|
|||
|
#define RE_IndirectSpecular RE_IndirectSpecular_Physical
|
|||
|
|
|||
|
#define Material_BlinnShininessExponent( material ) GGXRoughnessToBlinnExponent( material.specularRoughness )
|
|||
|
#define Material_ClearCoat_BlinnShininessExponent( material ) GGXRoughnessToBlinnExponent( material.clearCoatRoughness )
|
|||
|
|
|||
|
// ref: https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
|
|||
|
float computeSpecularOcclusion( const in float dotNV, const in float ambientOcclusion, const in float roughness ) {
|
|||
|
|
|||
|
return saturate( pow( dotNV + ambientOcclusion, exp2( - 16.0 * roughness - 1.0 ) ) - 1.0 + ambientOcclusion );
|
|||
|
|
|||
|
}
|
|||
|
`,pj=`
|
|||
|
/**
|
|||
|
* This is a template that can be used to light a material, it uses pluggable
|
|||
|
* RenderEquations (RE)for specific lighting scenarios.
|
|||
|
*
|
|||
|
* Instructions for use:
|
|||
|
* - Ensure that both RE_Direct, RE_IndirectDiffuse and RE_IndirectSpecular are defined
|
|||
|
* - If you have defined an RE_IndirectSpecular, you need to also provide a Material_LightProbeLOD. <---- ???
|
|||
|
* - Create a material parameter that is to be passed as the third parameter to your lighting functions.
|
|||
|
*
|
|||
|
* TODO:
|
|||
|
* - Add area light support.
|
|||
|
* - Add sphere light support.
|
|||
|
* - Add diffuse light probe (irradiance cubemap) support.
|
|||
|
*/
|
|||
|
|
|||
|
GeometricContext geometry;
|
|||
|
|
|||
|
geometry.position = - vViewPosition;
|
|||
|
geometry.normal = normal;
|
|||
|
geometry.viewDir = normalize( vViewPosition );
|
|||
|
|
|||
|
IncidentLight directLight;
|
|||
|
|
|||
|
#if ( NUM_POINT_LIGHTS > 0 ) && defined( RE_Direct )
|
|||
|
|
|||
|
PointLight pointLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
pointLight = pointLights[ i ];
|
|||
|
|
|||
|
getPointDirectLightIrradiance( pointLight, geometry, directLight );
|
|||
|
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
directLight.color *= all( bvec2( pointLight.shadow, directLight.visible ) ) ? getPointShadow( pointShadowMap[ i ], pointLight.shadowMapSize, pointLight.shadowBias, pointLight.shadowRadius, vPointShadowCoord[ i ], pointLight.shadowCameraNear, pointLight.shadowCameraFar ) : 1.0;
|
|||
|
#endif
|
|||
|
|
|||
|
RE_Direct( directLight, geometry, material, reflectedLight );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if ( NUM_SPOT_LIGHTS > 0 ) && defined( RE_Direct )
|
|||
|
|
|||
|
SpotLight spotLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
spotLight = spotLights[ i ];
|
|||
|
|
|||
|
getSpotDirectLightIrradiance( spotLight, geometry, directLight );
|
|||
|
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
directLight.color *= all( bvec2( spotLight.shadow, directLight.visible ) ) ? getShadow( spotShadowMap[ i ], spotLight.shadowMapSize, spotLight.shadowBias, spotLight.shadowRadius, vSpotShadowCoord[ i ] ) : 1.0;
|
|||
|
#endif
|
|||
|
|
|||
|
RE_Direct( directLight, geometry, material, reflectedLight );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if ( NUM_DIR_LIGHTS > 0 ) && defined( RE_Direct )
|
|||
|
|
|||
|
DirectionalLight directionalLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
directionalLight = directionalLights[ i ];
|
|||
|
|
|||
|
getDirectionalDirectLightIrradiance( directionalLight, geometry, directLight );
|
|||
|
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
directLight.color *= all( bvec2( directionalLight.shadow, directLight.visible ) ) ? getShadow( directionalShadowMap[ i ], directionalLight.shadowMapSize, directionalLight.shadowBias, directionalLight.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;
|
|||
|
#endif
|
|||
|
|
|||
|
RE_Direct( directLight, geometry, material, reflectedLight );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if ( NUM_RECT_AREA_LIGHTS > 0 ) && defined( RE_Direct_RectArea )
|
|||
|
|
|||
|
RectAreaLight rectAreaLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
rectAreaLight = rectAreaLights[ i ];
|
|||
|
RE_Direct_RectArea( rectAreaLight, geometry, material, reflectedLight );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if defined( RE_IndirectDiffuse )
|
|||
|
|
|||
|
vec3 irradiance = getAmbientLightIrradiance( ambientLightColor );
|
|||
|
|
|||
|
#if ( NUM_HEMI_LIGHTS > 0 )
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
irradiance += getHemisphereLightIrradiance( hemisphereLights[ i ], geometry );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if defined( RE_IndirectSpecular )
|
|||
|
|
|||
|
vec3 radiance = vec3( 0.0 );
|
|||
|
vec3 clearCoatRadiance = vec3( 0.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
`,mj=`
|
|||
|
#if defined( RE_IndirectDiffuse )
|
|||
|
|
|||
|
#ifdef USE_LIGHTMAP
|
|||
|
|
|||
|
vec3 lightMapIrradiance = texture2D( lightMap, vUv2 ).xyz * lightMapIntensity;
|
|||
|
|
|||
|
#ifndef PHYSICALLY_CORRECT_LIGHTS
|
|||
|
|
|||
|
lightMapIrradiance *= PI; // factor of PI should not be present; included here to prevent breakage
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
irradiance += lightMapIrradiance;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if defined( USE_ENVMAP ) && defined( PHYSICAL ) && defined( ENVMAP_TYPE_CUBE_UV )
|
|||
|
|
|||
|
irradiance += getLightProbeIndirectIrradiance( /*lightProbe,*/ geometry, maxMipLevel );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if defined( USE_ENVMAP ) && defined( RE_IndirectSpecular )
|
|||
|
|
|||
|
radiance += getLightProbeIndirectRadiance( /*specularLightProbe,*/ geometry, Material_BlinnShininessExponent( material ), maxMipLevel );
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
clearCoatRadiance += getLightProbeIndirectRadiance( /*specularLightProbe,*/ geometry, Material_ClearCoat_BlinnShininessExponent( material ), maxMipLevel );
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,_j=`
|
|||
|
#if defined( RE_IndirectDiffuse )
|
|||
|
|
|||
|
RE_IndirectDiffuse( irradiance, geometry, material, reflectedLight );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if defined( RE_IndirectSpecular )
|
|||
|
|
|||
|
RE_IndirectSpecular( radiance, clearCoatRadiance, geometry, material, reflectedLight );
|
|||
|
|
|||
|
#endif
|
|||
|
`,Fj=`
|
|||
|
#if defined( USE_LOGDEPTHBUF ) && defined( USE_LOGDEPTHBUF_EXT )
|
|||
|
|
|||
|
gl_FragDepthEXT = log2( vFragDepth ) * logDepthBufFC * 0.5;
|
|||
|
|
|||
|
#endif
|
|||
|
`,bj=`
|
|||
|
#if defined( USE_LOGDEPTHBUF ) && defined( USE_LOGDEPTHBUF_EXT )
|
|||
|
|
|||
|
uniform float logDepthBufFC;
|
|||
|
varying float vFragDepth;
|
|||
|
|
|||
|
#endif
|
|||
|
`,vj=`
|
|||
|
#ifdef USE_LOGDEPTHBUF
|
|||
|
|
|||
|
#ifdef USE_LOGDEPTHBUF_EXT
|
|||
|
|
|||
|
varying float vFragDepth;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
uniform float logDepthBufFC;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,yj=`
|
|||
|
#ifdef USE_LOGDEPTHBUF
|
|||
|
|
|||
|
#ifdef USE_LOGDEPTHBUF_EXT
|
|||
|
|
|||
|
vFragDepth = 1.0 + gl_Position.w;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
gl_Position.z = log2( max( EPSILON, gl_Position.w + 1.0 ) ) * logDepthBufFC - 1.0;
|
|||
|
|
|||
|
gl_Position.z *= gl_Position.w;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,wj=`
|
|||
|
#ifdef USE_MAP
|
|||
|
|
|||
|
vec4 texelColor = texture2D( map, vUv );
|
|||
|
|
|||
|
texelColor = mapTexelToLinear( texelColor );
|
|||
|
diffuseColor *= texelColor;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Cj=`
|
|||
|
#ifdef USE_MAP
|
|||
|
|
|||
|
uniform sampler2D map;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Ej=`
|
|||
|
#ifdef USE_MAP
|
|||
|
|
|||
|
vec2 uv = ( uvTransform * vec3( gl_PointCoord.x, 1.0 - gl_PointCoord.y, 1 ) ).xy;
|
|||
|
vec4 mapTexel = texture2D( map, uv );
|
|||
|
diffuseColor *= mapTexelToLinear( mapTexel );
|
|||
|
|
|||
|
#endif
|
|||
|
`,kj=`
|
|||
|
#ifdef USE_MAP
|
|||
|
|
|||
|
uniform mat3 uvTransform;
|
|||
|
uniform sampler2D map;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Bj=`
|
|||
|
float metalnessFactor = metalness;
|
|||
|
|
|||
|
#ifdef USE_METALNESSMAP
|
|||
|
|
|||
|
vec4 texelMetalness = texture2D( metalnessMap, vUv );
|
|||
|
|
|||
|
// reads channel B, compatible with a combined OcclusionRoughnessMetallic (RGB) texture
|
|||
|
metalnessFactor *= texelMetalness.b;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Sj=`
|
|||
|
#ifdef USE_METALNESSMAP
|
|||
|
|
|||
|
uniform sampler2D metalnessMap;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Dj=`
|
|||
|
#ifdef USE_MORPHNORMALS
|
|||
|
|
|||
|
objectNormal += ( morphNormal0 - normal ) * morphTargetInfluences[ 0 ];
|
|||
|
objectNormal += ( morphNormal1 - normal ) * morphTargetInfluences[ 1 ];
|
|||
|
objectNormal += ( morphNormal2 - normal ) * morphTargetInfluences[ 2 ];
|
|||
|
objectNormal += ( morphNormal3 - normal ) * morphTargetInfluences[ 3 ];
|
|||
|
|
|||
|
#endif
|
|||
|
`,xj=`
|
|||
|
#ifdef USE_MORPHTARGETS
|
|||
|
|
|||
|
#ifndef USE_MORPHNORMALS
|
|||
|
|
|||
|
uniform float morphTargetInfluences[ 8 ];
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
uniform float morphTargetInfluences[ 4 ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,Tj=`
|
|||
|
#ifdef USE_MORPHTARGETS
|
|||
|
|
|||
|
transformed += ( morphTarget0 - position ) * morphTargetInfluences[ 0 ];
|
|||
|
transformed += ( morphTarget1 - position ) * morphTargetInfluences[ 1 ];
|
|||
|
transformed += ( morphTarget2 - position ) * morphTargetInfluences[ 2 ];
|
|||
|
transformed += ( morphTarget3 - position ) * morphTargetInfluences[ 3 ];
|
|||
|
|
|||
|
#ifndef USE_MORPHNORMALS
|
|||
|
|
|||
|
transformed += ( morphTarget4 - position ) * morphTargetInfluences[ 4 ];
|
|||
|
transformed += ( morphTarget5 - position ) * morphTargetInfluences[ 5 ];
|
|||
|
transformed += ( morphTarget6 - position ) * morphTargetInfluences[ 6 ];
|
|||
|
transformed += ( morphTarget7 - position ) * morphTargetInfluences[ 7 ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,Ij=`
|
|||
|
#ifdef FLAT_SHADED
|
|||
|
|
|||
|
// Workaround for Adreno/Nexus5 not able able to do dFdx( vViewPosition ) ...
|
|||
|
|
|||
|
vec3 fdx = vec3( dFdx( vViewPosition.x ), dFdx( vViewPosition.y ), dFdx( vViewPosition.z ) );
|
|||
|
vec3 fdy = vec3( dFdy( vViewPosition.x ), dFdy( vViewPosition.y ), dFdy( vViewPosition.z ) );
|
|||
|
vec3 normal = normalize( cross( fdx, fdy ) );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec3 normal = normalize( vNormal );
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
normal = normal * ( float( gl_FrontFacing ) * 2.0 - 1.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,Pj=`
|
|||
|
#ifdef USE_NORMALMAP
|
|||
|
|
|||
|
#ifdef OBJECTSPACE_NORMALMAP
|
|||
|
|
|||
|
normal = texture2D( normalMap, vUv ).xyz * 2.0 - 1.0; // overrides both flatShading and attribute normals
|
|||
|
|
|||
|
#ifdef FLIP_SIDED
|
|||
|
|
|||
|
normal = - normal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
normal = normal * ( float( gl_FrontFacing ) * 2.0 - 1.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
normal = normalize( normalMatrix * normal );
|
|||
|
|
|||
|
#else // tangent-space normal map
|
|||
|
|
|||
|
normal = perturbNormal2Arb( -vViewPosition, normal );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#elif defined( USE_BUMPMAP )
|
|||
|
|
|||
|
normal = perturbNormalArb( -vViewPosition, normal, dHdxy_fwd() );
|
|||
|
|
|||
|
#endif
|
|||
|
`,Mj=`
|
|||
|
#ifdef USE_NORMALMAP
|
|||
|
|
|||
|
uniform sampler2D normalMap;
|
|||
|
uniform vec2 normalScale;
|
|||
|
|
|||
|
#ifdef OBJECTSPACE_NORMALMAP
|
|||
|
|
|||
|
uniform mat3 normalMatrix;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
// Per-Pixel Tangent Space Normal Mapping
|
|||
|
// http://hacksoflife.blogspot.ch/2009/11/per-pixel-tangent-space-normal-mapping.html
|
|||
|
|
|||
|
vec3 perturbNormal2Arb( vec3 eye_pos, vec3 surf_norm ) {
|
|||
|
|
|||
|
// Workaround for Adreno 3XX dFd*( vec3 ) bug. See #9988
|
|||
|
|
|||
|
vec3 q0 = vec3( dFdx( eye_pos.x ), dFdx( eye_pos.y ), dFdx( eye_pos.z ) );
|
|||
|
vec3 q1 = vec3( dFdy( eye_pos.x ), dFdy( eye_pos.y ), dFdy( eye_pos.z ) );
|
|||
|
vec2 st0 = dFdx( vUv.st );
|
|||
|
vec2 st1 = dFdy( vUv.st );
|
|||
|
|
|||
|
float scale = sign( st1.t * st0.s - st0.t * st1.s ); // we do not care about the magnitude
|
|||
|
|
|||
|
vec3 S = normalize( ( q0 * st1.t - q1 * st0.t ) * scale );
|
|||
|
vec3 T = normalize( ( - q0 * st1.s + q1 * st0.s ) * scale );
|
|||
|
vec3 N = normalize( surf_norm );
|
|||
|
mat3 tsn = mat3( S, T, N );
|
|||
|
|
|||
|
vec3 mapN = texture2D( normalMap, vUv ).xyz * 2.0 - 1.0;
|
|||
|
|
|||
|
mapN.xy *= normalScale;
|
|||
|
mapN.xy *= ( float( gl_FrontFacing ) * 2.0 - 1.0 );
|
|||
|
|
|||
|
return normalize( tsn * mapN );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,Lj=`
|
|||
|
vec3 packNormalToRGB( const in vec3 normal ) {
|
|||
|
return normalize( normal ) * 0.5 + 0.5;
|
|||
|
}
|
|||
|
|
|||
|
vec3 unpackRGBToNormal( const in vec3 rgb ) {
|
|||
|
return 2.0 * rgb.xyz - 1.0;
|
|||
|
}
|
|||
|
|
|||
|
const float PackUpscale = 256. / 255.; // fraction -> 0..1 (including 1)
|
|||
|
const float UnpackDownscale = 255. / 256.; // 0..1 -> fraction (excluding 1)
|
|||
|
|
|||
|
const vec3 PackFactors = vec3( 256. * 256. * 256., 256. * 256., 256. );
|
|||
|
const vec4 UnpackFactors = UnpackDownscale / vec4( PackFactors, 1. );
|
|||
|
|
|||
|
const float ShiftRight8 = 1. / 256.;
|
|||
|
|
|||
|
vec4 packDepthToRGBA( const in float v ) {
|
|||
|
vec4 r = vec4( fract( v * PackFactors ), v );
|
|||
|
r.yzw -= r.xyz * ShiftRight8; // tidy overflow
|
|||
|
return r * PackUpscale;
|
|||
|
}
|
|||
|
|
|||
|
float unpackRGBAToDepth( const in vec4 v ) {
|
|||
|
return dot( v, UnpackFactors );
|
|||
|
}
|
|||
|
|
|||
|
// NOTE: viewZ/eyeZ is < 0 when in front of the camera per OpenGL conventions
|
|||
|
|
|||
|
float viewZToOrthographicDepth( const in float viewZ, const in float near, const in float far ) {
|
|||
|
return ( viewZ + near ) / ( near - far );
|
|||
|
}
|
|||
|
float orthographicDepthToViewZ( const in float linearClipZ, const in float near, const in float far ) {
|
|||
|
return linearClipZ * ( near - far ) - near;
|
|||
|
}
|
|||
|
|
|||
|
float viewZToPerspectiveDepth( const in float viewZ, const in float near, const in float far ) {
|
|||
|
return (( near + viewZ ) * far ) / (( far - near ) * viewZ );
|
|||
|
}
|
|||
|
float perspectiveDepthToViewZ( const in float invClipZ, const in float near, const in float far ) {
|
|||
|
return ( near * far ) / ( ( far - near ) * invClipZ - far );
|
|||
|
}
|
|||
|
`,Rj=`
|
|||
|
#ifdef PREMULTIPLIED_ALPHA
|
|||
|
|
|||
|
// Get get normal blending with premultipled, use with CustomBlending, OneFactor, OneMinusSrcAlphaFactor, AddEquation.
|
|||
|
gl_FragColor.rgb *= gl_FragColor.a;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Uj=`
|
|||
|
vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );
|
|||
|
|
|||
|
gl_Position = projectionMatrix * mvPosition;
|
|||
|
`,Oj=`
|
|||
|
#if defined( DITHERING )
|
|||
|
|
|||
|
gl_FragColor.rgb = dithering( gl_FragColor.rgb );
|
|||
|
|
|||
|
#endif
|
|||
|
`,Nj=`
|
|||
|
#if defined( DITHERING )
|
|||
|
|
|||
|
// based on https://www.shadertoy.com/view/MslGR8
|
|||
|
vec3 dithering( vec3 color ) {
|
|||
|
//Calculate grid position
|
|||
|
float grid_position = rand( gl_FragCoord.xy );
|
|||
|
|
|||
|
//Shift the individual colors differently, thus making it even harder to see the dithering pattern
|
|||
|
vec3 dither_shift_RGB = vec3( 0.25 / 255.0, -0.25 / 255.0, 0.25 / 255.0 );
|
|||
|
|
|||
|
//modify shift acording to grid position.
|
|||
|
dither_shift_RGB = mix( 2.0 * dither_shift_RGB, -2.0 * dither_shift_RGB, grid_position );
|
|||
|
|
|||
|
//shift the color by dither_shift
|
|||
|
return color + dither_shift_RGB;
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
`,Hj=`
|
|||
|
float roughnessFactor = roughness;
|
|||
|
|
|||
|
#ifdef USE_ROUGHNESSMAP
|
|||
|
|
|||
|
vec4 texelRoughness = texture2D( roughnessMap, vUv );
|
|||
|
|
|||
|
// reads channel G, compatible with a combined OcclusionRoughnessMetallic (RGB) texture
|
|||
|
roughnessFactor *= texelRoughness.g;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Qj=`
|
|||
|
#ifdef USE_ROUGHNESSMAP
|
|||
|
|
|||
|
uniform sampler2D roughnessMap;
|
|||
|
|
|||
|
#endif
|
|||
|
`,jj=`
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
|
|||
|
#if NUM_DIR_LIGHTS > 0
|
|||
|
|
|||
|
uniform sampler2D directionalShadowMap[ NUM_DIR_LIGHTS ];
|
|||
|
varying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHTS ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_SPOT_LIGHTS > 0
|
|||
|
|
|||
|
uniform sampler2D spotShadowMap[ NUM_SPOT_LIGHTS ];
|
|||
|
varying vec4 vSpotShadowCoord[ NUM_SPOT_LIGHTS ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_POINT_LIGHTS > 0
|
|||
|
|
|||
|
uniform sampler2D pointShadowMap[ NUM_POINT_LIGHTS ];
|
|||
|
varying vec4 vPointShadowCoord[ NUM_POINT_LIGHTS ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
/*
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
// TODO (abelnation): create uniforms for area light shadows
|
|||
|
|
|||
|
#endif
|
|||
|
*/
|
|||
|
|
|||
|
float texture2DCompare( sampler2D depths, vec2 uv, float compare ) {
|
|||
|
|
|||
|
return step( compare, unpackRGBAToDepth( texture2D( depths, uv ) ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
float texture2DShadowLerp( sampler2D depths, vec2 size, vec2 uv, float compare ) {
|
|||
|
|
|||
|
const vec2 offset = vec2( 0.0, 1.0 );
|
|||
|
|
|||
|
vec2 texelSize = vec2( 1.0 ) / size;
|
|||
|
vec2 centroidUV = floor( uv * size + 0.5 ) / size;
|
|||
|
|
|||
|
float lb = texture2DCompare( depths, centroidUV + texelSize * offset.xx, compare );
|
|||
|
float lt = texture2DCompare( depths, centroidUV + texelSize * offset.xy, compare );
|
|||
|
float rb = texture2DCompare( depths, centroidUV + texelSize * offset.yx, compare );
|
|||
|
float rt = texture2DCompare( depths, centroidUV + texelSize * offset.yy, compare );
|
|||
|
|
|||
|
vec2 f = fract( uv * size + 0.5 );
|
|||
|
|
|||
|
float a = mix( lb, lt, f.y );
|
|||
|
float b = mix( rb, rt, f.y );
|
|||
|
float c = mix( a, b, f.x );
|
|||
|
|
|||
|
return c;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
float getShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowBias, float shadowRadius, vec4 shadowCoord ) {
|
|||
|
|
|||
|
float shadow = 1.0;
|
|||
|
|
|||
|
shadowCoord.xyz /= shadowCoord.w;
|
|||
|
shadowCoord.z += shadowBias;
|
|||
|
|
|||
|
// if ( something && something ) breaks ATI OpenGL shader compiler
|
|||
|
// if ( all( something, something ) ) using this instead
|
|||
|
|
|||
|
bvec4 inFrustumVec = bvec4 ( shadowCoord.x >= 0.0, shadowCoord.x <= 1.0, shadowCoord.y >= 0.0, shadowCoord.y <= 1.0 );
|
|||
|
bool inFrustum = all( inFrustumVec );
|
|||
|
|
|||
|
bvec2 frustumTestVec = bvec2( inFrustum, shadowCoord.z <= 1.0 );
|
|||
|
|
|||
|
bool frustumTest = all( frustumTestVec );
|
|||
|
|
|||
|
if ( frustumTest ) {
|
|||
|
|
|||
|
#if defined( SHADOWMAP_TYPE_PCF )
|
|||
|
|
|||
|
vec2 texelSize = vec2( 1.0 ) / shadowMapSize;
|
|||
|
|
|||
|
float dx0 = - texelSize.x * shadowRadius;
|
|||
|
float dy0 = - texelSize.y * shadowRadius;
|
|||
|
float dx1 = + texelSize.x * shadowRadius;
|
|||
|
float dy1 = + texelSize.y * shadowRadius;
|
|||
|
|
|||
|
shadow = (
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, 0.0 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, 0.0 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy1 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy1 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy1 ), shadowCoord.z )
|
|||
|
) * ( 1.0 / 9.0 );
|
|||
|
|
|||
|
#elif defined( SHADOWMAP_TYPE_PCF_SOFT )
|
|||
|
|
|||
|
vec2 texelSize = vec2( 1.0 ) / shadowMapSize;
|
|||
|
|
|||
|
float dx0 = - texelSize.x * shadowRadius;
|
|||
|
float dy0 = - texelSize.y * shadowRadius;
|
|||
|
float dx1 = + texelSize.x * shadowRadius;
|
|||
|
float dy1 = + texelSize.y * shadowRadius;
|
|||
|
|
|||
|
shadow = (
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx0, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( 0.0, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx1, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx0, 0.0 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy, shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx1, 0.0 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx0, dy1 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( 0.0, dy1 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx1, dy1 ), shadowCoord.z )
|
|||
|
) * ( 1.0 / 9.0 );
|
|||
|
|
|||
|
#else // no percentage-closer filtering:
|
|||
|
|
|||
|
shadow = texture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
return shadow;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// cubeToUV() maps a 3D direction vector suitable for cube texture mapping to a 2D
// vector suitable for 2D texture mapping. This code uses the following layout for the
// 2D texture:
//
// xzXZ
//  y Y
//
// Y - Positive y direction
// y - Negative y direction
// X - Positive x direction
// x - Negative x direction
// Z - Positive z direction
// z - Negative z direction
//
// Source and test bed:
// https://gist.github.com/tschw/da10c43c467ce8afd0c4
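//
// Worked example (treating texelSizeY as negligibly small): the +Z face center,
// v = vec3( 0.0, 0.0, 1.0 ), takes the absV.z branch and becomes planar = ( 4.0, 0.0 ),
// which the final scale/translate maps to UV ( 0.875, 0.75 ), the fourth square of the
// top row above. The -Z center keeps planar = ( 0.0, 0.0 ) and lands at UV ( 0.375, 0.75 ),
// the second square.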
vec2 cubeToUV( vec3 v, float texelSizeY ) {
|
|||
|
|
|||
|
// Number of texels to avoid at the edge of each square
|
|||
|
|
|||
|
vec3 absV = abs( v );
|
|||
|
|
|||
|
// Intersect unit cube
|
|||
|
|
|||
|
float scaleToCube = 1.0 / max( absV.x, max( absV.y, absV.z ) );
|
|||
|
absV *= scaleToCube;
|
|||
|
|
|||
|
// Apply scale to avoid seams
|
|||
|
|
|||
|
// two texels less per square (one texel will do for NEAREST)
|
|||
|
v *= scaleToCube * ( 1.0 - 2.0 * texelSizeY );
|
|||
|
|
|||
|
// Unwrap
|
|||
|
|
|||
|
// space: -1 ... 1 range for each square
|
|||
|
//
|
|||
|
// #X## dim := ( 4 , 2 )
|
|||
|
// # # center := ( 1 , 1 )
|
|||
|
|
|||
|
vec2 planar = v.xy;
|
|||
|
|
|||
|
float almostATexel = 1.5 * texelSizeY;
|
|||
|
float almostOne = 1.0 - almostATexel;
|
|||
|
|
|||
|
if ( absV.z >= almostOne ) {
|
|||
|
|
|||
|
if ( v.z > 0.0 )
|
|||
|
planar.x = 4.0 - v.x;
|
|||
|
|
|||
|
} else if ( absV.x >= almostOne ) {
|
|||
|
|
|||
|
float signX = sign( v.x );
|
|||
|
planar.x = v.z * signX + 2.0 * signX;
|
|||
|
|
|||
|
} else if ( absV.y >= almostOne ) {
|
|||
|
|
|||
|
float signY = sign( v.y );
|
|||
|
planar.x = v.x + 2.0 * signY + 2.0;
|
|||
|
planar.y = v.z * signY - 2.0;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// Transform to UV space
|
|||
|
|
|||
|
// scale := 0.5 / dim
|
|||
|
// translate := ( center + 0.5 ) / dim
|
|||
|
return vec2( 0.125, 0.25 ) * planar + vec2( 0.375, 0.75 );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
float getPointShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowBias, float shadowRadius, vec4 shadowCoord, float shadowCameraNear, float shadowCameraFar ) {
|
|||
|
|
|||
|
vec2 texelSize = vec2( 1.0 ) / ( shadowMapSize * vec2( 4.0, 2.0 ) );
|
|||
|
|
|||
|
// for point lights, the uniform @vShadowCoord is re-purposed to hold
|
|||
|
// the vector from the light to the world-space position of the fragment.
|
|||
|
vec3 lightToPosition = shadowCoord.xyz;
|
|||
|
|
|||
|
// dp = normalized distance from light to fragment position
|
|||
|
float dp = ( length( lightToPosition ) - shadowCameraNear ) / ( shadowCameraFar - shadowCameraNear ); // need to clamp?
|
|||
|
dp += shadowBias;
|
|||
|
|
|||
|
// bd3D = base direction 3D
|
|||
|
vec3 bd3D = normalize( lightToPosition );
|
|||
|
|
|||
|
#if defined( SHADOWMAP_TYPE_PCF ) || defined( SHADOWMAP_TYPE_PCF_SOFT )
|
|||
|
|
|||
|
vec2 offset = vec2( - 1, 1 ) * shadowRadius * texelSize.y;
|
|||
|
|
|||
|
return (
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyy, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyy, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyx, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyx, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxy, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxy, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxx, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxx, texelSize.y ), dp )
|
|||
|
) * ( 1.0 / 9.0 );
|
|||
|
|
|||
|
#else // no percentage-closer filtering
|
|||
|
|
|||
|
return texture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
`,Gj=`
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
|
|||
|
#if NUM_DIR_LIGHTS > 0
|
|||
|
|
|||
|
uniform mat4 directionalShadowMatrix[ NUM_DIR_LIGHTS ];
|
|||
|
varying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHTS ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_SPOT_LIGHTS > 0
|
|||
|
|
|||
|
uniform mat4 spotShadowMatrix[ NUM_SPOT_LIGHTS ];
|
|||
|
varying vec4 vSpotShadowCoord[ NUM_SPOT_LIGHTS ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_POINT_LIGHTS > 0
|
|||
|
|
|||
|
uniform mat4 pointShadowMatrix[ NUM_POINT_LIGHTS ];
|
|||
|
varying vec4 vPointShadowCoord[ NUM_POINT_LIGHTS ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
/*
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
// TODO (abelnation): uniforms for area light shadows
|
|||
|
|
|||
|
#endif
|
|||
|
*/
|
|||
|
|
|||
|
#endif
|
|||
|
`,zj=`
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
|
|||
|
#if NUM_DIR_LIGHTS > 0
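// the '#pragma unroll_loop' directive below is expanded at program build time (see the
// unroll helper later in this file), replacing '[ i ]' with literal indices so the uniform
// arrays are always subscripted with constants.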
#pragma unroll_loop
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
vDirectionalShadowCoord[ i ] = directionalShadowMatrix[ i ] * worldPosition;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_SPOT_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
vSpotShadowCoord[ i ] = spotShadowMatrix[ i ] * worldPosition;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_POINT_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
vPointShadowCoord[ i ] = pointShadowMatrix[ i ] * worldPosition;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
/*
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
// TODO (abelnation): update vAreaShadowCoord with area light info
|
|||
|
|
|||
|
#endif
|
|||
|
*/
|
|||
|
|
|||
|
#endif
|
|||
|
`,qj=`
|
|||
|
float getShadowMask() {
|
|||
|
|
|||
|
float shadow = 1.0;
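// start fully lit; each shadow-casting light below multiplies its own shadow factor into this mask.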
#ifdef USE_SHADOWMAP
|
|||
|
|
|||
|
#if NUM_DIR_LIGHTS > 0
|
|||
|
|
|||
|
DirectionalLight directionalLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
directionalLight = directionalLights[ i ];
|
|||
|
shadow *= bool( directionalLight.shadow ) ? getShadow( directionalShadowMap[ i ], directionalLight.shadowMapSize, directionalLight.shadowBias, directionalLight.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_SPOT_LIGHTS > 0
|
|||
|
|
|||
|
SpotLight spotLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
spotLight = spotLights[ i ];
|
|||
|
shadow *= bool( spotLight.shadow ) ? getShadow( spotShadowMap[ i ], spotLight.shadowMapSize, spotLight.shadowBias, spotLight.shadowRadius, vSpotShadowCoord[ i ] ) : 1.0;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_POINT_LIGHTS > 0
|
|||
|
|
|||
|
PointLight pointLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
pointLight = pointLights[ i ];
|
|||
|
shadow *= bool( pointLight.shadow ) ? getPointShadow( pointShadowMap[ i ], pointLight.shadowMapSize, pointLight.shadowBias, pointLight.shadowRadius, vPointShadowCoord[ i ], pointLight.shadowCameraNear, pointLight.shadowCameraFar ) : 1.0;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
/*
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
// TODO (abelnation): update shadow for Area light
|
|||
|
|
|||
|
#endif
|
|||
|
*/
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
return shadow;
|
|||
|
|
|||
|
}
|
|||
|
`,Vj=`
|
|||
|
#ifdef USE_SKINNING
|
|||
|
|
|||
|
mat4 boneMatX = getBoneMatrix( skinIndex.x );
|
|||
|
mat4 boneMatY = getBoneMatrix( skinIndex.y );
|
|||
|
mat4 boneMatZ = getBoneMatrix( skinIndex.z );
|
|||
|
mat4 boneMatW = getBoneMatrix( skinIndex.w );
|
|||
|
|
|||
|
#endif
|
|||
|
`,Wj=`
|
|||
|
#ifdef USE_SKINNING
|
|||
|
|
|||
|
uniform mat4 bindMatrix;
|
|||
|
uniform mat4 bindMatrixInverse;
|
|||
|
|
|||
|
#ifdef BONE_TEXTURE
|
|||
|
|
|||
|
uniform sampler2D boneTexture;
|
|||
|
uniform int boneTextureSize;
|
|||
|
|
|||
|
mat4 getBoneMatrix( const in float i ) {
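// each bone matrix is stored as 4 consecutive RGBA texels in the boneTextureSize x
// boneTextureSize float texture; j is the linear index of the matrix's first texel
// and ( x, y ) its texel coordinates.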
float j = i * 4.0;
|
|||
|
float x = mod( j, float( boneTextureSize ) );
|
|||
|
float y = floor( j / float( boneTextureSize ) );
|
|||
|
|
|||
|
float dx = 1.0 / float( boneTextureSize );
|
|||
|
float dy = 1.0 / float( boneTextureSize );
|
|||
|
|
|||
|
y = dy * ( y + 0.5 );
|
|||
|
|
|||
|
vec4 v1 = texture2D( boneTexture, vec2( dx * ( x + 0.5 ), y ) );
|
|||
|
vec4 v2 = texture2D( boneTexture, vec2( dx * ( x + 1.5 ), y ) );
|
|||
|
vec4 v3 = texture2D( boneTexture, vec2( dx * ( x + 2.5 ), y ) );
|
|||
|
vec4 v4 = texture2D( boneTexture, vec2( dx * ( x + 3.5 ), y ) );
|
|||
|
|
|||
|
mat4 bone = mat4( v1, v2, v3, v4 );
|
|||
|
|
|||
|
return bone;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
uniform mat4 boneMatrices[ MAX_BONES ];
|
|||
|
|
|||
|
mat4 getBoneMatrix( const in float i ) {
|
|||
|
|
|||
|
mat4 bone = boneMatrices[ int(i) ];
|
|||
|
return bone;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,Kj=`
|
|||
|
#ifdef USE_SKINNING
|
|||
|
|
|||
|
vec4 skinVertex = bindMatrix * vec4( transformed, 1.0 );
|
|||
|
|
|||
|
vec4 skinned = vec4( 0.0 );
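// accumulate the bind-space vertex transformed by each of the four influencing bones,
// weighted by skinWeight (the weights are expected to sum to 1.0).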
skinned += boneMatX * skinVertex * skinWeight.x;
|
|||
|
skinned += boneMatY * skinVertex * skinWeight.y;
|
|||
|
skinned += boneMatZ * skinVertex * skinWeight.z;
|
|||
|
skinned += boneMatW * skinVertex * skinWeight.w;
|
|||
|
|
|||
|
transformed = ( bindMatrixInverse * skinned ).xyz;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Yj=`
|
|||
|
#ifdef USE_SKINNING
|
|||
|
|
|||
|
mat4 skinMatrix = mat4( 0.0 );
|
|||
|
skinMatrix += skinWeight.x * boneMatX;
|
|||
|
skinMatrix += skinWeight.y * boneMatY;
|
|||
|
skinMatrix += skinWeight.z * boneMatZ;
|
|||
|
skinMatrix += skinWeight.w * boneMatW;
|
|||
|
skinMatrix = bindMatrixInverse * skinMatrix * bindMatrix;
|
|||
|
|
|||
|
objectNormal = vec4( skinMatrix * vec4( objectNormal, 0.0 ) ).xyz;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Xj=`
|
|||
|
float specularStrength;
|
|||
|
|
|||
|
#ifdef USE_SPECULARMAP
|
|||
|
|
|||
|
vec4 texelSpecular = texture2D( specularMap, vUv );
|
|||
|
specularStrength = texelSpecular.r;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
specularStrength = 1.0;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Jj=`
|
|||
|
#ifdef USE_SPECULARMAP
|
|||
|
|
|||
|
uniform sampler2D specularMap;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Zj=`
|
|||
|
#if defined( TONE_MAPPING )
|
|||
|
|
|||
|
gl_FragColor.rgb = toneMapping( gl_FragColor.rgb );
|
|||
|
|
|||
|
#endif
|
|||
|
`,$j=`
|
|||
|
#ifndef saturate
|
|||
|
#define saturate(a) clamp( a, 0.0, 1.0 )
|
|||
|
#endif
|
|||
|
|
|||
|
uniform float toneMappingExposure;
|
|||
|
uniform float toneMappingWhitePoint;
|
|||
|
|
|||
|
// exposure only
|
|||
|
vec3 LinearToneMapping( vec3 color ) {
|
|||
|
|
|||
|
return toneMappingExposure * color;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// source: https://www.cs.utah.edu/~reinhard/cdrom/
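// compresses the exposure-scaled color as x / ( 1 + x ): a value of 1.0 maps to 0.5 and
// highlights approach 1.0 asymptotically.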
vec3 ReinhardToneMapping( vec3 color ) {
|
|||
|
|
|||
|
color *= toneMappingExposure;
|
|||
|
return saturate( color / ( vec3( 1.0 ) + color ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// source: http://filmicgames.com/archives/75
|
|||
|
#define Uncharted2Helper( x ) max( ( ( x * ( 0.15 * x + 0.10 * 0.50 ) + 0.20 * 0.02 ) / ( x * ( 0.15 * x + 0.50 ) + 0.20 * 0.30 ) ) - 0.02 / 0.30, vec3( 0.0 ) )
|
|||
|
vec3 Uncharted2ToneMapping( vec3 color ) {
|
|||
|
|
|||
|
// John Hable's filmic operator from Uncharted 2 video game
|
|||
|
color *= toneMappingExposure;
|
|||
|
return saturate( Uncharted2Helper( color ) / Uncharted2Helper( vec3( toneMappingWhitePoint ) ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// source: http://filmicgames.com/archives/75
|
|||
|
vec3 OptimizedCineonToneMapping( vec3 color ) {
|
|||
|
|
|||
|
// optimized filmic operator by Jim Hejl and Richard Burgess-Dawson
|
|||
|
color *= toneMappingExposure;
|
|||
|
color = max( vec3( 0.0 ), color - 0.004 );
|
|||
|
return pow( ( color * ( 6.2 * color + 0.5 ) ) / ( color * ( 6.2 * color + 1.7 ) + 0.06 ), vec3( 2.2 ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// source: https://knarkowicz.wordpress.com/2016/01/06/aces-filmic-tone-mapping-curve/
|
|||
|
vec3 ACESFilmicToneMapping( vec3 color ) {
|
|||
|
|
|||
|
color *= toneMappingExposure;
|
|||
|
return saturate( ( color * ( 2.51 * color + 0.03 ) ) / ( color * ( 2.43 * color + 0.59 ) + 0.14 ) );
|
|||
|
|
|||
|
}
|
|||
|
`,eG=`
|
|||
|
#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP ) || defined( USE_EMISSIVEMAP ) || defined( USE_ROUGHNESSMAP ) || defined( USE_METALNESSMAP )
|
|||
|
|
|||
|
varying vec2 vUv;
|
|||
|
|
|||
|
#endif
|
|||
|
`,tG=`
|
|||
|
#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP ) || defined( USE_EMISSIVEMAP ) || defined( USE_ROUGHNESSMAP ) || defined( USE_METALNESSMAP )
|
|||
|
|
|||
|
varying vec2 vUv;
|
|||
|
uniform mat3 uvTransform;
|
|||
|
|
|||
|
#endif
|
|||
|
`,iG=`
|
|||
|
#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP ) || defined( USE_EMISSIVEMAP ) || defined( USE_ROUGHNESSMAP ) || defined( USE_METALNESSMAP )
|
|||
|
|
|||
|
vUv = ( uvTransform * vec3( uv, 1 ) ).xy;
|
|||
|
|
|||
|
#endif
|
|||
|
`,nG=`
|
|||
|
#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )
|
|||
|
|
|||
|
varying vec2 vUv2;
|
|||
|
|
|||
|
#endif
|
|||
|
`,aG=`
|
|||
|
#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )
|
|||
|
|
|||
|
attribute vec2 uv2;
|
|||
|
varying vec2 vUv2;
|
|||
|
|
|||
|
#endif
|
|||
|
`,sG=`
|
|||
|
#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )
|
|||
|
|
|||
|
vUv2 = uv2;
|
|||
|
|
|||
|
#endif
|
|||
|
`,rG=`
|
|||
|
#if defined( USE_ENVMAP ) || defined( DISTANCE ) || defined ( USE_SHADOWMAP )
|
|||
|
|
|||
|
vec4 worldPosition = modelMatrix * vec4( transformed, 1.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
`,oG=`
|
|||
|
uniform sampler2D t2D;
|
|||
|
|
|||
|
varying vec2 vUv;
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vec4 texColor = texture2D( t2D, vUv );
|
|||
|
|
|||
|
gl_FragColor = mapTexelToLinear( texColor );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,lG=`
|
|||
|
varying vec2 vUv;
|
|||
|
uniform mat3 uvTransform;
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vUv = ( uvTransform * vec3( uv, 1 ) ).xy;
|
|||
|
|
|||
|
gl_Position = vec4( position.xy, 1.0, 1.0 );
|
|||
|
|
|||
|
}
|
|||
|
`,uG=`
|
|||
|
uniform samplerCube tCube;
|
|||
|
uniform float tFlip;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
varying vec3 vWorldDirection;
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vec4 texColor = textureCube( tCube, vec3( tFlip * vWorldDirection.x, vWorldDirection.yz ) );
|
|||
|
|
|||
|
gl_FragColor = mapTexelToLinear( texColor );
|
|||
|
gl_FragColor.a *= opacity;
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,cG=`
|
|||
|
varying vec3 vWorldDirection;
|
|||
|
|
|||
|
#include <common>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vWorldDirection = transformDirection( position, modelMatrix );
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
|
|||
|
gl_Position.z = gl_Position.w; // set z to camera.far
|
|||
|
|
|||
|
}
|
|||
|
`,dG=`
|
|||
|
#if DEPTH_PACKING == 3200
|
|||
|
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( 1.0 );
|
|||
|
|
|||
|
#if DEPTH_PACKING == 3200
|
|||
|
|
|||
|
diffuseColor.a = opacity;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <map_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
|
|||
|
#if DEPTH_PACKING == 3200
|
|||
|
|
|||
|
gl_FragColor = vec4( vec3( 1.0 - gl_FragCoord.z ), opacity );
|
|||
|
|
|||
|
#elif DEPTH_PACKING == 3201
|
|||
|
|
|||
|
gl_FragColor = packDepthToRGBA( gl_FragCoord.z );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
`,fG=`
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
|
|||
|
#include <skinbase_vertex>
|
|||
|
|
|||
|
#ifdef USE_DISPLACEMENTMAP
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,hG=`
|
|||
|
#define DISTANCE
|
|||
|
|
|||
|
uniform vec3 referencePosition;
|
|||
|
uniform float nearDistance;
|
|||
|
uniform float farDistance;
|
|||
|
varying vec3 vWorldPosition;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main () {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( 1.0 );
|
|||
|
|
|||
|
#include <map_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
|
|||
|
float dist = length( vWorldPosition - referencePosition );
|
|||
|
dist = ( dist - nearDistance ) / ( farDistance - nearDistance );
|
|||
|
dist = saturate( dist ); // clamp to [ 0, 1 ]
|
|||
|
|
|||
|
gl_FragColor = packDepthToRGBA( dist );
|
|||
|
|
|||
|
}
|
|||
|
`,AG=`
|
|||
|
#define DISTANCE
|
|||
|
|
|||
|
varying vec3 vWorldPosition;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
|
|||
|
#include <skinbase_vertex>
|
|||
|
|
|||
|
#ifdef USE_DISPLACEMENTMAP
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
|
|||
|
vWorldPosition = worldPosition.xyz;
|
|||
|
|
|||
|
}
|
|||
|
`,gG=`
|
|||
|
uniform sampler2D tEquirect;
|
|||
|
|
|||
|
varying vec3 vWorldDirection;
|
|||
|
|
|||
|
#include <common>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vec3 direction = normalize( vWorldDirection );
|
|||
|
|
|||
|
vec2 sampleUV;
|
|||
|
|
|||
|
sampleUV.y = asin( clamp( direction.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
|
|||
|
|
|||
|
sampleUV.x = atan( direction.z, direction.x ) * RECIPROCAL_PI2 + 0.5;
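// equirectangular lookup: latitude from asin( direction.y ) scaled by 1/PI and longitude
// from atan( direction.z, direction.x ) scaled by 1/(2*PI), both remapped to [ 0, 1 ].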
vec4 texColor = texture2D( tEquirect, sampleUV );
|
|||
|
|
|||
|
gl_FragColor = mapTexelToLinear( texColor );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,pG=`
|
|||
|
varying vec3 vWorldDirection;
|
|||
|
|
|||
|
#include <common>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vWorldDirection = transformDirection( position, modelMatrix );
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,mG=`
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
uniform float dashSize;
|
|||
|
uniform float totalSize;
|
|||
|
|
|||
|
varying float vLineDistance;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
if ( mod( vLineDistance, totalSize ) > dashSize ) {
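// this fragment falls in the gap portion of the dash period, so it is discarded.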
discard;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
vec3 outgoingLight = vec3( 0.0 );
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
|
|||
|
outgoingLight = diffuseColor.rgb; // simple shader
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,_G=`
|
|||
|
uniform float scale;
|
|||
|
attribute float lineDistance;
|
|||
|
|
|||
|
varying float vLineDistance;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <color_vertex>
|
|||
|
|
|||
|
vLineDistance = scale * lineDistance;
|
|||
|
|
|||
|
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
|
|||
|
gl_Position = projectionMatrix * mvPosition;
|
|||
|
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,FG=`
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <uv2_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <aomap_pars_fragment>
|
|||
|
#include <lightmap_pars_fragment>
|
|||
|
#include <envmap_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <specularmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
#include <specularmap_fragment>
|
|||
|
|
|||
|
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
|
|||
|
|
|||
|
// accumulation (baked indirect lighting only)
|
|||
|
#ifdef USE_LIGHTMAP
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse += texture2D( lightMap, vUv2 ).xyz * lightMapIntensity;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse += vec3( 1.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
// modulation
|
|||
|
#include <aomap_fragment>
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse *= diffuseColor.rgb;
|
|||
|
|
|||
|
vec3 outgoingLight = reflectedLight.indirectDiffuse;
|
|||
|
|
|||
|
#include <envmap_fragment>
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,bG=`
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <uv2_pars_vertex>
|
|||
|
#include <envmap_pars_vertex>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
#include <uv2_vertex>
|
|||
|
#include <color_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
|
|||
|
#ifdef USE_ENVMAP
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
#include <envmap_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,vG=`
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform vec3 emissive;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
varying vec3 vLightFront;
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
varying vec3 vLightBack;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <dithering_pars_fragment>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <uv2_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <aomap_pars_fragment>
|
|||
|
#include <lightmap_pars_fragment>
|
|||
|
#include <emissivemap_pars_fragment>
|
|||
|
#include <envmap_pars_fragment>
|
|||
|
#include <bsdfs>
|
|||
|
#include <lights_pars_begin>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <shadowmap_pars_fragment>
|
|||
|
#include <shadowmask_pars_fragment>
|
|||
|
#include <specularmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
|
|||
|
vec3 totalEmissiveRadiance = emissive;
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
#include <specularmap_fragment>
|
|||
|
#include <emissivemap_fragment>
|
|||
|
|
|||
|
// accumulation
|
|||
|
reflectedLight.indirectDiffuse = getAmbientLightIrradiance( ambientLightColor );
|
|||
|
|
|||
|
#include <lightmap_fragment>
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse *= BRDF_Diffuse_Lambert( diffuseColor.rgb );
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
reflectedLight.directDiffuse = ( gl_FrontFacing ) ? vLightFront : vLightBack;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
reflectedLight.directDiffuse = vLightFront;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
reflectedLight.directDiffuse *= BRDF_Diffuse_Lambert( diffuseColor.rgb ) * getShadowMask();
|
|||
|
|
|||
|
// modulation
|
|||
|
#include <aomap_fragment>
|
|||
|
|
|||
|
vec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;
|
|||
|
|
|||
|
#include <envmap_fragment>
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <dithering_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,yG=`
|
|||
|
#define LAMBERT
|
|||
|
|
|||
|
varying vec3 vLightFront;
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
varying vec3 vLightBack;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <uv2_pars_vertex>
|
|||
|
#include <envmap_pars_vertex>
|
|||
|
#include <bsdfs>
|
|||
|
#include <lights_pars_begin>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <shadowmap_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
#include <uv2_vertex>
|
|||
|
#include <color_vertex>
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <envmap_vertex>
|
|||
|
#include <lights_lambert_vertex>
|
|||
|
#include <shadowmap_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,wG=`
|
|||
|
#define MATCAP
|
|||
|
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform float opacity;
|
|||
|
uniform sampler2D matcap;
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <bumpmap_pars_fragment>
|
|||
|
#include <normalmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
#include <normal_fragment_begin>
|
|||
|
#include <normal_fragment_maps>
|
|||
|
|
|||
|
vec3 viewDir = normalize( vViewPosition );
|
|||
|
vec3 x = normalize( vec3( viewDir.z, 0.0, - viewDir.x ) );
|
|||
|
vec3 y = cross( viewDir, x );
|
|||
|
vec2 uv = vec2( dot( x, normal ), dot( y, normal ) ) * 0.495 + 0.5; // 0.495 to remove artifacts caused by undersized matcap disks
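// x and y form an orthonormal basis perpendicular to viewDir, so uv is the surface normal
// projected onto the view plane and remapped from [ -1, 1 ] to (approximately) [ 0, 1 ].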
#ifdef USE_MATCAP
|
|||
|
|
|||
|
vec4 matcapColor = texture2D( matcap, uv );
|
|||
|
matcapColor = matcapTexelToLinear( matcapColor );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec4 matcapColor = vec4( 1.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
vec3 outgoingLight = diffuseColor.rgb * matcapColor.rgb;
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,CG=`
|
|||
|
#define MATCAP
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
|
|||
|
|
|||
|
vNormal = normalize( transformedNormal );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
vViewPosition = - mvPosition.xyz;
|
|||
|
|
|||
|
}
|
|||
|
`,EG=`
|
|||
|
#define PHONG
|
|||
|
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform vec3 emissive;
|
|||
|
uniform vec3 specular;
|
|||
|
uniform float shininess;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <dithering_pars_fragment>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <uv2_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <aomap_pars_fragment>
|
|||
|
#include <lightmap_pars_fragment>
|
|||
|
#include <emissivemap_pars_fragment>
|
|||
|
#include <envmap_pars_fragment>
|
|||
|
#include <gradientmap_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <bsdfs>
|
|||
|
#include <lights_pars_begin>
|
|||
|
#include <lights_phong_pars_fragment>
|
|||
|
#include <shadowmap_pars_fragment>
|
|||
|
#include <bumpmap_pars_fragment>
|
|||
|
#include <normalmap_pars_fragment>
|
|||
|
#include <specularmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
|
|||
|
vec3 totalEmissiveRadiance = emissive;
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
#include <specularmap_fragment>
|
|||
|
#include <normal_fragment_begin>
|
|||
|
#include <normal_fragment_maps>
|
|||
|
#include <emissivemap_fragment>
|
|||
|
|
|||
|
// accumulation
|
|||
|
#include <lights_phong_fragment>
|
|||
|
#include <lights_fragment_begin>
|
|||
|
#include <lights_fragment_maps>
|
|||
|
#include <lights_fragment_end>
|
|||
|
|
|||
|
// modulation
|
|||
|
#include <aomap_fragment>
|
|||
|
|
|||
|
vec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;
|
|||
|
|
|||
|
#include <envmap_fragment>
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <dithering_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,kG=`
|
|||
|
#define PHONG
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <uv2_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <envmap_pars_vertex>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <shadowmap_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
#include <uv2_vertex>
|
|||
|
#include <color_vertex>
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
|
|||
|
|
|||
|
vNormal = normalize( transformedNormal );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
|
|||
|
vViewPosition = - mvPosition.xyz;
|
|||
|
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <envmap_vertex>
|
|||
|
#include <shadowmap_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,BG=`
|
|||
|
#define PHYSICAL
|
|||
|
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform vec3 emissive;
|
|||
|
uniform float roughness;
|
|||
|
uniform float metalness;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
uniform float clearCoat;
|
|||
|
uniform float clearCoatRoughness;
|
|||
|
#endif
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <dithering_pars_fragment>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <uv2_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <aomap_pars_fragment>
|
|||
|
#include <lightmap_pars_fragment>
|
|||
|
#include <emissivemap_pars_fragment>
|
|||
|
#include <bsdfs>
|
|||
|
#include <cube_uv_reflection_fragment>
|
|||
|
#include <envmap_pars_fragment>
|
|||
|
#include <envmap_physical_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <lights_pars_begin>
|
|||
|
#include <lights_physical_pars_fragment>
|
|||
|
#include <shadowmap_pars_fragment>
|
|||
|
#include <bumpmap_pars_fragment>
|
|||
|
#include <normalmap_pars_fragment>
|
|||
|
#include <roughnessmap_pars_fragment>
|
|||
|
#include <metalnessmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
|
|||
|
vec3 totalEmissiveRadiance = emissive;
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
#include <roughnessmap_fragment>
|
|||
|
#include <metalnessmap_fragment>
|
|||
|
#include <normal_fragment_begin>
|
|||
|
#include <normal_fragment_maps>
|
|||
|
#include <emissivemap_fragment>
|
|||
|
|
|||
|
// accumulation
|
|||
|
#include <lights_physical_fragment>
|
|||
|
#include <lights_fragment_begin>
|
|||
|
#include <lights_fragment_maps>
|
|||
|
#include <lights_fragment_end>
|
|||
|
|
|||
|
// modulation
|
|||
|
#include <aomap_fragment>
|
|||
|
|
|||
|
vec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <dithering_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,SG=`
|
|||
|
#define PHYSICAL
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <uv2_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <shadowmap_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
#include <uv2_vertex>
|
|||
|
#include <color_vertex>
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
|
|||
|
|
|||
|
vNormal = normalize( transformedNormal );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
|
|||
|
vViewPosition = - mvPosition.xyz;
|
|||
|
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <shadowmap_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,DG=`
|
|||
|
#define NORMAL
|
|||
|
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || ( defined( USE_NORMALMAP ) && ! defined( OBJECTSPACE_NORMALMAP ) )
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <packing>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <bumpmap_pars_fragment>
|
|||
|
#include <normalmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <normal_fragment_begin>
|
|||
|
#include <normal_fragment_maps>
|
|||
|
|
|||
|
gl_FragColor = vec4( packNormalToRGB( normal ), opacity );
|
|||
|
|
|||
|
}
|
|||
|
`,xG=`
|
|||
|
#define NORMAL
|
|||
|
|
|||
|
#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || ( defined( USE_NORMALMAP ) && ! defined( OBJECTSPACE_NORMALMAP ) )
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
|
|||
|
|
|||
|
vNormal = normalize( transformedNormal );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
|
|||
|
#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || ( defined( USE_NORMALMAP ) && ! defined( OBJECTSPACE_NORMALMAP ) )
|
|||
|
|
|||
|
vViewPosition = - mvPosition.xyz;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
`,TG=`
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <map_particle_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec3 outgoingLight = vec3( 0.0 );
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_particle_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
|
|||
|
outgoingLight = diffuseColor.rgb;
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,IG=`
|
|||
|
uniform float size;
|
|||
|
uniform float scale;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <color_vertex>
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
|
|||
|
gl_PointSize = size;
|
|||
|
|
|||
|
#ifdef USE_SIZEATTENUATION
|
|||
|
|
|||
|
bool isPerspective = ( projectionMatrix[ 2 ][ 3 ] == - 1.0 );
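// a perspective projection matrix has -1.0 at [ 2 ][ 3 ]; only then is the point size
// attenuated by the view-space depth ( - mvPosition.z ) below.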
if ( isPerspective ) gl_PointSize *= ( scale / - mvPosition.z );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,PG=`
|
|||
|
uniform vec3 color;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <bsdfs>
|
|||
|
#include <lights_pars_begin>
|
|||
|
#include <shadowmap_pars_fragment>
|
|||
|
#include <shadowmask_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
gl_FragColor = vec4( color, opacity * ( 1.0 - getShadowMask() ) );
|
|||
|
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,MG=`
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <shadowmap_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <shadowmap_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,LG=`
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec3 outgoingLight = vec3( 0.0 );
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
|
|||
|
outgoingLight = diffuseColor.rgb;
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,RG=`
|
|||
|
uniform float rotation;
|
|||
|
uniform vec2 center;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
|
|||
|
vec4 mvPosition = modelViewMatrix * vec4( 0.0, 0.0, 0.0, 1.0 );
|
|||
|
|
|||
|
vec2 scale;
|
|||
|
scale.x = length( vec3( modelMatrix[ 0 ].x, modelMatrix[ 0 ].y, modelMatrix[ 0 ].z ) );
|
|||
|
scale.y = length( vec3( modelMatrix[ 1 ].x, modelMatrix[ 1 ].y, modelMatrix[ 1 ].z ) );
|
|||
|
|
|||
|
#ifndef USE_SIZEATTENUATION
|
|||
|
|
|||
|
bool isPerspective = ( projectionMatrix[ 2 ][ 3 ] == - 1.0 );
|
|||
|
|
|||
|
if ( isPerspective ) scale *= - mvPosition.z;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
vec2 alignedPosition = ( position.xy - ( center - vec2( 0.5 ) ) ) * scale;
|
|||
|
|
|||
|
vec2 rotatedPosition;
|
|||
|
rotatedPosition.x = cos( rotation ) * alignedPosition.x - sin( rotation ) * alignedPosition.y;
|
|||
|
rotatedPosition.y = sin( rotation ) * alignedPosition.x + cos( rotation ) * alignedPosition.y;
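// standard 2D rotation of the billboard-space offset by 'rotation' radians before it is
// added to the view-space center below.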
mvPosition.xy += rotatedPosition;
|
|||
|
|
|||
|
gl_Position = projectionMatrix * mvPosition;
|
|||
|
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`;var Qt={alphamap_fragment:kQ,alphamap_pars_fragment:BQ,alphatest_fragment:SQ,aomap_fragment:DQ,aomap_pars_fragment:xQ,begin_vertex:TQ,beginnormal_vertex:IQ,bsdfs:PQ,bumpmap_pars_fragment:MQ,clipping_planes_fragment:LQ,clipping_planes_pars_fragment:RQ,clipping_planes_pars_vertex:UQ,clipping_planes_vertex:OQ,color_fragment:NQ,color_pars_fragment:HQ,color_pars_vertex:QQ,color_vertex:jQ,common:GQ,cube_uv_reflection_fragment:zQ,defaultnormal_vertex:qQ,displacementmap_pars_vertex:VQ,displacementmap_vertex:WQ,emissivemap_fragment:KQ,emissivemap_pars_fragment:YQ,encodings_fragment:XQ,encodings_pars_fragment:JQ,envmap_fragment:ZQ,envmap_pars_fragment:$Q,envmap_pars_vertex:ej,envmap_physical_pars_fragment:dj,envmap_vertex:tj,fog_vertex:ij,fog_pars_vertex:nj,fog_fragment:aj,fog_pars_fragment:sj,gradientmap_pars_fragment:rj,lightmap_fragment:oj,lightmap_pars_fragment:lj,lights_lambert_vertex:uj,lights_pars_begin:cj,lights_phong_fragment:fj,lights_phong_pars_fragment:hj,lights_physical_fragment:Aj,lights_physical_pars_fragment:gj,lights_fragment_begin:pj,lights_fragment_maps:mj,lights_fragment_end:_j,logdepthbuf_fragment:Fj,logdepthbuf_pars_fragment:bj,logdepthbuf_pars_vertex:vj,logdepthbuf_vertex:yj,map_fragment:wj,map_pars_fragment:Cj,map_particle_fragment:Ej,map_particle_pars_fragment:kj,metalnessmap_fragment:Bj,metalnessmap_pars_fragment:Sj,morphnormal_vertex:Dj,morphtarget_pars_vertex:xj,morphtarget_vertex:Tj,normal_fragment_begin:Ij,normal_fragment_maps:Pj,normalmap_pars_fragment:Mj,packing:Lj,premultiplied_alpha_fragment:Rj,project_vertex:Uj,dithering_fragment:Oj,dithering_pars_fragment:Nj,roughnessmap_fragment:Hj,roughnessmap_pars_fragment:Qj,shadowmap_pars_fragment:jj,shadowmap_pars_vertex:Gj,shadowmap_vertex:zj,shadowmask_pars_fragment:qj,skinbase_vertex:Vj,skinning_pars_vertex:Wj,skinning_vertex:Kj,skinnormal_vertex:Yj,specularmap_fragment:Xj,specularmap_pars_fragment:Jj,tonemapping_fragment:Zj,tonemapping_pars_fragment:$j,uv_pars_fragment:eG,uv_pars_vertex:tG,uv_vertex:iG,uv2_pars_fragment:nG,uv2_pars_vertex:aG,uv2_vertex:sG,worldpos_vertex:rG,background_frag:oG,background_vert:lG,cube_frag:uG,cube_vert:cG,depth_frag:dG,depth_vert:fG,distanceRGBA_frag:hG,distanceRGBA_vert:AG,equirect_frag:gG,equirect_vert:pG,linedashed_frag:mG,linedashed_vert:_G,meshbasic_frag:FG,meshbasic_vert:bG,meshlambert_frag:vG,meshlambert_vert:yG,meshmatcap_frag:wG,meshmatcap_vert:CG,meshphong_frag:EG,meshphong_vert:kG,meshphysical_frag:BG,meshphysical_vert:SG,normal_frag:DG,normal_vert:xG,points_frag:TG,points_vert:IG,shadow_frag:PG,shadow_vert:MG,sprite_frag:LG,sprite_vert:RG};function uc(e){var t={};for(var i in e){t[i]={};for(var n in e[i]){var a=e[i][n];a&&(a.isColor||a.isMatrix3||a.isMatrix4||a.isVector2||a.isVector3||a.isVector4||a.isTexture)?t[i][n]=a.clone():Array.isArray(a)?t[i][n]=a.slice():t[i][n]=a}}return t}function Qn(e){for(var t={},i=0;i<e.length;i++){var n=uc(e[i]);for(var a in n)t[a]=n[a]}return t}var 
UG={aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush
|
|||
|
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
|
|||
|
}`,this.fragmentShader=`void main() {
|
|||
|
gl_FragColor = vec4( 1.0, 0.0, 0.0, 1.0 );
|
|||
|
}`,this.linewidth=1,this.wireframe=!1,this.wireframeLinewidth=1,this.fog=!1,this.lights=!1,this.clipping=!1,this.skinning=!1,this.morphTargets=!1,this.morphNormals=!1,this.extensions={derivatives:!1,fragDepth:!1,drawBuffers:!1,shaderTextureLOD:!1},this.defaultAttributeValues={color:[1,1,1],uv:[0,0],uv2:[0,0]},this.index0AttributeName=void 0,this.uniformsNeedUpdate=!1,e!==void 0&&(e.attributes!==void 0&&console.error("THREE.ShaderMaterial: attributes should now be defined in THREE.BufferGeometry instead."),this.setValues(e))}Kn.prototype=Object.create(St.prototype);Kn.prototype.constructor=Kn;Kn.prototype.isShaderMaterial=!0;Kn.prototype.copy=function(e){return St.prototype.copy.call(this,e),this.fragmentShader=e.fragmentShader,this.vertexShader=e.vertexShader,this.uniforms=uc(e.uniforms),this.defines=Object.assign({},e.defines),this.wireframe=e.wireframe,this.wireframeLinewidth=e.wireframeLinewidth,this.lights=e.lights,this.clipping=e.clipping,this.skinning=e.skinning,this.morphTargets=e.morphTargets,this.morphNormals=e.morphNormals,this.extensions=e.extensions,this};Kn.prototype.toJSON=function(e){var t=St.prototype.toJSON.call(this,e);t.uniforms={};for(var i in this.uniforms){var n=this.uniforms[i],a=n.value;a&&a.isTexture?t.uniforms[i]={type:"t",value:a.toJSON(e).uuid}:a&&a.isColor?t.uniforms[i]={type:"c",value:a.getHex()}:a&&a.isVector2?t.uniforms[i]={type:"v2",value:a.toArray()}:a&&a.isVector3?t.uniforms[i]={type:"v3",value:a.toArray()}:a&&a.isVector4?t.uniforms[i]={type:"v4",value:a.toArray()}:a&&a.isMatrix3?t.uniforms[i]={type:"m3",value:a.toArray()}:a&&a.isMatrix4?t.uniforms[i]={type:"m4",value:a.toArray()}:t.uniforms[i]={value:a}}Object.keys(this.defines).length>0&&(t.defines=this.defines),t.vertexShader=this.vertexShader,t.fragmentShader=this.fragmentShader;var s={};for(var r in this.extensions)this.extensions[r]===!0&&(s[r]=!0);return Object.keys(s).length>0&&(t.extensions=s),t};function jl(e,t){this.origin=e!==void 0?e:new ee,this.direction=t!==void 0?t:new ee}Object.assign(jl.prototype,{set:function(e,t){return this.origin.copy(e),this.direction.copy(t),this},clone:function(){return new this.constructor().copy(this)},copy:function(e){return this.origin.copy(e.origin),this.direction.copy(e.direction),this},at:function(e,t){return t===void 0&&(console.warn("THREE.Ray: .at() target is now required"),t=new ee),t.copy(this.direction).multiplyScalar(e).add(this.origin)},lookAt:function(e){return this.direction.copy(e).sub(this.origin).normalize(),this},recast:function(){var e=new ee;return function(i){return this.origin.copy(this.at(i,e)),this}}(),closestPointToPoint:function(e,t){t===void 0&&(console.warn("THREE.Ray: .closestPointToPoint() target is now required"),t=new ee),t.subVectors(e,this.origin);var i=t.dot(this.direction);return i<0?t.copy(this.origin):t.copy(this.direction).multiplyScalar(i).add(this.origin)},distanceToPoint:function(e){return Math.sqrt(this.distanceSqToPoint(e))},distanceSqToPoint:function(){var e=new ee;return function(i){var n=e.subVectors(i,this.origin).dot(this.direction);return n<0?this.origin.distanceToSquared(i):(e.copy(this.direction).multiplyScalar(n).add(this.origin),e.distanceToSquared(i))}}(),distanceSqToSegment:function(){var e=new ee,t=new ee,i=new ee;return function(a,s,r,o){e.copy(a).add(s).multiplyScalar(.5),t.copy(s).sub(a).normalize(),i.copy(this.origin).sub(e);var 
l=a.distanceTo(s)*.5,u=-this.direction.dot(t),f=i.dot(this.direction),h=-i.dot(t),A=i.lengthSq(),p=Math.abs(1-u*u),F,y,E,w;if(p>0)if(F=u*h-f,y=u*f-h,w=l*p,F>=0)if(y>=-w)if(y<=w){var C=1/p;F*=C,y*=C,E=F*(F+u*y+2*f)+y*(u*F+y+2*h)+A}else y=l,F=Math.max(0,-(u*y+f)),E=-F*F+y*(y+2*h)+A;else y=-l,F=Math.max(0,-(u*y+f)),E=-F*F+y*(y+2*h)+A;else y<=-w?(F=Math.max(0,-(-u*l+f)),y=F>0?-l:Math.min(Math.max(-l,-h),l),E=-F*F+y*(y+2*h)+A):y<=w?(F=0,y=Math.min(Math.max(-l,-h),l),E=y*(y+2*h)+A):(F=Math.max(0,-(u*l+f)),y=F>0?l:Math.min(Math.max(-l,-h),l),E=-F*F+y*(y+2*h)+A);else y=u>0?-l:l,F=Math.max(0,-(u*y+f)),E=-F*F+y*(y+2*h)+A;return r&&r.copy(this.direction).multiplyScalar(F).add(this.origin),o&&o.copy(t).multiplyScalar(y
`),i=0;i<t.length;i++)t[i]=i+1+": "+t[i];return t.join(`
`)}function qb(e,t,i){var n=e.createShader(t);return e.shaderSource(n,i),e.compileShader(n),e.getShaderParameter(n,e.COMPILE_STATUS)===!1&&console.error("THREE.WebGLShader: Shader couldn't compile."),e.getShaderInfoLog(n)!==""&&console.warn("THREE.WebGLShader: gl.getShaderInfoLog()",t===e.VERTEX_SHADER?"vertex":"fragment",e.getShaderInfoLog(n),kz(i)),n}var Bz=0;function d4(e){switch(e){case $f:return["Linear","( value )"];case mQ:return["sRGB","( value )"];case _Q:return["RGBE","( value )"];case FQ:return["RGBM","( value, 7.0 )"];case bQ:return["RGBM","( value, 16.0 )"];case vQ:return["RGBD","( value, 256.0 )"];case Z3:return["Gamma","( value, float( GAMMA_FACTOR ) )"];default:throw new Error("unsupported encoding: "+e)}}function Ld(e,t){var i=d4(t);return"vec4 "+e+"( vec4 value ) { return "+i[0]+"ToLinear"+i[1]+"; }"}function Sz(e,t){var i=d4(t);return"vec4 "+e+"( vec4 value ) { return LinearTo"+i[0]+i[1]+"; }"}function Dz(e,t){var i;switch(t){case V3:i="Linear";break;case UH:i="Reinhard";break;case OH:i="Uncharted2";break;case NH:i="OptimizedCineon";break;case HH:i="ACESFilmic";break;default:throw new Error("unsupported toneMapping: "+t)}return"vec3 "+e+"( vec3 color ) { return "+i+"ToneMapping( color ); }"}function xz(e,t,i){e=e||{};var n=[e.derivatives||t.envMapCubeUV||t.bumpMap||t.normalMap&&!t.objectSpaceNormalMap||t.flatShading?"#extension GL_OES_standard_derivatives : enable":"",(e.fragDepth||t.logarithmicDepthBuffer)&&i.get("EXT_frag_depth")?"#extension GL_EXT_frag_depth : enable":"",e.drawBuffers&&i.get("WEBGL_draw_buffers")?"#extension GL_EXT_draw_buffers : require":"",(e.shaderTextureLOD||t.envMap)&&i.get("EXT_shader_texture_lod")?"#extension GL_EXT_shader_texture_lod : enable":""];return n.filter(_u).join(`
`)}function Tz(e){var t=[];for(var i in e){var n=e[i];n!==!1&&t.push("#define "+i+" "+n)}return t.join(`
`)}function Iz(e,t){for(var i={},n=e.getProgramParameter(t,e.ACTIVE_ATTRIBUTES),a=0;a<n;a++){var s=e.getActiveAttrib(t,a),r=s.name;i[r]=e.getAttribLocation(t,r)}return i}function _u(e){return e!==""}function Vb(e,t){return e.replace(/NUM_DIR_LIGHTS/g,t.numDirLights).replace(/NUM_SPOT_LIGHTS/g,t.numSpotLights).replace(/NUM_RECT_AREA_LIGHTS/g,t.numRectAreaLights).replace(/NUM_POINT_LIGHTS/g,t.numPointLights).replace(/NUM_HEMI_LIGHTS/g,t.numHemiLights)}function Wb(e,t){return e.replace(/NUM_CLIPPING_PLANES/g,t.numClippingPlanes).replace(/UNION_CLIPPING_PLANES/g,t.numClippingPlanes-t.numClipIntersection)}function Bm(e){var t=/^[ \t]*#include +<([\w\d./]+)>/gm;function i(n,a){var s=Qt[a];if(s===void 0)throw new Error("Can not resolve #include <"+a+">");return Bm(s)}return e.replace(t,i)}function Kb(e){var t=/#pragma unroll_loop[\s]+?for \( int i \= (\d+)\; i < (\d+)\; i \+\+ \) \{([\s\S]+?)(?=\})\}/g;function i(n,a,s,r){for(var o="",l=parseInt(a);l<parseInt(s);l++)o+=r.replace(/\[ i \]/g,"[ "+l+" ]");return o}return e.replace(t,i)}function Pz(e,t,i,n,a,s,r){var o=e.context,l=n.defines,u=a.vertexShader,f=a.fragmentShader,h="SHADOWMAP_TYPE_BASIC";s.shadowMapType===N3?h="SHADOWMAP_TYPE_PCF":s.shadowMapType===pH&&(h="SHADOWMAP_TYPE_PCF_SOFT");var A="ENVMAP_TYPE_CUBE",p="ENVMAP_MODE_REFLECTION",F="ENVMAP_BLENDING_MULTIPLY";if(s.envMap){switch(n.envMap.mapping){case C1:case Fm:A="ENVMAP_TYPE_CUBE";break;case E1:case k1:A="ENVMAP_TYPE_CUBE_UV";break;case W3:case bm:A="ENVMAP_TYPE_EQUIREC";break;case K3:A="ENVMAP_TYPE_SPHERE";break}switch(n.envMap.mapping){case Fm:case bm:p="ENVMAP_MODE_REFRACTION";break}switch(n.combine){case SA:F="ENVMAP_BLENDING_MULTIPLY";break;case LH:F="ENVMAP_BLENDING_MIX";break;case RH:F="ENVMAP_BLENDING_ADD";break}}var y=e.gammaFactor>0?e.gammaFactor:1,E=r.isWebGL2?"":xz(n.extensions,s,t),w=Tz(l),C=o.createProgram(),B,S;if(n.isRawShaderMaterial?(B=[w].filter(_u).join(`
`),B.length>0&&(B+=`
`),S=[E,w].filter(_u).join(`
`),S.length>0&&(S+=`
`)):(B=["precision "+s.precision+" float;","precision "+s.precision+" int;","#define SHADER_NAME "+a.name,w,s.supportsVertexTextures?"#define VERTEX_TEXTURES":"","#define GAMMA_FACTOR "+y,"#define MAX_BONES "+s.maxBones,s.useFog&&s.fog?"#define USE_FOG":"",s.useFog&&s.fogExp?"#define FOG_EXP2":"",s.map?"#define USE_MAP":"",s.envMap?"#define USE_ENVMAP":"",s.envMap?"#define "+p:"",s.lightMap?"#define USE_LIGHTMAP":"",s.aoMap?"#define USE_AOMAP":"",s.emissiveMap?"#define USE_EMISSIVEMAP":"",s.bumpMap?"#define USE_BUMPMAP":"",s.normalMap?"#define USE_NORMALMAP":"",s.normalMap&&s.objectSpaceNormalMap?"#define OBJECTSPACE_NORMALMAP":"",s.displacementMap&&s.supportsVertexTextures?"#define USE_DISPLACEMENTMAP":"",s.specularMap?"#define USE_SPECULARMAP":"",s.roughnessMap?"#define USE_ROUGHNESSMAP":"",s.metalnessMap?"#define USE_METALNESSMAP":"",s.alphaMap?"#define USE_ALPHAMAP":"",s.vertexColors?"#define USE_COLOR":"",s.flatShading?"#define FLAT_SHADED":"",s.skinning?"#define USE_SKINNING":"",s.useVertexTexture?"#define BONE_TEXTURE":"",s.morphTargets?"#define USE_MORPHTARGETS":"",s.morphNormals&&s.flatShading===!1?"#define USE_MORPHNORMALS":"",s.doubleSided?"#define DOUBLE_SIDED":"",s.flipSided?"#define FLIP_SIDED":"",s.shadowMapEnabled?"#define USE_SHADOWMAP":"",s.shadowMapEnabled?"#define "+h:"",s.sizeAttenuation?"#define USE_SIZEATTENUATION":"",s.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"",s.logarithmicDepthBuffer&&(r.isWebGL2||t.get("EXT_frag_depth"))?"#define USE_LOGDEPTHBUF_EXT":"","uniform mat4 modelMatrix;","uniform mat4 modelViewMatrix;","uniform mat4 projectionMatrix;","uniform mat4 viewMatrix;","uniform mat3 normalMatrix;","uniform vec3 cameraPosition;","attribute vec3 position;","attribute vec3 normal;","attribute vec2 uv;","#ifdef USE_COLOR"," attribute vec3 color;","#endif","#ifdef USE_MORPHTARGETS"," attribute vec3 morphTarget0;"," attribute vec3 morphTarget1;"," attribute vec3 morphTarget2;"," attribute vec3 morphTarget3;"," #ifdef USE_MORPHNORMALS"," attribute vec3 morphNormal0;"," attribute vec3 morphNormal1;"," attribute vec3 morphNormal2;"," attribute vec3 morphNormal3;"," #else"," attribute vec3 morphTarget4;"," attribute vec3 morphTarget5;"," attribute vec3 morphTarget6;"," attribute vec3 morphTarget7;"," #endif","#endif","#ifdef USE_SKINNING"," attribute vec4 skinIndex;"," attribute vec4 skinWeight;","#endif",`
`].filter(_u).join(`
`),S=[E,"precision "+s.precision+" float;","precision "+s.precision+" int;","#define SHADER_NAME "+a.name,w,s.alphaTest?"#define ALPHATEST "+s.alphaTest+(s.alphaTest%1?"":".0"):"","#define GAMMA_FACTOR "+y,s.useFog&&s.fog?"#define USE_FOG":"",s.useFog&&s.fogExp?"#define FOG_EXP2":"",s.map?"#define USE_MAP":"",s.matcap?"#define USE_MATCAP":"",s.envMap?"#define USE_ENVMAP":"",s.envMap?"#define "+A:"",s.envMap?"#define "+p:"",s.envMap?"#define "+F:"",s.lightMap?"#define USE_LIGHTMAP":"",s.aoMap?"#define USE_AOMAP":"",s.emissiveMap?"#define USE_EMISSIVEMAP":"",s.bumpMap?"#define USE_BUMPMAP":"",s.normalMap?"#define USE_NORMALMAP":"",s.normalMap&&s.objectSpaceNormalMap?"#define OBJECTSPACE_NORMALMAP":"",s.specularMap?"#define USE_SPECULARMAP":"",s.roughnessMap?"#define USE_ROUGHNESSMAP":"",s.metalnessMap?"#define USE_METALNESSMAP":"",s.alphaMap?"#define USE_ALPHAMAP":"",s.vertexColors?"#define USE_COLOR":"",s.gradientMap?"#define USE_GRADIENTMAP":"",s.flatShading?"#define FLAT_SHADED":"",s.doubleSided?"#define DOUBLE_SIDED":"",s.flipSided?"#define FLIP_SIDED":"",s.shadowMapEnabled?"#define USE_SHADOWMAP":"",s.shadowMapEnabled?"#define "+h:"",s.premultipliedAlpha?"#define PREMULTIPLIED_ALPHA":"",s.physicallyCorrectLights?"#define PHYSICALLY_CORRECT_LIGHTS":"",s.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"",s.logarithmicDepthBuffer&&(r.isWebGL2||t.get("EXT_frag_depth"))?"#define USE_LOGDEPTHBUF_EXT":"",s.envMap&&(r.isWebGL2||t.get("EXT_shader_texture_lod"))?"#define TEXTURE_LOD_EXT":"","uniform mat4 viewMatrix;","uniform vec3 cameraPosition;",s.toneMapping!==ep?"#define TONE_MAPPING":"",s.toneMapping!==ep?Qt.tonemapping_pars_fragment:"",s.toneMapping!==ep?Dz("toneMapping",s.toneMapping):"",s.dithering?"#define DITHERING":"",s.outputEncoding||s.mapEncoding||s.matcapEncoding||s.envMapEncoding||s.emissiveMapEncoding?Qt.encodings_pars_fragment:"",s.mapEncoding?Ld("mapTexelToLinear",s.mapEncoding):"",s.matcapEncoding?Ld("matcapTexelToLinear",s.matcapEncoding):"",s.envMapEncoding?Ld("envMapTexelToLinear",s.envMapEncoding):"",s.emissiveMapEncoding?Ld("emissiveMapTexelToLinear",s.emissiveMapEncoding):"",s.outputEncoding?Sz("linearToOutputTexel",s.outputEncoding):"",s.depthPacking?"#define DEPTH_PACKING "+n.depthPacking:"",`
`].filter(_u).join(`
`)),u=Bm(u),u=Vb(u,s),u=Wb(u,s),f=Bm(f),f=Vb(f,s),f=Wb(f,s),u=Kb(u),f=Kb(f),r.isWebGL2&&!n.isRawShaderMaterial){var U=!1,N=/^\s*#version\s+300\s+es\s*\n/;n.isShaderMaterial&&u.match(N)!==null&&f.match(N)!==null&&(U=!0,u=u.replace(N,""),f=f.replace(N,"")),B=[`#version 300 es
`,"#define attribute in","#define varying out","#define texture2D texture"].join(`
`)+`
`+B,S=[`#version 300 es
`,"#define varying in",U?"":"out highp vec4 pc_fragColor;",U?"":"#define gl_FragColor pc_fragColor","#define gl_FragDepthEXT gl_FragDepth","#define texture2D texture","#define textureCube texture","#define texture2DProj textureProj","#define texture2DLodEXT textureLod","#define texture2DProjLodEXT textureProjLod","#define textureCubeLodEXT textureLod","#define texture2DGradEXT textureGrad","#define texture2DProjGradEXT textureProjGrad","#define textureCubeGradEXT textureGrad"].join(`
`)+`
`+S}var z=B+u,Q=S+f,R=qb(o,o.VERTEX_SHADER,z),W=qb(o,o.FRAGMENT_SHADER,Q);o.attachShader(C,R),o.attachShader(C,W),n.index0AttributeName!==void 0?o.bindAttribLocation(C,0,n.index0AttributeName):s.morphTargets===!0&&o.bindAttribLocation(C,0,"position"),o.linkProgram(C);var Z=o.getProgramInfoLog(C).trim(),de=o.getShaderInfoLog(R).trim(),Fe=o.getShaderInfoLog(W).trim(),ge=!0,le=!0;o.getProgramParameter(C,o.LINK_STATUS)===!1?(ge=!1,console.error("THREE.WebGLProgram: shader error: ",o.getError(),"gl.VALIDATE_STATUS",o.getProgramParameter(C,o.VALIDATE_STATUS),"gl.getProgramInfoLog",Z,de,Fe)):Z!==""?console.warn("THREE.WebGLProgram: gl.getProgramInfoLog()",Z):(de===""||Fe==="")&&(le=!1),le&&(this.diagnostics={runnable:ge,material:n,programLog:Z,vertexShader:{log:de,prefix:B},fragmentShader:{log:Fe,prefix:S}}),o.deleteShader(R),o.deleteShader(W);var se;this.getUniforms=function(){return se===void 0&&(se=new fr(o,C,e)),se};var me;return this.getAttributes=function(){return me===void 0&&(me=Iz(o,C)),me},this.destroy=function(){o.deleteProgram(C),this.program=void 0},Object.defineProperties(this,{uniforms:{get:function(){return console.warn("THREE.WebGLProgram: .uniforms is now .getUniforms()."),this.getUniforms()}},attributes:{get:function(){return console.warn("THREE.WebGLProgram: .attributes is now .getAttributes()."),this.getAttributes()}}}),this.name=a.name,this.id=Bz++,this.code=i,this.usedTimes=1,this.program=C,this.vertexShader=R,this.fragmentShader=W,this}function Mz(e,t,i){var n=[],a={MeshDepthMaterial:"depth",MeshDistanceMaterial:"distanceRGBA",MeshNormalMaterial:"normal",MeshBasicMaterial:"basic",MeshLambertMaterial:"lambert",MeshPhongMaterial:"phong",MeshToonMaterial:"phong",MeshStandardMaterial:"physical",MeshPhysicalMaterial:"physical",MeshMatcapMaterial:"matcap",LineBasicMaterial:"basic",LineDashedMaterial:"dashed",PointsMaterial:"points",ShadowMaterial:"shadow",SpriteMaterial:"sprite"},s=["precision","supportsVertexTextures","map","mapEncoding","matcap","matcapEncoding","envMap","envMapMode","envMapEncoding","lightMap","aoMap","emissiveMap","emissiveMapEncoding","bumpMap","normalMap","objectSpaceNormalMap","displacementMap","specularMap","roughnessMap","metalnessMap","gradientMap","alphaMap","combine","vertexColors","fog","useFog","fogExp","flatShading","sizeAttenuation","logarithmicDepthBuffer","skinning","maxBones","useVertexTexture","morphTargets","morphNormals","maxMorphTargets","maxMorphNormals","premultipliedAlpha","numDirLights","numPointLights","numSpotLights","numHemiLights","numRectAreaLights","shadowMapEnabled","shadowMapType","toneMapping","physicallyCorrectLights","alphaTest","doubleSided","flipSided","numClippingPlanes","numClipIntersection","depthPacking","dithering"];function r(l){var u=l.skeleton,f=u.bones;if(i.floatVertexTextures)return 1024;var h=i.maxVertexUniforms,A=Math.floor((h-20)/4),p=Math.min(A,f.length);return p<f.length?(console.warn("THREE.WebGLRenderer: Skeleton has "+f.length+" bones. This GPU supports "+p+"."),0):p}function o(l,u){var f;return l?l.isTexture?f=l.encoding:l.isWebGLRenderTarget&&(console.warn("THREE.WebGLPrograms.getTextureEncodingFromMap: don't use render targets as textures. 
Use their .texture property instead."),f=l.texture.encoding):f=$f,f===$f&&u&&(f=Z3),f}this.getParameters=function(l,u,f,h,A,p,F){var y=a[l.type],E=F.isSkinnedMesh?r(F):0,w=i.precision;l.precision!==null&&(w=i.getMaxPrecision(l.precision),w!==l.precision&&console.warn("THREE.WebGLProgram.getParameters:",l.precision,"not supported, using",w,"instead."));var C=e.getRenderTarget(),B={shaderID:y,precision:w,supportsVertexTextures:i.vertexTextures,outputEncoding:o(C?C.texture:null,e.gammaOutput),map:!!l.map,mapEncoding:o(l.map,e.gammaInput),matcap:!!l.matcap,matcapEncoding:o(l.matcap,e.gammaInput),envMap:!!l.envMap,envMapMode:l.envMap&&l.envMap.mapping,envMapEncoding:o(l.envMap,e.gammaInput),envMapCubeUV:!!l.envMap&&(l.envMap.mapping===E1||l.envMap.mapping===k1),lightMap:!!l.lightMap,aoMap:!!l.aoMap,emissiveMap:!!l.emissiveMap,emissiveMapEncoding:o(l.emissiveMap,e.gammaInput),bumpMap:!!l.bumpMap,normalM
Object.assign(ha.prototype,{beforeStart_:ha.prototype.copySampleValue_,afterEnd_:ha.prototype.copySampleValue_});function Mm(e,t,i,n){ha.call(this,e,t,i,n),this._weightPrev=-0,this._offsetPrev=-0,this._weightNext=-0,this._offsetNext=-0}Mm.prototype=Object.assign(Object.create(ha.prototype),{constructor:Mm,DefaultSettings_:{endingStart:pl,endingEnd:pl},intervalChanged_:function(e,t,i){var n=this.parameterPositions,a=e-2,s=e+1,r=n[a],o=n[s];if(r===void 0)switch(this.getSettings_().endingStart){case Zo:a=e,r=2*t-i;break;case Zf:a=n.length-2,r=t+n[a]-n[a+1];break;default:a=e,r=i}if(o===void 0)switch(this.getSettings_().endingEnd){case Zo:s=e,o=2*i-t;break;case Zf:s=1,o=i+n[1]-n[0];break;default:s=e-1,o=t}var l=(i-t)*.5,u=this.valueSize;this._weightPrev=l/(t-r),this._weightNext=l/(o-i),this._offsetPrev=a*u,this._offsetNext=s*u},interpolate_:function(e,t,i,n){for(var a=this.resultBuffer,s=this.sampleValues,r=this.valueSize,o=e*r,l=o-r,u=this._offsetPrev,f=this._offsetNext,h=this._weightPrev,A=this._weightNext,p=(i-t)/(n-t),F=p*p,y=F*p,E=-h*y+2*h*F-h*p,w=(1+h)*y+(-1.5-2*h)*F+(-.5+h)*p+1,C=(-1-A)*y+(1.5+A)*F+.5*p,B=A*y-A*F,S=0;S!==r;++S)a[S]=E*s[u+S]+w*s[l+S]+C*s[o+S]+B*s[f+S];return a}});function Dh(e,t,i,n){ha.call(this,e,t,i,n)}Dh.prototype=Object.assign(Object.create(ha.prototype),{constructor:Dh,interpolate_:function(e,t,i,n){for(var a=this.resultBuffer,s=this.sampleValues,r=this.valueSize,o=e*r,l=o-r,u=(i-t)/(n-t),f=1-u,h=0;h!==r;++h)a[h]=s[l+h]*f+s[o+h]*u;return a}});function Lm(e,t,i,n){ha.call(this,e,t,i,n)}Lm.prototype=Object.assign(Object.create(ha.prototype),{constructor:Lm,interpolate_:function(e){return this.copySampleValue_(e-1)}});function Fn(e,t,i,n){if(e===void 0)throw new Error("THREE.KeyframeTrack: track name is undefined");if(t===void 0||t.length===0)throw new Error("THREE.KeyframeTrack: no keyframes in track named "+e);this.name=e,this.times=Sn.convertArray(t,this.TimeBufferType),this.values=Sn.convertArray(i,this.ValueBufferType),this.setInterpolation(n||this.DefaultInterpolation)}Object.assign(Fn,{toJSON:function(e){var t=e.constructor,i;if(t.toJSON!==void 0)i=t.toJSON(e);else{i={name:e.name,times:Sn.convertArray(e.times,Array),values:Sn.convertArray(e.values,Array)};var n=e.getInterpolation();n!==e.DefaultInterpolation&&(i.interpolation=n)}return i.type=e.ValueTypeName,i}});Object.assign(Fn.prototype,{constructor:Fn,TimeBufferType:Float32Array,ValueBufferType:Float32Array,DefaultInterpolation:Cf,InterpolantFactoryMethodDiscrete:function(e){return new Lm(this.times,this.values,this.getValueSize(),e)},InterpolantFactoryMethodLinear:function(e){return new Dh(this.times,this.values,this.getValueSize(),e)},InterpolantFactoryMethodSmooth:function(e){return new Mm(this.times,this.values,this.getValueSize(),e)},setInterpolation:function(e){var t;switch(e){case Jf:t=this.InterpolantFactoryMethodDiscrete;break;case Cf:t=this.InterpolantFactoryMethodLinear;break;case tp:t=this.InterpolantFactoryMethodSmooth;break}if(t===void 0){var i="unsupported interpolation for "+this.ValueTypeName+" keyframe track named "+this.name;if(this.createInterpolant===void 0)if(e!==this.DefaultInterpolation)this.setInterpolation(this.DefaultInterpolation);else throw new Error(i);return console.warn("THREE.KeyframeTrack:",i),this}return this.createInterpolant=t,this},getInterpolation:function(){switch(this.createInterpolant){case this.InterpolantFactoryMethodDiscrete:return Jf;case this.InterpolantFactoryMethodLinear:return Cf;case this.InterpolantFactoryMethodSmooth:return 
tp}},getValueSize:function(){return this.values.length/this.times.length},shift:function(e){if(e!==0)for(var t=this.times,i=0,n=t.length;i!==n;++i)t[i]+=e;return this},scale:function(e){if(e!==1)for(var t=this.times,i=0,n=t.length;i!==n;++i)t[i]*=e;return this},trim:function(e,t){for(var i=this.times,n=i.length,a=0,s=n-1;a!==n&&i[a]<e;)++a;for(;s!==-1&&i[s]>t;)--s;if(++s,a!==0||s!==n){a>=s&&(s=Math.max(s,1),a=s-1);var r=this.getValueSize();this.times=Sn.arraySlice(i,a,s),this.values=Sn.arraySlice(this.values,a*r,s*r)}return this},validate:function(){var e=!0,t=this.g
`)o=0,l-=s;else{var h=Eq(f,a,o,l,i);o+=h.offsetX,r.push(h.path)}}return r}function Eq(e,t,i,n,a){var s=a.glyphs[e]||a.glyphs["?"];if(s){var r=new k4,o,l,u,f,h,A,p,F;if(s.o)for(var y=s._cachedOutline||(s._cachedOutline=s.o.split(" ")),E=0,w=y.length;E<w;){var C=y[E++];switch(C){case"m":o=y[E++]*t+i,l=y[E++]*t+n,r.moveTo(o,l);break;case"l":o=y[E++]*t+i,l=y[E++]*t+n,r.lineTo(o,l);break;case"q":u=y[E++]*t+i,f=y[E++]*t+n,h=y[E++]*t+i,A=y[E++]*t+n,r.quadraticCurveTo(h,A,u,f);break;case"b":u=y[E++]*t+i,f=y[E++]*t+n,h=y[E++]*t+i,A=y[E++]*t+n,p=y[E++]*t+i,F=y[E++]*t+n,r.bezierCurveTo(h,A,p,F,u,f);break}}return{offsetX:s.ha*t,path:r}}}function kq(e){this.manager=e!==void 0?e:Zn}Object.assign(kq.prototype,{load:function(e,t,i,n){var a=this,s=new Ns(this.manager);s.setPath(this.path),s.load(e,function(r){var o;try{o=JSON.parse(r)}catch{console.warn("THREE.FontLoader: typeface.js support is being deprecated. Use typeface.json instead."),o=JSON.parse(r.substring(65,r.length-2))}var l=a.parse(o);t&&t(l)},i,n)},parse:function(e){return new B4(e)},setPath:function(e){return this.path=e,this}});function Th(){}Th.Handlers={handlers:[],add:function(e,t){this.handlers.push(e,t)},get:function(e){for(var t=this.handlers,i=0,n=t.length;i<n;i+=2){var a=t[i],s=t[i+1];if(a.test(e))return s}return null}};Object.assign(Th.prototype,{crossOrigin:"anonymous",onLoadStart:function(){},onLoadProgress:function(){},onLoadComplete:function(){},initMaterials:function(e,t,i){for(var n=[],a=0;a<e.length;++a)n[a]=this.createMaterial(e[a],t,i);return n},createMaterial:function(){var e={NoBlending:Uu,NormalBlending:ll,AdditiveBlending:gm,SubtractiveBlending:pm,MultiplyBlending:mm,CustomBlending:G3},t=new Ft,i=new P1,n=new L1;return function(s,r,o){var l={};function u(p,F,y,E,w){var C=r+p,B=Th.Handlers.get(C),S;B!==null?S=B.load(C):(i.setCrossOrigin(o),S=i.load(C)),F!==void 0&&(S.repeat.fromArray(F),F[0]!==1&&(S.wrapS=ar),F[1]!==1&&(S.wrapT=ar)),y!==void 0&&S.offset.fromArray(y),E!==void 0&&(E[0]==="repeat"&&(S.wrapS=ar),E[0]==="mirror"&&(S.wrapS=gl),E[1]==="repeat"&&(S.wrapT=ar),E[1]==="mirror"&&(S.wrapT=gl)),w!==void 0&&(S.anisotropy=w);var U=It.generateUUID();return l[U]=S,U}var f={uuid:It.generateUUID(),type:"MeshLambertMaterial"};for(var h in s){var A=s[h];switch(h){case"DbgColor":case"DbgIndex":case"opticalDensity":case"illumination":break;case"DbgName":f.name=A;break;case"blending":f.blending=e[A];break;case"colorAmbient":case"mapAmbient":console.warn("THREE.Loader.createMaterial:",h,"is no longer 
supported.");break;case"colorDiffuse":f.color=t.fromArray(A).getHex();break;case"colorSpecular":f.specular=t.fromArray(A).getHex();break;case"colorEmissive":f.emissive=t.fromArray(A).getHex();break;case"specularCoef":f.shininess=A;break;case"shading":A.toLowerCase()==="basic"&&(f.type="MeshBasicMaterial"),A.toLowerCase()==="phong"&&(f.type="MeshPhongMaterial"),A.toLowerCase()==="standard"&&(f.type="MeshStandardMaterial");break;case"mapDiffuse":f.map=u(A,s.mapDiffuseRepeat,s.mapDiffuseOffset,s.mapDiffuseWrap,s.mapDiffuseAnisotropy);break;case"mapDiffuseRepeat":case"mapDiffuseOffset":case"mapDiffuseWrap":case"mapDiffuseAnisotropy":break;case"mapEmissive":f.emissiveMap=u(A,s.mapEmissiveRepeat,s.mapEmissiveOffset,s.mapEmissiveWrap,s.mapEmissiveAnisotropy);break;case"mapEmissiveRepeat":case"mapEmissiveOffset":case"mapEmissiveWrap":case"mapEmissiveAnisotropy":break;case"mapLight":f.lightMap=u(A,s.mapLightRepeat,s.mapLightOffset,s.mapLightWrap,s.mapLightAnisotropy);break;case"mapLightRepeat":case"mapLightOffset":case"mapLightWrap":case"mapLightAnisotropy":break;case"mapAO":f.aoMap=u(A,s.mapAORepeat,s.mapAOOffset,s.mapAOWrap,s.mapAOAnisotropy);break;case"mapAORepeat":case"mapAOOffset":case"mapAOWrap":case"mapAOAnisotropy":break;case"mapBump":f.bumpMap=u(A,s.mapBumpRepeat,s.mapBumpOffset,s.mapBumpWrap,s.mapBumpAnisotropy);break;case"mapBumpScale":f.bumpScale=A;break;case"mapBumpRepeat":case"mapBumpOffset":case"mapBumpWrap":case"mapBumpAnisotropy":break;case"mapNormal":f.normalMap=u(A,s.mapNormalRepeat,s.mapNormalOffset,s.mapNormalWrap,s.mapNormalAnisotropy);break;case"map
Object.assign(Dn.prototype,{_getValue_unbound:Dn.prototype.getValue,_setValue_unbound:Dn.prototype.setValue});function Sq(){this.uuid=It.generateUUID(),this._objects=Array.prototype.slice.call(arguments),this.nCachedObjects_=0;var e={};this._indicesByUUID=e;for(var t=0,i=arguments.length;t!==i;++t)e[arguments[t].uuid]=t;this._paths=[],this._parsedPaths=[],this._bindings=[],this._bindingsIndicesByPath={};var n=this;this.stats={objects:{get total(){return n._objects.length},get inUse(){return this.total-n.nCachedObjects_}},get bindingsPerObject(){return n._bindings.length}}}Object.assign(Sq.prototype,{isAnimationObjectGroup:!0,add:function(){for(var e=this._objects,t=e.length,i=this.nCachedObjects_,n=this._indicesByUUID,a=this._paths,s=this._parsedPaths,r=this._bindings,o=r.length,l=void 0,u=0,f=arguments.length;u!==f;++u){var h=arguments[u],A=h.uuid,p=n[A];if(p===void 0){p=t++,n[A]=p,e.push(h);for(var F=0,y=o;F!==y;++F)r[F].push(new Dn(h,a[F],s[F]))}else if(p<i){l=e[p];var E=--i,w=e[E];n[w.uuid]=p,e[p]=w,n[A]=E,e[E]=h;for(var F=0,y=o;F!==y;++F){var C=r[F],B=C[E],S=C[p];C[p]=B,S===void 0&&(S=new Dn(h,a[F],s[F])),C[E]=S}}else e[p]!==l&&console.error("THREE.AnimationObjectGroup: Different objects with the same UUID detected. Clean the caches or recreate your infrastructure when reloading scenes.")}this.nCachedObjects_=i},remove:function(){for(var e=this._objects,t=this.nCachedObjects_,i=this._indicesByUUID,n=this._bindings,a=n.length,s=0,r=arguments.length;s!==r;++s){var o=arguments[s],l=o.uuid,u=i[l];if(u!==void 0&&u>=t){var f=t++,h=e[f];i[h.uuid]=u,e[u]=h,i[l]=f,e[f]=o;for(var A=0,p=a;A!==p;++A){var F=n[A],y=F[f],E=F[u];F[u]=y,F[f]=E}}}this.nCachedObjects_=t},uncache:function(){for(var e=this._objects,t=e.length,i=this.nCachedObjects_,n=this._indicesByUUID,a=this._bindings,s=a.length,r=0,o=arguments.length;r!==o;++r){var l=arguments[r],u=l.uuid,f=n[u];if(f!==void 0)if(delete n[u],f<i){var h=--i,A=e[h],p=--t,F=e[p];n[A.uuid]=f,e[f]=A,n[F.uuid]=h,e[h]=F,e.pop();for(var y=0,E=s;y!==E;++y){var w=a[y],C=w[h],B=w[p];w[f]=C,w[h]=B,w.pop()}}else{var p=--t,F=e[p];n[F.uuid]=f,e[f]=F,e.pop();for(var y=0,E=s;y!==E;++y){var w=a[y];w[f]=w[p],w.pop()}}}this.nCachedObjects_=i},subscribe_:function(e,t){var i=this._bindingsIndicesByPath,n=i[e],a=this._bindings;if(n!==void 0)return a[n];var s=this._paths,r=this._parsedPaths,o=this._objects,l=o.length,u=this.nCachedObjects_,f=new Array(l);n=a.length,i[e]=n,s.push(e),r.push(t),a.push(f);for(var h=u,A=o.length;h!==A;++h){var p=o[h];f[h]=new Dn(p,e,t)}return f},unsubscribe_:function(e){var t=this._bindingsIndicesByPath,i=t[e];if(i!==void 0){var n=this._paths,a=this._parsedPaths,s=this._bindings,r=s.length-1,o=s[r],l=e[r];t[l]=i,s[i]=o,s.pop(),a[i]=a[r],a.pop(),n[i]=n[r],n.pop()}}});function M4(e,t,i){this._mixer=e,this._clip=t,this._localRoot=i||null;for(var n=t.tracks,a=n.length,s=new Array(a),r={endingStart:pl,endingEnd:pl},o=0;o!==a;++o){var l=n[o].createInterpolant(null);s[o]=l,l.settings=r}this._interpolantSettings=r,this._interpolants=s,this._propertyBindings=new Array(a),this._cacheIndex=null,this._byClipCacheIndex=null,this._timeScaleInterpolant=null,this._weightInterpolant=null,this.loop=hQ,this._loopCount=-1,this._startTime=null,this.time=0,this.timeScale=1,this._effectiveTimeScale=1,this.weight=1,this._effectiveWeight=1,this.repetitions=1/0,this.paused=!1,this.enabled=!0,this.clampWhenFinished=!1,this.zeroSlopeAtStart=!0,this.zeroSlopeAtEnd=!0}Object.assign(M4.prototype,{play:function(){return 
this._mixer._activateAction(this),this},stop:function(){return this._mixer._deactivateAction(this),this.reset()},reset:function(){return this.paused=!1,this.enabled=!0,this.time=0,this._loopCount=-1,this._startTime=null,this.stopFading().stopWarping()},isRunning:function(){return this.enabled&&!this.paused&&this.timeScale!==0&&this._startTime===null&&this._mixer._isActiveAction(this)},isScheduled:function(){return this._mixer._isActiveAction(this)},startAt:function(e){return this._startTime=e,this},setLoop:function(e,t){return this.loop=e,this.repetitions=t,this},setEffectiveWeight:function(e){
const Iq="",Pq="";let Nd=!1,pv,Mq=0;function Lq(e){const[t,i]=e.split(","),n=t.match(/:(.*?);/)[1],a=atob(i),s=new ArrayBuffer(a.length),r=new Uint8Array(s);for(let o=0;o<a.length;o++)r[o]=a.charCodeAt(o);return new Blob([s],{type:n})}class Rq{constructor(){window.addEventListener("resize",this.resize.bind(this)),this.renderer=new I1,this.renderer.autoClear=!1,this.renderer.setSize(window.innerWidth,window.innerHeight),this.rtTexture=new Ya(window.innerWidth,window.innerHeight,{minFilter:mn,magFilter:rn,format:Ss,type:xA}),this.gameTexture=new bl,this.gameTexture.needsUpdate=!0,this.material=this.createShaderMaterial(),this.sceneRTT=this.createScene(),this.cameraRTT=this.createCamera(),this.appendRendererToDOM(),this.animate=this.animate.bind(this),requestAnimationFrame(this.animate)}createCamera(){const t=new Rc(window.innerWidth/-2,window.innerWidth/2,window.innerHeight/2,window.innerHeight/-2,-1e4,1e4);return t.setViewOffset(window.innerWidth,window.innerHeight,0,0,window.innerWidth,window.innerHeight),t}createScene(){const t=new ah,i=new ho(window.innerWidth,window.innerHeight),n=new xn(i,this.material);return n.position.z=-100,t.add(n),t}createShaderMaterial(){return new Kn({uniforms:{tDiffuse:{value:this.gameTexture}},vertexShader:`
varying vec2 vUv;
void main() {
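// flip the V coordinate so the render-to-texture result is not upside-down when it is read back onto the 2D canvas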
vUv = vec2(uv.x, 1.0 - uv.y);
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}`,fragmentShader:`
varying vec2 vUv;
uniform sampler2D tDiffuse;
void main() {
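// write the captured game frame out unchanged, sampled at the flipped UV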
gl_FragColor = texture2D(tDiffuse, vUv);
}`})}appendRendererToDOM(){const t=document.createElement("div");t.id="three-game-render",t.style.display="none",t.appendChild(this.renderer.domElement),document.body.appendChild(t)}resize(){this.cameraRTT=this.createCamera(),this.sceneRTT=this.createScene(),this.rtTexture.setSize(window.innerWidth,window.innerHeight),this.renderer.setSize(window.innerWidth,window.innerHeight)}animate(){if(requestAnimationFrame(this.animate),Nd){this.renderer.clear(),this.renderer.render(this.sceneRTT,this.cameraRTT,this.rtTexture,!0);const t=new Uint8Array(window.innerWidth*window.innerHeight*4);this.renderer.readRenderTargetPixels(this.rtTexture,0,0,window.innerWidth,window.innerHeight,t),this.updateCanvas(t)}}updateCanvas(t){this.canvas||this.createTempCanvas(),this.canvas.style.display="inline",this.canvas.width=window.innerWidth,this.canvas.height=window.innerHeight;const i=this.canvas.getContext("2d"),n=new ImageData(new Uint8ClampedArray(t.buffer),window.innerWidth,window.innerHeight);i.putImageData(n,0,0)}createTempCanvas(){this.canvas=document.createElement("canvas"),this.canvas.style.display="none",document.body.appendChild(this.canvas)}renderToTarget(t){this.canvas=t,Nd=!0}async requestScreenshot(t=Iq,i=Pq,n={}){if(!t||!i)return console.warn("URL or field is not defined."),null;this.canvas||this.createTempCanvas(),Nd=!0,await new Promise(r=>setTimeout(r,10));const a=this.canvas.toDataURL("image/png"),s=new FormData;s.append(i,Lq(a),"screenshot.png");try{const o=await(await fetch(t,{method:"POST",mode:"cors",headers:n,body:s})).json();return Mq++,this.canvas.style.display="none",o}catch(r){return console.error("Screenshot-Upload error:",r),null}finally{}}stop(){Nd=!1,this.canvas&&(this.canvas.style.display="none")}}setTimeout(()=>{pv=new Rq,window.MainRender=pv},1e3);var lp={};/*!
* howler.js v2.2.4
* howlerjs.com
*
* (c) 2013-2020, James Simpson of GoldFire Studios
* goldfirestudios.com
*
* MIT License
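*
* A hedged usage sketch (editorial addition, not part of the original howler.js
* header), based on the standard howler.js API; Howler.volume() and Howler.mute()
* are implemented further down in this file:
*
*   var sound = new Howl({ src: ['clip.webm', 'clip.mp3'] });
*   var id = sound.play();   // play() returns a sound id for later calls
*   Howler.volume(0.5);      // global volume, 0.0 - 1.0
*   Howler.mute(true);       // mute every Howl instance at once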
*/var mv;function Uq(){return mv||(mv=1,function(e){(function(){var t=function(){this.init()};t.prototype={init:function(){var h=this||i;return h._counter=1e3,h._html5AudioPool=[],h.html5PoolSize=10,h._codecs={},h._howls=[],h._muted=!1,h._volume=1,h._canPlayEvent="canplaythrough",h._navigator=typeof window<"u"&&window.navigator?window.navigator:null,h.masterGain=null,h.noAudio=!1,h.usingWebAudio=!0,h.autoSuspend=!0,h.ctx=null,h.autoUnlock=!0,h._setup(),h},volume:function(h){var A=this||i;if(h=parseFloat(h),A.ctx||f(),typeof h<"u"&&h>=0&&h<=1){if(A._volume=h,A._muted)return A;A.usingWebAudio&&A.masterGain.gain.setValueAtTime(h,i.ctx.currentTime);for(var p=0;p<A._howls.length;p++)if(!A._howls[p]._webAudio)for(var F=A._howls[p]._getSoundIds(),y=0;y<F.length;y++){var E=A._howls[p]._soundById(F[y]);E&&E._node&&(E._node.volume=E._volume*h)}return A}return A._volume},mute:function(h){var A=this||i;A.ctx||f(),A._muted=h,A.usingWebAudio&&A.masterGain.gain.setValueAtTime(h?0:A._volume,i.ctx.currentTime);for(var p=0;p<A._howls.length;p++)if(!A._howls[p]._webAudio)for(var F=A._howls[p]._getSoundIds(),y=0;y<F.length;y++){var E=A._howls[p]._soundById(F[y]);E&&E._node&&(E._node.muted=h?!0:E._muted)}return A},stop:function(){for(var h=this||i,A=0;A<h._howls.length;A++)h._howls[A].stop();return h},unload:function(){for(var h=this||i,A=h._howls.length-1;A>=0;A--)h._howls[A].unload();return h.usingWebAudio&&h.ctx&&typeof h.ctx.close<"u"&&(h.ctx.close(),h.ctx=null,f()),h},codecs:function(h){return(this||i)._codecs[h.replace(/^x-/,"")]},_setup:function(){var h=this||i;if(h.state=h.ctx&&h.ctx.state||"suspended",h._autoSuspend(),!h.usingWebAudio)if(typeof Audio<"u")try{var A=new Audio;typeof A.oncanplaythrough>"u"&&(h._canPlayEvent="canplay")}catch{h.noAudio=!0}else h.noAudio=!0;try{var A=new Audio;A.muted&&(h.noAudio=!0)}catch{}return h.noAudio||h._setupCodecs(),h},_setupCodecs:function(){var h=this||i,A=null;try{A=typeof Audio<"u"?new Audio:null}catch{return h}if(!A||typeof A.canPlayType!="function")return h;var p=A.canPlayType("audio/mpeg;").replace(/^no$/,""),F=h._navigator?h._navigator.userAgent:"",y=F.match(/OPR\/(\d+)/g),E=y&&parseInt(y[0].split("/")[1],10)<33,w=F.indexOf("Safari")!==-1&&F.indexOf("Chrome")===-1,C=F.match(/Version\/(.*?) 
/),B=w&&C&&parseInt(C[1],10)<15;return h._codecs={mp3:!!(!E&&(p||A.canPlayType("audio/mp3;").replace(/^no$/,""))),mpeg:!!p,opus:!!A.canPlayType('audio/ogg; codecs="opus"').replace(/^no$/,""),ogg:!!A.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,""),oga:!!A.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,""),wav:!!(A.canPlayType('audio/wav; codecs="1"')||A.canPlayType("audio/wav")).replace(/^no$/,""),aac:!!A.canPlayType("audio/aac;").replace(/^no$/,""),caf:!!A.canPlayType("audio/x-caf;").replace(/^no$/,""),m4a:!!(A.canPlayType("audio/x-m4a;")||A.canPlayType("audio/m4a;")||A.canPlayType("audio/aac;")).replace(/^no$/,""),m4b:!!(A.canPlayType("audio/x-m4b;")||A.canPlayType("audio/m4b;")||A.canPlayType("audio/aac;")).replace(/^no$/,""),mp4:!!(A.canPlayType("audio/x-mp4;")||A.canPlayType("audio/mp4;")||A.canPlayType("audio/aac;")).replace(/^no$/,""),weba:!!(!B&&A.canPlayType('audio/webm; codecs="vorbis"').replace(/^no$/,"")),webm:!!(!B&&A.canPlayType('audio/webm; codecs="vorbis"').replace(/^no$/,"")),dolby:!!A.canPlayType('audio/mp4; codecs="ec-3"').replace(/^no$/,""),flac:!!(A.canPlayType("audio/x-flac;")||A.canPlayType("audio/flac;")).replace(/^no$/,"")},h},_unlockAudio:function(){var h=this||i;if(!(h._audioUnlocked||!h.ctx)){h._audioUnlocked=!1,h.autoUnlock=!1,!h._mobileUnloaded&&h.ctx.sampleRate!==44100&&(h._mobileUnloaded=!0,h.unload()),h._scratchBuffer=h.ctx.createBuffer(1,1,22050);var A=function(p){for(;h._html5AudioPool.length<h.html5PoolSize;)try{var F=new Audio;F._unlocked=!0,h._releaseHtml5Audio(F)}catch{h.noAudio=!0;break}for(var y=0;y<h._howls.length;y++)if(!h._howls[y]._webAudio)for(var E=h._howls[y]._getSoundIds(),w=0;w<E.length;w++){var C=h._howls[y]._soundById(E[w]);C&&C._node&&!C._node._unlocked&&(C._node._unlocked=!0,C._node.load())}h._autoResume();var B=h.ctx.createBu
* Spatial Plugin - Adds support for stereo and 3D audio where Web Audio is supported.
*
* howler.js v2.2.4
* howlerjs.com
*
* (c) 2013-2020, James Simpson of GoldFire Studios
* goldfirestudios.com
*
* MIT License
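*
* A hedged usage sketch (editorial addition, not part of the original plugin
* header). The stereo(), pos() and orientation() methods are the ones this
* plugin attaches to Howler and Howl right below; `sound` is assumed to be an
* existing Howl instance:
*
*   Howler.pos(0, 0, 0);                    // listener position
*   Howler.orientation(0, 0, -1, 0, 1, 0);  // listener facing / up vectors
*   var id = sound.play();
*   sound.stereo(-0.6, id);                 // pan one sound toward the left
*   sound.pos(3, 0, -2, id);                // or give it a full 3D position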
*/(function(){HowlerGlobal.prototype._pos=[0,0,0],HowlerGlobal.prototype._orientation=[0,0,-1,0,1,0],HowlerGlobal.prototype.stereo=function(i){var n=this;if(!n.ctx||!n.ctx.listener)return n;for(var a=n._howls.length-1;a>=0;a--)n._howls[a].stereo(i);return n},HowlerGlobal.prototype.pos=function(i,n,a){var s=this;if(!s.ctx||!s.ctx.listener)return s;if(n=typeof n!="number"?s._pos[1]:n,a=typeof a!="number"?s._pos[2]:a,typeof i=="number")s._pos=[i,n,a],typeof s.ctx.listener.positionX<"u"?(s.ctx.listener.positionX.setTargetAtTime(s._pos[0],Howler.ctx.currentTime,.1),s.ctx.listener.positionY.setTargetAtTime(s._pos[1],Howler.ctx.currentTime,.1),s.ctx.listener.positionZ.setTargetAtTime(s._pos[2],Howler.ctx.currentTime,.1)):s.ctx.listener.setPosition(s._pos[0],s._pos[1],s._pos[2]);else return s._pos;return s},HowlerGlobal.prototype.orientation=function(i,n,a,s,r,o){var l=this;if(!l.ctx||!l.ctx.listener)return l;var u=l._orientation;if(n=typeof n!="number"?u[1]:n,a=typeof a!="number"?u[2]:a,s=typeof s!="number"?u[3]:s,r=typeof r!="number"?u[4]:r,o=typeof o!="number"?u[5]:o,typeof i=="number")l._orientation=[i,n,a,s,r,o],typeof l.ctx.listener.forwardX<"u"?(l.ctx.listener.forwardX.setTargetAtTime(i,Howler.ctx.currentTime,.1),l.ctx.listener.forwardY.setTargetAtTime(n,Howler.ctx.currentTime,.1),l.ctx.listener.forwardZ.setTargetAtTime(a,Howler.ctx.currentTime,.1),l.ctx.listener.upX.setTargetAtTime(s,Howler.ctx.currentTime,.1),l.ctx.listener.upY.setTargetAtTime(r,Howler.ctx.currentTime,.1),l.ctx.listener.upZ.setTargetAtTime(o,Howler.ctx.currentTime,.1)):l.ctx.listener.setOrientation(i,n,a,s,r,o);else return u;return l},Howl.prototype.init=function(i){return function(n){var a=this;return a._orientation=n.orientation||[1,0,0],a._stereo=n.stereo||null,a._pos=n.pos||null,a._pannerAttr={coneInnerAngle:typeof n.coneInnerAngle<"u"?n.coneInnerAngle:360,coneOuterAngle:typeof n.coneOuterAngle<"u"?n.coneOuterAngle:360,coneOuterGain:typeof n.coneOuterGain<"u"?n.coneOuterGain:0,distanceModel:typeof n.distanceModel<"u"?n.distanceModel:"inverse",maxDistance:typeof n.maxDistance<"u"?n.maxDistance:1e4,panningModel:typeof n.panningModel<"u"?n.panningModel:"HRTF",refDistance:typeof n.refDistance<"u"?n.refDistance:1,rolloffFactor:typeof n.rolloffFactor<"u"?n.rolloffFactor:1},a._onstereo=n.onstereo?[{fn:n.onstereo}]:[],a._onpos=n.onpos?[{fn:n.onpos}]:[],a._onorientation=n.onorientation?[{fn:n.onorientation}]:[],i.call(this,n)}}(Howl.prototype.init),Howl.prototype.stereo=function(i,n){var a=this;if(!a._webAudio)return a;if(a._state!=="loaded")return a._queue.push({event:"stereo",action:function(){a.stereo(i,n)}}),a;var s=typeof Howler.ctx.createStereoPanner>"u"?"spatial":"stereo";if(typeof n>"u")if(typeof i=="number")a._stereo=i,a._pos=[i,0,0];else return a._stereo;for(var r=a._getSoundIds(n),o=0;o<r.length;o++){var l=a._soundById(r[o]);if(l)if(typeof i=="number")l._stereo=i,l._pos=[i,0,0],l._node&&(l._pannerAttr.panningModel="equalpower",(!l._panner||!l._panner.pan)&&t(l,s),s==="spatial"?typeof l._panner.positionX<"u"?(l._panner.positionX.setValueAtTime(i,Howler.ctx.currentTime),l._panner.positionY.setValueAtTime(0,Howler.ctx.currentTime),l._panner.positionZ.setValueAtTime(0,Howler.ctx.currentTime)):l._panner.setPosition(i,0,0):l._panner.pan.setValueAtTime(i,Howler.ctx.currentTime)),a._emit("stereo",l._id);else return l._stereo}return a},Howl.prototype.pos=function(i,n,a,s){var r=this;if(!r._webAudio)return r;if(r._state!=="loaded")return r._queue.push({event:"pos",action:function(){r.pos(i,n,a,s)}}),r;if(n=typeof 
n!="number"?0:n,a=typeof a!="number"?-.5:a,typeof s>"u")if(typeof i=="number")r._pos=[i,n,a];else return r._pos;for(var o=r._getSoundIds(s),l=0;l<o.length;l++){var u=r._soundById(o[l]);if(u)if(typeof i=="number")u._pos=[i,n,a],u._node&&((!u._panner||u._panner.pan)&&t(u,"spatial"),typeof u._panner.positionX<"u"?(u._panner.positionX.setValueAtTime(i,Howler.ctx.currentTime),u._panner.positionY.setValueAtTime(n,Howler.ctx.currentTime),u._panner.positionZ.setValueAtTime(a,Howler.ctx.currentTime)):u._panner.setPosition(i,n,a)),r._emit("pos",u._id);else
* {@link https://github.com/muaz-khan/RecordRTC|RecordRTC} is a WebRTC JavaScript library for audio/video as well as screen activity recording. It supports Chrome, Firefox, Opera, Android, and Microsoft Edge. Platforms: Linux, Mac and Windows.
* @summary Record audio, video or screen inside the browser.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef RecordRTC
* @class
* @example
* var recorder = RecordRTC(mediaStream or [arrayOfMediaStream], {
* type: 'video', // audio or video or gif or canvas
* recorderType: MediaStreamRecorder || CanvasRecorder || StereoAudioRecorder || Etc
* });
* recorder.startRecording();
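* // A hedged continuation (editorial addition): stopping and collecting the result
* // with methods defined later in this file (stopRecording, getBlob, toURL, getDataURL):
* recorder.stopRecording(function(url) {
*     var blob = recorder.getBlob();
*     // upload or download the blob here
* });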
* @see For further information:
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc.
* @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, desiredSampRate: 16000, video: HTMLVideoElement, etc.}
*/function t(I,D){if(!I)throw"First parameter is required.";D=D||{type:"video"},D=new i(I,D);var O=this;function he(ce){return D.disableLogs||console.log("RecordRTC version: ",O.version),ce&&(D=new i(I,ce)),D.disableLogs||console.log("started recording "+D.type+" stream."),re?(re.clearRecordedData(),re.record(),ke("recording"),O.recordingDuration&&ue(),O):(te(function(){O.recordingDuration&&ue()}),O)}function te(ce){ce&&(D.initCallback=function(){ce(),ce=D.initCallback=null});var De=new n(I,D);re=new De(I,D),re.record(),ke("recording"),D.disableLogs||console.log("Initialized recorderType:",re.constructor.name,"for output-type:",D.type)}function ae(ce){if(ce=ce||function(){},!re){K();return}if(O.state==="paused"){O.resumeRecording(),setTimeout(function(){ae(ce)},1);return}O.state!=="recording"&&!D.disableLogs&&console.warn('Recording state should be: "recording", however current state is: ',O.state),D.disableLogs||console.log("Stopped recording "+D.type+" stream."),D.type!=="gif"?re.stop(De):(re.stop(),De()),ke("stopped");function De(Ie){if(!re){typeof ce.call=="function"?ce.call(O,""):ce("");return}Object.keys(re).forEach(function(Ne){typeof re[Ne]!="function"&&(O[Ne]=re[Ne])});var fe=re.blob;if(!fe)if(Ie)re.blob=fe=Ie;else throw"Recording failed.";if(fe&&!D.disableLogs&&console.log(fe.type,"->",w(fe.size)),ce){var be;try{be=f.createObjectURL(fe)}catch{}typeof ce.call=="function"?ce.call(O,be):ce(be)}D.autoWriteToDisk&&we(function(Ne){var Ue={};Ue[D.type+"Blob"]=Ne,ge.Store(Ue)})}}function Ae(){if(!re){K();return}if(O.state!=="recording"){D.disableLogs||console.warn("Unable to pause the recording. Recording state: ",O.state);return}ke("paused"),re.pause(),D.disableLogs||console.log("Paused recording.")}function X(){if(!re){K();return}if(O.state!=="paused"){D.disableLogs||console.warn("Unable to resume the recording. 
Recording state: ",O.state);return}ke("recording"),re.resume(),D.disableLogs||console.log("Resumed recording.")}function ve(ce){postMessage(new FileReaderSync().readAsDataURL(ce))}function we(ce,De){if(!ce)throw"Pass a callback function over getDataURL.";var Ie=De?De.blob:(re||{}).blob;if(!Ie){D.disableLogs||console.warn("Blob encoder did not finish its job yet."),setTimeout(function(){we(ce,De)},1e3);return}if(typeof Worker<"u"&&!navigator.mozGetUserMedia){var fe=Ne(ve);fe.onmessage=function(Ue){ce(Ue.data)},fe.postMessage(Ie)}else{var be=new FileReader;be.readAsDataURL(Ie),be.onload=function(Ue){ce(Ue.target.result)}}function Ne(Ue){try{var Qe=f.createObjectURL(new Blob([Ue.toString(),"this.onmessage = function (eee) {"+Ue.name+"(eee.data);}"],{type:"application/javascript"})),Ge=new Worker(Qe);return f.revokeObjectURL(Qe),Ge}catch{}}}function ue(ce){if(ce=ce||0,O.state==="paused"){setTimeout(function(){ue(ce)},1e3);return}if(O.state!=="stopped"){if(ce>=O.recordingDuration){ae(O.onRecordingStopped);return}ce+=1e3,setTimeout(function(){ue(ce)},1e3)}}function ke(ce){O&&(O.state=ce,typeof O.onStateChanged.call=="function"?O.onStateChanged.call(O,ce):O.onStateChanged(ce))}var G='It seems that recorder is destroyed or "startRecording" is not invoked for '+D.type+" recorder.";function K(){D.disableLogs!==!0&&console.warn(G)}var re,Ce={startRecording:he,stopRecording:ae,pauseRecording:Ae,resumeRecording:X,initRecorder:te,setRecordingDuration:function(ce,De){if(typeof ce>"u")throw"recordingDuration is required.";if(typeof ce!="number")throw"recordingDuration must be a number.";return O.recordingDuration=ce,O.onRecordingStopped=De||function(){},{onRecordingStopped:function(Ie){O.onRecordingStopped=Ie}}},clearRecordedData:function(){if(!re){K();return}re.clearRecordedData(),D.disableLogs||console.log("Cleared old recorded data.")},getBlob:function(){if(!re){K();return}return re.blob},getDataURL:we,toURL:function(){if(!re){K();return}return f.createObjectURL(re.blob)},getInternalRecorder:function(){return re},save:function(ce){if(!re){K();return}C(re.blob,ce)},getFromDisk:function(ce){if(!re){K();return}t.getFromDisk(D.type,ce)},setAdvertisementArray:function(ce){D.advertisement=[];for(var De=ce.length,Ie=0;Ie<De;I
* {@link RecordRTCConfiguration} is an inner/private helper for {@link RecordRTC}.
* @summary It configures the 2nd parameter passed over {@link RecordRTC} and returns a valid "config" object.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef RecordRTCConfiguration
* @class
* @example
* var options = RecordRTCConfiguration(mediaStream, options);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, getNativeBlob:true, etc.}
*/function i(I,D){return!D.recorderType&&!D.type&&(D.audio&&D.video?D.type="video":D.audio&&!D.video&&(D.type="audio")),D.recorderType&&!D.type&&(D.recorderType===de||D.recorderType===Z||typeof q<"u"&&D.recorderType===q?D.type="video":D.recorderType===le?D.type="gif":D.recorderType===W?D.type="audio":D.recorderType===R&&(S(I,"audio").length&&S(I,"video").length||!S(I,"audio").length&&S(I,"video").length?D.type="video":S(I,"audio").length&&!S(I,"video").length&&(D.type="audio"))),typeof R<"u"&&typeof MediaRecorder<"u"&&"requestData"in MediaRecorder.prototype&&(D.mimeType||(D.mimeType="video/webm"),D.type||(D.type=D.mimeType.split("/")[0]),D.bitsPerSecond),D.type||(D.mimeType&&(D.type=D.mimeType.split("/")[0]),D.type||(D.type="audio")),D}/**
* {@link GetRecorderType} is an inner/private helper for {@link RecordRTC}.
* @summary It returns the best recorder-type available for your browser.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef GetRecorderType
* @class
* @example
* var RecorderType = GetRecorderType(options);
* var recorder = new RecorderType(options);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}
*/function n(I,D){var O;return(F||h||A)&&(O=W),typeof MediaRecorder<"u"&&"requestData"in MediaRecorder.prototype&&!F&&(O=R),D.type==="video"&&(F||A)&&(O=de,typeof q<"u"&&typeof ReadableStream<"u"&&(O=q)),D.type==="gif"&&(O=le),D.type==="canvas"&&(O=Z),Q()&&O!==Z&&O!==le&&typeof MediaRecorder<"u"&&"requestData"in MediaRecorder.prototype&&(S(I,"video").length||S(I,"audio").length)&&(D.type==="audio"?typeof MediaRecorder.isTypeSupported=="function"&&MediaRecorder.isTypeSupported("audio/webm")&&(O=R):typeof MediaRecorder.isTypeSupported=="function"&&MediaRecorder.isTypeSupported("video/webm")&&(O=R)),I instanceof Array&&I.length&&(O=me),D.recorderType&&(O=D.recorderType),!D.disableLogs&&O&&O.name&&console.log("Using recorderType:",O.name||O.constructor.name),!O&&y&&(O=R),O}/**
* MRecordRTC runs on top of {@link RecordRTC} to bring multiple recordings in a single place, by providing a simple API.
* @summary MRecordRTC stands for "Multiple-RecordRTC".
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef MRecordRTC
* @class
* @example
* var recorder = new MRecordRTC();
* recorder.addStream(MediaStream);
* recorder.mediaType = {
* audio: true, // or StereoAudioRecorder or MediaStreamRecorder
* video: true, // or WhammyRecorder or MediaStreamRecorder or WebAssemblyRecorder or CanvasRecorder
* gif: true // or GifRecorder
* };
* // mimeType is optional and should be set only in advanced cases.
* recorder.mimeType = {
* audio: 'audio/wav',
* video: 'video/webm',
* gif: 'image/gif'
* };
* recorder.startRecording();
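* // A hedged continuation (editorial addition) using methods defined later in this file:
* recorder.stopRecording(function(url, type) {
*     // type is 'audio', 'video' or 'gif'
* });
* recorder.getBlob(function(blobs) {
*     // blobs.audio / blobs.video / blobs.gif
* });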
* @see For further information:
* @see {@link https://github.com/muaz-khan/RecordRTC/tree/master/MRecordRTC|MRecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @requires {@link RecordRTC}
*/function a(I){this.addStream=function(D){D&&(I=D)},this.mediaType={audio:!0,video:!0},this.startRecording=function(){var D=this.mediaType,O,he=this.mimeType||{audio:null,video:null,gif:null};if(typeof D.audio!="function"&&Q()&&!S(I,"audio").length&&(D.audio=!1),typeof D.video!="function"&&Q()&&!S(I,"video").length&&(D.video=!1),typeof D.gif!="function"&&Q()&&!S(I,"video").length&&(D.gif=!1),!D.audio&&!D.video&&!D.gif)throw"MediaStream must have either audio or video tracks.";if(D.audio&&(O=null,typeof D.audio=="function"&&(O=D.audio),this.audioRecorder=new t(I,{type:"audio",bufferSize:this.bufferSize,sampleRate:this.sampleRate,numberOfAudioChannels:this.numberOfAudioChannels||2,disableLogs:this.disableLogs,recorderType:O,mimeType:he.audio,timeSlice:this.timeSlice,onTimeStamp:this.onTimeStamp}),D.video||this.audioRecorder.startRecording()),D.video){O=null,typeof D.video=="function"&&(O=D.video);var te=I;if(Q()&&D.audio&&typeof D.audio=="function"){var ae=S(I,"video")[0];p?(te=new E,te.addTrack(ae),O&&O===de&&(O=R)):(te=new E,te.addTrack(ae))}this.videoRecorder=new t(te,{type:"video",video:this.video,canvas:this.canvas,frameInterval:this.frameInterval||10,disableLogs:this.disableLogs,recorderType:O,mimeType:he.video,timeSlice:this.timeSlice,onTimeStamp:this.onTimeStamp,workerPath:this.workerPath,webAssemblyPath:this.webAssemblyPath,frameRate:this.frameRate,bitrate:this.bitrate}),D.audio||this.videoRecorder.startRecording()}if(D.audio&&D.video){var Ae=this,X=Q()===!0;(D.audio instanceof W&&D.video||D.audio!==!0&&D.video!==!0&&D.audio!==D.video)&&(X=!1),X===!0?(Ae.audioRecorder=null,Ae.videoRecorder.startRecording()):Ae.videoRecorder.initRecorder(function(){Ae.audioRecorder.initRecorder(function(){Ae.videoRecorder.startRecording(),Ae.audioRecorder.startRecording()})})}D.gif&&(O=null,typeof D.gif=="function"&&(O=D.gif),this.gifRecorder=new t(I,{type:"gif",frameRate:this.frameRate||200,quality:this.quality||10,disableLogs:this.disableLogs,recorderType:O,mimeType:he.gif}),this.gifRecorder.startRecording())},this.stopRecording=function(D){D=D||function(){},this.audioRecorder&&this.audioRecorder.stopRecording(function(O){D(O,"audio")}),this.videoRecorder&&this.videoRecorder.stopRecording(function(O){D(O,"video")}),this.gifRecorder&&this.gifRecorder.stopRecording(function(O){D(O,"gif")})},this.pauseRecording=function(){this.audioRecorder&&this.audioRecorder.pauseRecording(),this.videoRecorder&&this.videoRecorder.pauseRecording(),this.gifRecorder&&this.gifRecorder.pauseRecording()},this.resumeRecording=function(){this.audioRecorder&&this.audioRecorder.resumeRecording(),this.videoRecorder&&this.videoRecorder.resumeRecording(),this.gifRecorder&&this.gifRecorder.resumeRecording()},this.getBlob=function(D){var O={};return this.audioRecorder&&(O.audio=this.audioRecorder.getBlob()),this.videoRecorder&&(O.video=this.videoRecorder.getBlob()),this.gifRecorder&&(O.gif=this.gifRecorder.getBlob()),D&&D(O),O},this.destroy=function(){this.audioRecorder&&(this.audioRecorder.destroy(),this.audioRecorder=null),this.videoRecorder&&(this.videoRecorder.destroy(),this.videoRecorder=null),this.gifRecorder&&(this.gifRecorder.destroy(),this.gifRecorder=null)},this.getDataURL=function(D){this.getBlob(function(te){te.audio&&te.video?O(te.audio,function(ae){O(te.video,function(Ae){D({audio:ae,video:Ae})})}):te.audio?O(te.audio,function(ae){D({audio:ae})}):te.video&&O(te.video,function(ae){D({video:ae})})});function O(te,ae){if(typeof Worker<"u"){var Ae=he(function(we){postMessage(new 
FileReaderSync().readAsDataURL(we))});Ae.onmessage=function(ve){ae(ve.data)},Ae.postMessage(te)}else{var X=new FileReader;X.readAsDataURL(te),X.onload=function(ve){ae(ve.target.result)}}}function he(te){var ae=f.createObjectURL(new Blob([te.toString(),"this.onmessage = function (eee) {"+te.name+"(eee.data);}"],{type:"application/javascript"})),Ae=new Worker(ae),X;if(typeof f<"u")X=f;else if(typeof webkitURL<"u")X=webkitURL;else throw"Neither URL nor webkitURL detected.";return X.revokeObjectURL(ae),Ae}},this.writeToDisk=function(){t.writeToDisk({audio:this.audioRecorder,vid
* Storage is a standalone object used by {@link RecordRTC} to store reusable objects e.g. "new AudioContext".
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @example
* Storage.AudioContext === webkitAudioContext
* @property {webkitAudioContext} AudioContext - Keeps a reference to AudioContext object.
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
*/var z={};typeof u<"u"?z.AudioContext=u:typeof webkitAudioContext<"u"&&(z.AudioContext=webkitAudioContext),typeof t<"u"&&(t.Storage=z);function Q(){if(p||y||h)return!0;var I=navigator.userAgent,D=""+parseFloat(navigator.appVersion),O=parseInt(navigator.appVersion,10),he,te;return(F||A)&&(he=I.indexOf("Chrome"),D=I.substring(he+7)),(te=D.indexOf(";"))!==-1&&(D=D.substring(0,te)),(te=D.indexOf(" "))!==-1&&(D=D.substring(0,te)),O=parseInt(""+D,10),isNaN(O)&&(D=""+parseFloat(navigator.appVersion),O=parseInt(navigator.appVersion,10)),O>=49}/**
* MediaStreamRecorder is an abstraction layer for {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}. It is used by {@link RecordRTC} to record MediaStream(s) in both Chrome and Firefox.
* @summary Runs on top of the {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://github.com/muaz-khan|Muaz Khan}
* @typedef MediaStreamRecorder
* @class
* @example
* var config = {
* mimeType: 'video/webm', // vp8, vp9, h264, mkv, opus/vorbis
* audioBitsPerSecond : 256 * 8 * 1024,
* videoBitsPerSecond : 256 * 8 * 1024,
* bitsPerSecond: 256 * 8 * 1024, // if this is provided, skip above two
* checkForInactiveTracks: true,
* timeSlice: 1000, // concatenate interval-based blobs
* ondataavailable: function() {} // get interval-based blobs
* }
* var recorder = new MediaStreamRecorder(mediaStream, config);
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
*
* // or
* var blob = recorder.blob;
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {disableLogs:true, initCallback: function, mimeType: "video/webm", timeSlice: 1000}
* @throws Will throw an error if the first argument "MediaStream" is missing. Also throws an error if the "MediaRecorder API" is not supported by the browser.
*/function R(I,D){var ue=this;if(typeof I>"u")throw'First argument "MediaStream" is required.';if(typeof MediaRecorder>"u")throw"Your browser does not support the Media Recorder API. Please try other modules e.g. WhammyRecorder or StereoAudioRecorder.";if(D=D||{mimeType:"video/webm"},D.type==="audio"){if(S(I,"video").length&&S(I,"audio").length){var O;navigator.mozGetUserMedia?(O=new E,O.addTrack(S(I,"audio")[0])):O=new E(S(I,"audio")),I=O}(!D.mimeType||D.mimeType.toString().toLowerCase().indexOf("audio")===-1)&&(D.mimeType=F?"audio/webm":"audio/ogg"),D.mimeType&&D.mimeType.toString().toLowerCase()!=="audio/ogg"&&navigator.mozGetUserMedia&&(D.mimeType="audio/ogg")}var he=[];this.getArrayOfBlobs=function(){return he},this.record=function(){ue.blob=null,ue.clearRecordedData(),ue.timestamps=[],we=[],he=[];var ke=D;D.disableLogs||console.log("Passing following config over MediaRecorder API.",ke),X&&(X=null),F&&!Q()&&(ke="video/vp8"),typeof MediaRecorder.isTypeSupported=="function"&&ke.mimeType&&(MediaRecorder.isTypeSupported(ke.mimeType)||(D.disableLogs||console.warn("MediaRecorder API seems unable to record mimeType:",ke.mimeType),ke.mimeType=D.type==="audio"?"audio/webm":"video/webm"));try{X=new MediaRecorder(I,ke),D.mimeType=ke.mimeType}catch{X=new MediaRecorder(I)}ke.mimeType&&!MediaRecorder.isTypeSupported&&"canRecordMimeType"in X&&X.canRecordMimeType(ke.mimeType)===!1&&(D.disableLogs||console.warn("MediaRecorder API seems unable to record mimeType:",ke.mimeType)),X.ondataavailable=function(G){if(G.data&&we.push("ondataavailable: "+w(G.data.size)),typeof D.timeSlice=="number"){if(G.data&&G.data.size&&(he.push(G.data),te(),typeof D.ondataavailable=="function")){var K=D.getNativeBlob?G.data:new Blob([G.data],{type:ae(ke)});D.ondataavailable(K)}return}if(!G.data||!G.data.size||G.data.size<100||ue.blob){ue.recordingCallback&&(ue.recordingCallback(new Blob([],{type:ae(ke)})),ue.recordingCallback=null);return}ue.blob=D.getNativeBlob?G.data:new Blob([G.data],{type:ae(ke)}),ue.recordingCallback&&(ue.recordingCallback(ue.blob),ue.recordingCallback=null)},X.onstart=function(){we.push("started")},X.onpause=function(){we.push("paused")},X.onresume=function(){we.push("resumed")},X.onstop=function(){we.push("stopped")},X.onerror=function(G){G&&(G.name||(G.name="UnknownError"),we.push("error: "+G),D.disableLogs||(G.name.toString().toLowerCase().indexOf("invalidstate")!==-1?console.error("The MediaRecorder is not in a state in which the proposed operation is allowed to be executed.",G):G.name.toString().toLowerCase().indexOf("notsupported")!==-1?console.error("MIME type (",ke.mimeType,") is not supported.",G):G.name.toString().toLowerCase().indexOf("security")!==-1?console.error("MediaRecorder security error",G):G.name==="OutOfMemory"?console.error("The UA has exhaused the available memory. User agents SHOULD provide as much additional information as possible in the message attribute.",G):G.name==="IllegalStreamModification"?console.error("A modification to the stream has occurred that makes it impossible to continue recording. An example would be the addition of a Track while recording is occurring. User agents SHOULD provide as much additional information as possible in the message attribute.",G):G.name==="OtherRecordingError"?console.error("Used for an fatal error other than those listed above. 
User agents SHOULD provide as much additional information as possible in the message attribute.",G):G.name==="GenericError"?console.error("The UA cannot provide the codec or recording option that has been requested.",G):console.error("MediaRecorder Error",G)),function(K){if(!ue.manuallyStopped&&X&&X.state==="inactive"){delete D.timeslice,X.start(10*60*1e3);return}setTimeout(K,1e3)}(),X.state!=="inactive"&&X.state!=="stopped"&&X.stop())},typeof D.timeSlice=="number"?(te(),X.start(D.timeSlice)):X.start(36e5),D.initCallback&&D.initCallback()},this.timestamps=[];function te(){ue.timestamps.push(new Date().getTime()),typeof D.onTimeStamp=="function"&&D.onTimeStamp(ue.timestamps[ue.timestamps.length-1],ue.timestamps)}function ae(ke){return X&&X.m
/**
 * StereoAudioRecorder is a standalone class used by {@link RecordRTC} to bring "stereo" audio-recording in Chrome.
 * @summary JavaScript standalone object for stereo audio recording.
 * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
 * @author {@link https://MuazKhan.com|Muaz Khan}
 * @typedef StereoAudioRecorder
 * @class
 * @example
 * var recorder = new StereoAudioRecorder(MediaStream, {
 *     sampleRate: 44100,
 *     bufferSize: 4096
 * });
 * recorder.record();
 * recorder.stop(function(blob) {
 *     video.src = URL.createObjectURL(blob);
 * });
 * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
 * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {sampleRate: 44100, bufferSize: 4096, numberOfAudioChannels: 1, etc.}
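 *
 * A minimal mono-recording sketch, assuming only the config keys documented above
 * (numberOfAudioChannels and desiredSampRate are read by the implementation below;
 * "sendToServer" is a hypothetical helper):
 * var recorder = new StereoAudioRecorder(mediaStream, {
 *     numberOfAudioChannels: 1,   // record a single (left) channel
 *     desiredSampRate: 16000,     // resample the WAV output to 16 kHz
 *     bufferSize: 4096
 * });
 * recorder.record();
 * recorder.stop(function(blob) {
 *     sendToServer(blob);         // blob has type "audio/wav"
 * });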
*/function W(I,D){if(!S(I,"audio").length)throw"Your stream has no audio tracks.";D=D||{};var O=this,he=[],te=[],ae=!1,Ae=0,X,ve=2,we=D.desiredSampRate;D.leftChannel===!0&&(ve=1),D.numberOfAudioChannels===1&&(ve=1),(!ve||ve<1)&&(ve=2),D.disableLogs||console.log("StereoAudioRecorder is set to record number of channels: "+ve),typeof D.checkForInactiveTracks>"u"&&(D.checkForInactiveTracks=!0);function ue(){if(D.checkForInactiveTracks===!1)return!0;if("active"in I){if(!I.active)return!1}else if("ended"in I&&I.ended)return!1;return!0}this.record=function(){if(ue()===!1)throw"Please make sure MediaStream is active.";Ie(),be=De=!1,ae=!0,typeof D.timeSlice<"u"&&Qe()};function ke(Ge,Ye){function et(ze,Je){var ht=ze.numberOfAudioChannels,Ze=ze.leftBuffers.slice(0),bt=ze.rightBuffers.slice(0),kt=ze.sampleRate,_t=ze.internalInterleavedLength,Lt=ze.desiredSampRate;ht===2&&(Ze=gi(Ze,_t),bt=gi(bt,_t),Lt&&(Ze=Dt(Ze,Lt,kt),bt=Dt(bt,Lt,kt))),ht===1&&(Ze=gi(Ze,_t),Lt&&(Ze=Dt(Ze,Lt,kt))),Lt&&(kt=Lt);function Dt(Di,In,cn){var pi=Math.round(Di.length*(In/cn)),nt=[],V=Number((Di.length-1)/(pi-1));nt[0]=Di[0];for(var ie=1;ie<pi-1;ie++){var _e=ie*V,qe=Number(Math.floor(_e)).toFixed(),st=Number(Math.ceil(_e)).toFixed(),Rt=_e-qe;nt[ie]=en(Di[qe],Di[st],Rt)}return nt[pi-1]=Di[Di.length-1],nt}function en(Di,In,cn){return Di+(In-Di)*cn}function gi(Di,In){for(var cn=new Float64Array(In),pi=0,nt=Di.length,V=0;V<nt;V++){var ie=Di[V];cn.set(ie,pi),pi+=ie.length}return cn}function as(Di,In){for(var cn=Di.length+In.length,pi=new Float64Array(cn),nt=0,V=0;V<cn;)pi[V++]=Di[nt],pi[V++]=In[nt],nt++;return pi}function ea(Di,In,cn){for(var pi=cn.length,nt=0;nt<pi;nt++)Di.setUint8(In+nt,cn.charCodeAt(nt))}var _a;ht===2&&(_a=as(Ze,bt)),ht===1&&(_a=Ze);var Ia=_a.length,Qs=44+Ia*2,Pa=new ArrayBuffer(Qs),ci=new DataView(Pa);ea(ci,0,"RIFF"),ci.setUint32(4,36+Ia*2,!0),ea(ci,8,"WAVE"),ea(ci,12,"fmt "),ci.setUint32(16,16,!0),ci.setUint16(20,1,!0),ci.setUint16(22,ht,!0),ci.setUint32(24,kt,!0),ci.setUint32(28,kt*ht*2,!0),ci.setUint16(32,ht*2,!0),ci.setUint16(34,16,!0),ea(ci,36,"data"),ci.setUint32(40,Ia*2,!0);for(var Wl=Ia,Tr=44,Ir=1,Pr=0;Pr<Wl;Pr++)ci.setInt16(Tr,_a[Pr]*(32767*Ir),!0),Tr+=2;if(Je)return Je({buffer:Pa,view:ci});postMessage({buffer:Pa,view:ci})}if(Ge.noWorker){et(Ge,function(ze){Ye(ze.buffer,ze.view)});return}var ye=G(et);ye.onmessage=function(ze){Ye(ze.data.buffer,ze.data.view),f.revokeObjectURL(ye.workerURL),ye.terminate()},ye.postMessage(Ge)}function G(Ge){var Ye=f.createObjectURL(new Blob([Ge.toString(),";this.onmessage = function (eee) {"+Ge.name+"(eee.data);}"],{type:"application/javascript"})),et=new Worker(Ye);return et.workerURL=Ye,et}this.stop=function(Ge){Ge=Ge||function(){},ae=!1,ke({desiredSampRate:we,sampleRate:ce,numberOfAudioChannels:ve,internalInterleavedLength:Ae,leftBuffers:he,rightBuffers:ve===1?[]:te,noWorker:D.noWorker},function(Ye,et){O.blob=new Blob([et],{type:"audio/wav"}),O.buffer=new ArrayBuffer(et.buffer.byteLength),O.view=et,O.sampleRate=we||ce,O.bufferSize=Be,O.length=Ae,be=!1,Ge&&Ge(O.blob)})},typeof t.Storage>"u"&&(t.Storage={AudioContextConstructor:null,AudioContext:window.AudioContext||window.webkitAudioContext}),(!t.Storage.AudioContextConstructor||t.Storage.AudioContextConstructor.state==="closed")&&(t.Storage.AudioContextConstructor=new t.Storage.AudioContext);var K=t.Storage.AudioContextConstructor,re=K.createMediaStreamSource(I),Ce=[0,256,512,1024,2048,4096,8192,16384],Be=typeof D.bufferSize>"u"?4096:D.bufferSize;if(Ce.indexOf(Be)===-1&&(D.disableLogs||console.log("Legal values for 
buffer-size are "+JSON.stringify(Ce,null," "))),K.createJavaScriptNode)X=K.createJavaScriptNode(Be,ve,ve);else if(K.createScriptProcessor)X=K.createScriptProcessor(Be,ve,ve);else throw"WebAudio API has no support on this browser.";re.connect(X),D.bufferSize||(Be=X.bufferSize);var ce=typeof D.sampleRate<"u"?D.sampleRate:K.sampleRate||44100;(ce<22050||ce>96e3)&&(D.disableLogs||console.log("sample-rate must be under range 22050 and 96000.")),D.disableLogs||D.desiredSampRate&&console.log("Desired sample-rate: "+D.desiredSampRate);var De=!1;th
/**
 * CanvasRecorder is a standalone class used by {@link RecordRTC} to bring HTML5-canvas recording into WebM video. It uses the HTML2Canvas library and runs on top of {@link Whammy}.
 * @summary HTML2Canvas recording into video WebM.
 * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
 * @author {@link https://MuazKhan.com|Muaz Khan}
 * @typedef CanvasRecorder
 * @class
 * @example
 * var recorder = new CanvasRecorder(htmlElement, { disableLogs: true, useWhammyRecorder: true });
 * recorder.record();
 * recorder.stop(function(blob) {
 *     video.src = URL.createObjectURL(blob);
 * });
 * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
 * @param {HTMLElement} htmlElement - querySelector/getElementById/getElementsByTagName[0]/etc.
* @param {object} config - {disableLogs:true, initCallback: function}
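 *
 * A minimal sketch for recording a live <canvas> element, assuming only the options
 * documented above (the implementation below prefers canvas.captureStream() and falls
 * back to Whammy; html2canvas must be available, as the code below requires):
 * var canvas = document.querySelector('canvas');
 * var recorder = new CanvasRecorder(canvas, { disableLogs: true });
 * recorder.record();
 * setTimeout(function() {
 *     recorder.stop(function(blob) {
 *         video.src = URL.createObjectURL(blob);   // "video" is an assumed <video> element
 *     });
 * }, 5000);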
*/function Z(I,D){if(typeof html2canvas>"u")throw"Please link: https://www.webrtc-experiment.com/screenshot.js";D=D||{},D.frameInterval||(D.frameInterval=10);var O=!1;["captureStream","mozCaptureStream","webkitCaptureStream"].forEach(function(Ce){Ce in document.createElement("canvas")&&(O=!0)});var he=(!!window.webkitRTCPeerConnection||!!window.webkitGetUserMedia)&&!!window.chrome,te=50,ae=navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./);he&&ae&&ae[2]&&(te=parseInt(ae[2],10)),he&&te<52&&(O=!1),D.useWhammyRecorder&&(O=!1);var Ae,X;if(O)if(D.disableLogs||console.log("Your browser supports both MediRecorder API and canvas.captureStream!"),I instanceof HTMLCanvasElement)Ae=I;else if(I instanceof CanvasRenderingContext2D)Ae=I.canvas;else throw"Please pass either HTMLCanvasElement or CanvasRenderingContext2D.";else navigator.mozGetUserMedia&&(D.disableLogs||console.error("Canvas recording is NOT supported in Firefox."));var ve;this.record=function(){if(ve=!0,O&&!D.useWhammyRecorder){var Ce;"captureStream"in Ae?Ce=Ae.captureStream(25):"mozCaptureStream"in Ae?Ce=Ae.mozCaptureStream(25):"webkitCaptureStream"in Ae&&(Ce=Ae.webkitCaptureStream(25));try{var Be=new E;Be.addTrack(S(Ce,"video")[0]),Ce=Be}catch{}if(!Ce)throw"captureStream API are NOT available.";X=new R(Ce,{mimeType:D.mimeType||"video/webm"}),X.record()}else re.frames=[],K=new Date().getTime(),G();D.initCallback&&D.initCallback()},this.getWebPImages=function(Ce){if(I.nodeName.toLowerCase()!=="canvas"){Ce();return}var Be=re.frames.length;re.frames.forEach(function(ce,De){var Ie=Be-De;D.disableLogs||console.log(Ie+"/"+Be+" frames remaining"),D.onEncodingCallback&&D.onEncodingCallback(Ie,Be);var fe=ce.image.toDataURL("image/webp",1);re.frames[De].image=fe}),D.disableLogs||console.log("Generating WebM"),Ce()},this.stop=function(Ce){ve=!1;var Be=this;if(O&&X){X.stop(Ce);return}this.getWebPImages(function(){re.compile(function(ce){D.disableLogs||console.log("Recording finished!"),Be.blob=ce,Be.blob.forEach&&(Be.blob=new Blob([],{type:"video/webm"})),Ce&&Ce(Be.blob),re.frames=[]})})};var we=!1;this.pause=function(){if(we=!0,X instanceof R){X.pause();return}},this.resume=function(){if(we=!1,X instanceof R){X.resume();return}ve||this.record()},this.clearRecordedData=function(){ve&&this.stop(ue),ue()};function ue(){re.frames=[],ve=!1,we=!1}this.name="CanvasRecorder",this.toString=function(){return this.name};function ke(){var Ce=document.createElement("canvas"),Be=Ce.getContext("2d");return Ce.width=I.width,Ce.height=I.height,Be.drawImage(I,0,0),Ce}function G(){if(we)return K=new Date().getTime(),setTimeout(G,500);if(I.nodeName.toLowerCase()==="canvas"){var Ce=new Date().getTime()-K;K=new Date().getTime(),re.frames.push({image:ke(),duration:Ce}),ve&&setTimeout(G,D.frameInterval);return}html2canvas(I,{grabMouse:typeof D.showMousePointer>"u"||D.showMousePointer,onrendered:function(Be){var ce=new Date().getTime()-K;if(!ce)return setTimeout(G,D.frameInterval);K=new Date().getTime(),re.frames.push({image:Be.toDataURL("image/webp",1),duration:ce}),ve&&setTimeout(G,D.frameInterval)}})}var K=new Date().getTime(),re=new Fe.Video(100)}typeof t<"u"&&(t.CanvasRecorder=Z);/**
 * WhammyRecorder is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It runs on top of {@link Whammy}.
 * @summary Video recording feature in Chrome.
 * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
 * @author {@link https://MuazKhan.com|Muaz Khan}
 * @typedef WhammyRecorder
 * @class
 * @example
 * var recorder = new WhammyRecorder(mediaStream);
 * recorder.record();
 * recorder.stop(function(blob) {
 *     video.src = URL.createObjectURL(blob);
 * });
 * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
 * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {disableLogs: true, initCallback: function, video: HTMLVideoElement, etc.}
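 *
 * A minimal sketch, assuming only the options documented above (width/height size the
 * capture canvas used by the implementation below):
 * var recorder = new WhammyRecorder(mediaStream, {
 *     width: 320,
 *     height: 240,
 *     frameInterval: 20    // milliseconds between captured frames
 * });
 * recorder.record();
 * recorder.stop(function(blob) {
 *     video.src = URL.createObjectURL(blob);
 * });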
*/function de(I,D){D=D||{},D.frameInterval||(D.frameInterval=10),D.disableLogs||console.log("Using frames-interval:",D.frameInterval),this.record=function(){D.width||(D.width=320),D.height||(D.height=240),D.video||(D.video={width:D.width,height:D.height}),D.canvas||(D.canvas={width:D.width,height:D.height}),ve.width=D.canvas.width||320,ve.height=D.canvas.height||240,we=ve.getContext("2d"),D.video&&D.video instanceof HTMLVideoElement?(ue=D.video.cloneNode(),D.initCallback&&D.initCallback()):(ue=document.createElement("video"),U(I,ue),ue.onloadedmetadata=function(){D.initCallback&&D.initCallback()},ue.width=D.video.width,ue.height=D.video.height),ue.muted=!0,ue.play(),ke=new Date().getTime(),G=new Fe.Video,D.disableLogs||(console.log("canvas resolutions",ve.width,"*",ve.height),console.log("video width/height",ue.width||ve.width,"*",ue.height||ve.height)),O(D.frameInterval)};function O(K){K=typeof K<"u"?K:10;var re=new Date().getTime()-ke;if(!re)return setTimeout(O,K,K);if(Ae)return ke=new Date().getTime(),setTimeout(O,100);ke=new Date().getTime(),ue.paused&&ue.play(),we.drawImage(ue,0,0,ve.width,ve.height),G.frames.push({duration:re,image:ve.toDataURL("image/webp")}),ae||setTimeout(O,K,K)}function he(K){var re=-1,Ce=K.length;(function Be(){if(re++,re===Ce){K.callback();return}setTimeout(function(){K.functionToLoop(Be,re)},1)})()}function te(K,re,Ce,Be,ce){var De=document.createElement("canvas");De.width=ve.width,De.height=ve.height;var Ie=De.getContext("2d"),fe=[],be=K.length,Ne={r:0,g:0,b:0},Ue=Math.sqrt(Math.pow(255,2)+Math.pow(255,2)+Math.pow(255,2)),Qe=0,Ge=0,Ye=!1;he({length:be,functionToLoop:function(et,ye){var ze,Je,ht,Ze=function(){!Ye&&ht-ze<=ht*Ge||(Ye=!0,fe.push(K[ye])),et()};if(Ye)Ze();else{var bt=new Image;bt.onload=function(){Ie.drawImage(bt,0,0,ve.width,ve.height);var kt=Ie.getImageData(0,0,ve.width,ve.height);ze=0,Je=kt.data.length,ht=kt.data.length/4;for(var _t=0;_t<Je;_t+=4){var Lt={r:kt.data[_t],g:kt.data[_t+1],b:kt.data[_t+2]},Dt=Math.sqrt(Math.pow(Lt.r-Ne.r,2)+Math.pow(Lt.g-Ne.g,2)+Math.pow(Lt.b-Ne.b,2));Dt<=Ue*Qe&&ze++}Ze()},bt.src=K[ye].image}},callback:function(){fe=fe.concat(K.slice(be)),fe.length<=0&&fe.push(K[K.length-1]),ce(fe)}})}var ae=!1;this.stop=function(K){K=K||function(){},ae=!0;var re=this;setTimeout(function(){te(G.frames,-1,null,null,function(Ce){G.frames=Ce,D.advertisement&&D.advertisement.length&&(G.frames=D.advertisement.concat(G.frames)),G.compile(function(Be){re.blob=Be,re.blob.forEach&&(re.blob=new Blob([],{type:"video/webm"})),K&&K(re.blob)})})},10)};var Ae=!1;this.pause=function(){Ae=!0},this.resume=function(){Ae=!1,ae&&this.record()},this.clearRecordedData=function(){ae||this.stop(X),X()};function X(){G.frames=[],ae=!0,Ae=!1}this.name="WhammyRecorder",this.toString=function(){return this.name};var ve=document.createElement("canvas"),we=ve.getContext("2d"),ue,ke,G}typeof t<"u"&&(t.WhammyRecorder=de);/**
 * Whammy is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It is written by {@link https://github.com/antimatter15|antimatter15}
 * @summary A real-time JavaScript WebM encoder based on a canvas hack.
 * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
 * @author {@link https://MuazKhan.com|Muaz Khan}
 * @typedef Whammy
 * @class
 * @example
 * var recorder = new Whammy.Video(15);
 * recorder.add(context || canvas || dataURL);
 * recorder.compile(function(webmBlob) { });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
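 *
 * A minimal end-to-end sketch, assuming the callback-based compile() implemented below
 * (add() accepts a canvas, a 2D context, or a WebP data-URI, with an optional per-frame
 * duration in milliseconds as the second argument):
 * var encoder = new Whammy.Video(100);           // default duration for each frame
 * encoder.add(canvas);                           // call once per captured frame
 * encoder.add(canvas, 40);                       // or override the duration per frame
 * encoder.compile(function(webmBlob) {
 *     video.src = URL.createObjectURL(webmBlob); // "video" is an assumed <video> element
 * });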
*/var Fe=function(){function I(he){this.frames=[],this.duration=he||1,this.quality=.8}I.prototype.add=function(he,te){if("canvas"in he&&(he=he.canvas),"toDataURL"in he&&(he=he.toDataURL("image/webp",this.quality)),!/^data:image\/webp;base64,/ig.test(he))throw"Input must be formatted properly as a base64 encoded DataURI of type image/webp";this.frames.push({image:he,duration:te||this.duration})};function D(he){var te=f.createObjectURL(new Blob([he.toString(),"this.onmessage = function (eee) {"+he.name+"(eee.data);}"],{type:"application/javascript"})),ae=new Worker(te);return f.revokeObjectURL(te),ae}function O(he){function te(ce){var De=Ae(ce);if(!De)return[];for(var Ie=3e4,fe=[{id:440786851,data:[{data:1,id:17030},{data:1,id:17143},{data:4,id:17138},{data:8,id:17139},{data:"webm",id:17026},{data:2,id:17031},{data:2,id:17029}]},{id:408125543,data:[{id:357149030,data:[{data:1e6,id:2807729},{data:"whammy",id:19840},{data:"whammy",id:22337},{data:Ce(De.duration),id:17545}]},{id:374648427,data:[{id:174,data:[{data:1,id:215},{data:1,id:29637},{data:0,id:156},{data:"und",id:2274716},{data:"V_VP8",id:134},{data:"VP8",id:2459272},{data:1,id:131},{id:224,data:[{data:De.width,id:176},{data:De.height,id:186}]}]}]}]}],be=0,Ne=0;be<ce.length;){var Ue=[],Qe=0;do Ue.push(ce[be]),Qe+=ce[be].duration,be++;while(be<ce.length&&Qe<Ie);var Ge=0,Ye={id:524531317,data:ae(Ne,Ge,Ue)};fe[1].data.push(Ye),Ne+=Qe}return ue(fe)}function ae(ce,De,Ie){return[{data:ce,id:231}].concat(Ie.map(function(fe){var be=ke({frame:fe.data.slice(4),trackNum:1,timecode:Math.round(De)});return De+=fe.duration,{data:be,id:163}}))}function Ae(ce){if(!ce[0]){postMessage({error:"Something went wrong. Maybe WebP format is not supported in the current browser."});return}for(var De=ce[0].width,Ie=ce[0].height,fe=ce[0].duration,be=1;be<ce.length;be++)fe+=ce[be].duration;return{duration:fe,width:De,height:Ie}}function X(ce){for(var De=[];ce>0;)De.push(ce&255),ce=ce>>8;return new Uint8Array(De.reverse())}function ve(ce){return new Uint8Array(ce.split("").map(function(De){return De.charCodeAt(0)}))}function we(ce){var De=[],Ie=ce.length%8?new Array(9-ce.length%8).join("0"):"";ce=Ie+ce;for(var fe=0;fe<ce.length;fe+=8)De.push(parseInt(ce.substr(fe,8),2));return new Uint8Array(De)}function ue(ce){for(var De=[],Ie=0;Ie<ce.length;Ie++){var fe=ce[Ie].data;typeof fe=="object"&&(fe=ue(fe)),typeof fe=="number"&&(fe=we(fe.toString(2))),typeof fe=="string"&&(fe=ve(fe));var be=fe.size||fe.byteLength||fe.length,Ne=Math.ceil(Math.ceil(Math.log(be)/Math.log(2))/8),Ue=be.toString(2),Qe=new Array(Ne*7+7+1-Ue.length).join("0")+Ue,Ge=new Array(Ne).join("0")+"1"+Qe;De.push(X(ce[Ie].id)),De.push(we(Ge)),De.push(fe)}return new Blob(De,{type:"video/webm"})}function ke(ce){var De=0;De|=128;var Ie=[ce.trackNum|128,ce.timecode>>8,ce.timecode&255,De].map(function(fe){return String.fromCharCode(fe)}).join("")+ce.frame;return Ie}function G(ce){for(var De=ce.RIFF[0].WEBP[0],Ie=De.indexOf("*"),fe=0,be=[];fe<4;fe++)be[fe]=De.charCodeAt(Ie+3+fe);var Ne,Ue,Qe;return Qe=be[1]<<8|be[0],Ne=Qe&16383,Qe=be[3]<<8|be[2],Ue=Qe&16383,{width:Ne,height:Ue,data:De,riff:ce}}function K(ce,De){return parseInt(ce.substr(De+4,4).split("").map(function(Ie){var fe=Ie.charCodeAt(0).toString(2);return new Array(8-fe.length+1).join("0")+fe}).join(""),2)}function re(ce){for(var De=0,Ie={};De<ce.length;){var fe=ce.substr(De,4),be=K(ce,De),Ne=ce.substr(De+4+4,be);De+=8+be,Ie[fe]=Ie[fe]||[],fe==="RIFF"||fe==="LIST"?Ie[fe].push(re(Ne)):Ie[fe].push(Ne)}return Ie}function Ce(ce){return[].slice.call(new 
Uint8Array(new Float64Array([ce]).buffer),0).map(function(De){return String.fromCharCode(De)}).reverse().join("")}var Be=new te(he.map(function(ce){var De=G(re(atob(ce.image.slice(23))));return De.duration=ce.duration,De}));postMessage(Be)}return I.prototype.compile=function(he){var te=D(O);te.onmessage=function(ae){if(ae.data.error){console.error(ae.data.error);return}he(ae.data)},te.postMessage(this.frames)},{Video:I}}();typeof t<"u"&&(t.Whammy=Fe);/**
 * DiskStorage is a standalone object used by {@link RecordRTC} to store recorded blobs in IndexedDB storage.
 * @summary Writing blobs into IndexedDB.
 * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
 * @author {@link https://MuazKhan.com|Muaz Khan}
 * @example
 * DiskStorage.Store({
 *     audioBlob: yourAudioBlob,
 *     videoBlob: yourVideoBlob,
 *     gifBlob: yourGifBlob
 * });
 * DiskStorage.Fetch(function(dataURL, type) {
 *     if(type === 'audioBlob') { }
 *     if(type === 'videoBlob') { }
 *     if(type === 'gifBlob') { }
 * });
 * // DiskStorage.dataStoreName = 'recordRTC';
 * // DiskStorage.onError = function(error) { };
 * @property {function} init - This method must be called once to initialize the IndexedDB ObjectStore, though it is invoked automatically by Store/Fetch.
 * @property {function} Fetch - This method fetches stored blobs from IndexedDB.
 * @property {function} Store - This method stores blobs in IndexedDB.
 * @property {function} onError - This function is invoked for any known/unknown error.
 * @property {string} dataStoreName - Name of the ObjectStore created in IndexedDB storage.
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
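 *
 * A minimal configure-then-store sketch, assuming only the members documented above
 * ("recordedBlob" is an assumed Blob produced by one of the recorders in this file):
 * DiskStorage.dataStoreName = 'my-recordings';
 * DiskStorage.onError = function(error) {
 *     console.error('IndexedDB error', error);
 * };
 * DiskStorage.Store({ videoBlob: recordedBlob });
 * DiskStorage.Fetch(function(result, type) {
 *     if (type === 'videoBlob') {
 *         console.log('restored stored value', result);
 *     }
 * });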
*/var ge={init:function(){var I=this;if(typeof indexedDB>"u"||typeof indexedDB.open>"u"){console.error("IndexedDB API are not available in this browser.");return}var D=1,O=this.dbName||location.href.replace(/\/|:|#|%|\.|\[|\]/g,""),he,te=indexedDB.open(O,D);function ae(X){X.createObjectStore(I.dataStoreName)}function Ae(){var X=he.transaction([I.dataStoreName],"readwrite");I.videoBlob&&X.objectStore(I.dataStoreName).put(I.videoBlob,"videoBlob"),I.gifBlob&&X.objectStore(I.dataStoreName).put(I.gifBlob,"gifBlob"),I.audioBlob&&X.objectStore(I.dataStoreName).put(I.audioBlob,"audioBlob");function ve(we){X.objectStore(I.dataStoreName).get(we).onsuccess=function(ue){I.callback&&I.callback(ue.target.result,we)}}ve("audioBlob"),ve("videoBlob"),ve("gifBlob")}te.onerror=I.onError,te.onsuccess=function(){if(he=te.result,he.onerror=I.onError,he.setVersion)if(he.version!==D){var X=he.setVersion(D);X.onsuccess=function(){ae(he),Ae()}}else Ae();else Ae()},te.onupgradeneeded=function(X){ae(X.target.result)}},Fetch:function(I){return this.callback=I,this.init(),this},Store:function(I){return this.audioBlob=I.audioBlob,this.videoBlob=I.videoBlob,this.gifBlob=I.gifBlob,this.init(),this},onError:function(I){console.error(JSON.stringify(I,null," "))},dataStoreName:"recordRTC",dbName:null};typeof t<"u"&&(t.DiskStorage=ge);/**
 * GifRecorder is a standalone class used by {@link RecordRTC} to record video or canvas into an animated GIF.
 * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
 * @author {@link https://MuazKhan.com|Muaz Khan}
 * @typedef GifRecorder
 * @class
 * @example
 * var recorder = new GifRecorder(mediaStream || canvas || context, { onGifPreview: function, onGifRecordingStarted: function, width: 1280, height: 720, frameRate: 200, quality: 10 });
 * recorder.record();
 * recorder.stop(function(blob) {
 *     img.src = URL.createObjectURL(blob);
 * });
 * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
 * @param {MediaStream} mediaStream - MediaStream object or HTMLCanvasElement or CanvasRenderingContext2D.
* @param {object} config - {disableLogs:true, initCallback: function, width: 320, height: 240, frameRate: 200, quality: 10}
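 *
 * A minimal sketch for turning a webcam stream into a GIF, assuming only the options
 * documented above (GIFEncoder is loaded on demand by the implementation below;
 * "img" is an assumed <img> element):
 * var recorder = new GifRecorder(mediaStream, {
 *     width: 320,
 *     height: 240,
 *     frameRate: 200,   // delay between GIF frames, in milliseconds
 *     quality: 10
 * });
 * recorder.record();
 * recorder.stop(function(blob) {
 *     img.src = URL.createObjectURL(blob);   // blob has type "image/gif"
 * });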
*/function le(I,D){if(typeof GIFEncoder>"u"){var O=document.createElement("script");O.src="https://www.webrtc-experiment.com/gif-recorder.js",(document.body||document.documentElement).appendChild(O)}D=D||{};var he=I instanceof CanvasRenderingContext2D||I instanceof HTMLCanvasElement;this.record=function(){if(typeof GIFEncoder>"u"){setTimeout(K.record,1e3);return}if(!ve){setTimeout(K.record,1e3);return}he||(D.width||(D.width=we.offsetWidth||320),D.height||(D.height=we.offsetHeight||240),D.video||(D.video={width:D.width,height:D.height}),D.canvas||(D.canvas={width:D.width,height:D.height}),Ae.width=D.canvas.width||320,Ae.height=D.canvas.height||240,we.width=D.video.width||320,we.height=D.video.height||240),G=new GIFEncoder,G.setRepeat(0),G.setDelay(D.frameRate||200),G.setQuality(D.quality||10),G.start(),typeof D.onGifRecordingStarted=="function"&&D.onGifRecordingStarted();function re(Ce){if(K.clearedRecordedData!==!0){if(te)return setTimeout(function(){re(Ce)},100);ue=r(re),typeof ke===void 0&&(ke=Ce),!(Ce-ke<90)&&(!he&&we.paused&&we.play(),he||X.drawImage(we,0,0,Ae.width,Ae.height),D.onGifPreview&&D.onGifPreview(Ae.toDataURL("image/png")),G.addFrame(X),ke=Ce)}}ue=r(re),D.initCallback&&D.initCallback()},this.stop=function(re){re=re||function(){},ue&&l(ue),this.blob=new Blob([new Uint8Array(G.stream().bin)],{type:"image/gif"}),re(this.blob),G.stream().bin=[]};var te=!1;this.pause=function(){te=!0},this.resume=function(){te=!1},this.clearRecordedData=function(){K.clearedRecordedData=!0,ae()};function ae(){G&&(G.stream().bin=[])}this.name="GifRecorder",this.toString=function(){return this.name};var Ae=document.createElement("canvas"),X=Ae.getContext("2d");he&&(I instanceof CanvasRenderingContext2D?(X=I,Ae=X.canvas):I instanceof HTMLCanvasElement&&(X=I.getContext("2d"),Ae=I));var ve=!0;if(!he){var we=document.createElement("video");we.muted=!0,we.autoplay=!0,we.playsInline=!0,ve=!1,we.onloadedmetadata=function(){ve=!0},U(I,we),we.play()}var ue=null,ke,G,K=this}typeof t<"u"&&(t.GifRecorder=le);function se(I,D){var O="Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45";(function(fe){typeof t<"u"||fe&&(typeof window<"u"||typeof ii>"u"||(ii.navigator={userAgent:O,getUserMedia:function(){}},ii.console||(ii.console={}),(typeof ii.console.log>"u"||typeof ii.console.error>"u")&&(ii.console.error=ii.console.log=ii.console.log||function(){console.log(arguments)}),typeof document>"u"&&(fe.document={documentElement:{appendChild:function(){return""}}},document.createElement=document.captureStream=document.mozCaptureStream=function(){var be={getContext:function(){return be},play:function(){},pause:function(){},drawImage:function(){},toDataURL:function(){return""},style:{}};return be},fe.HTMLVideoElement=function(){}),typeof location>"u"&&(fe.location={protocol:"file:",href:"",hash:""}),typeof screen>"u"&&(fe.screen={width:0,height:0}),typeof we>"u"&&(fe.URL={createObjectURL:function(){return""},revokeObjectURL:function(){return""}}),fe.window=ii))})(typeof ii<"u"?ii:null),D=D||"multi-streams-mixer";var he=[],te=!1,ae=document.createElement("canvas"),Ae=ae.getContext("2d");ae.style.opacity=0,ae.style.position="absolute",ae.style.zIndex=-1,ae.style.top="-1000em",ae.style.left="-1000em",ae.className=D,(document.body||document.documentElement).appendChild(ae),this.disableLogs=!1,this.frameInterval=10,this.width=360,this.height=240,this.useGainNode=!0;var X=this,ve=window.AudioContext;typeof ve>"u"&&(typeof webkitAudioContext<"u"&&(ve=webkitAudioContext),typeof 
mozAudioContext<"u"&&(ve=mozAudioContext));var we=window.URL;typeof we>"u"&&typeof webkitURL<"u"&&(we=webkitURL),typeof navigator<"u"&&typeof navigator.getUserMedia>"u"&&(typeof navigator.webkitGetUserMedia<"u"&&(navigator.getUserMedia=navigator.webkitGetUserMedia),typeof navigator.mozGetUserMedia<"u"&&(navigator.getUserMedia=navigator.mozGetUserMedia));var ue=window.MediaStream;typeof ue>"u"&&typeof webkitMediaStream<"u"&&(ue=webkitMediaStream),typeof ue<"u"&&typeof ue.prototype.stop>"u"&&(ue.prototype.stop=function(){this.getTracks().forEach(function(f
/**
 * MultiStreamRecorder can record multiple videos in a single container.
 * @summary Multi-videos recorder.
 * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
 * @author {@link https://MuazKhan.com|Muaz Khan}
 * @typedef MultiStreamRecorder
 * @class
 * @example
 * var options = {
 *     mimeType: 'video/webm'
 * }
 * var recorder = new MultiStreamRecorder(ArrayOfMediaStreams, options);
 * recorder.record();
 * recorder.stop(function(blob) {
 *     video.src = URL.createObjectURL(blob);
 *
 *     // or
 *     var blob = recorder.blob;
 * });
 * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
 * @param {MediaStreams} mediaStreams - Array of MediaStreams.
* @param {object} config - {disableLogs:true, frameInterval: 1, mimeType: "video/webm"}
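 *
 * A minimal sketch for mixing a camera stream and a screen stream into one WebM file,
 * assuming only the options documented above ("cameraStream" and "screenStream" are
 * assumed MediaStream objects obtained elsewhere):
 * var recorder = new MultiStreamRecorder([cameraStream, screenStream], {
 *     mimeType: 'video/webm',
 *     video: { width: 360, height: 240 }
 * });
 * recorder.record();
 * // streams can also be appended while recording:
 * // recorder.addStreams(anotherStream);
 * recorder.stop(function(blob) {
 *     video.src = URL.createObjectURL(blob);
 * });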
*/function me(I,D){I=I||[];var O=this,he,te;D=D||{elementClass:"multi-streams-mixer",mimeType:"video/webm",video:{width:360,height:240}},D.frameInterval||(D.frameInterval=10),D.video||(D.video={}),D.video.width||(D.video.width=360),D.video.height||(D.video.height=240),this.record=function(){he=new se(I,D.elementClass||"multi-streams-mixer"),ae().length&&(he.frameInterval=D.frameInterval||10,he.width=D.video.width||360,he.height=D.video.height||240,he.startDrawingFrames()),D.previewStream&&typeof D.previewStream=="function"&&D.previewStream(he.getMixedStream()),te=new R(he.getMixedStream(),D),te.record()};function ae(){var Ae=[];return I.forEach(function(X){S(X,"video").forEach(function(ve){Ae.push(ve)})}),Ae}this.stop=function(Ae){te&&te.stop(function(X){O.blob=X,Ae(X),O.clearRecordedData()})},this.pause=function(){te&&te.pause()},this.resume=function(){te&&te.resume()},this.clearRecordedData=function(){te&&(te.clearRecordedData(),te=null),he&&(he.releaseStreams(),he=null)},this.addStreams=function(Ae){if(!Ae)throw"First parameter is required.";Ae instanceof Array||(Ae=[Ae]),I.concat(Ae),!(!te||!he)&&(he.appendStreams(Ae),D.previewStream&&typeof D.previewStream=="function"&&D.previewStream(he.getMixedStream()))},this.resetVideoStreams=function(Ae){he&&(Ae&&!(Ae instanceof Array)&&(Ae=[Ae]),he.resetVideoStreams(Ae))},this.getMixer=function(){return he},this.name="MultiStreamRecorder",this.toString=function(){return this.name}}typeof t<"u"&&(t.MultiStreamRecorder=me);/**
 * RecordRTCPromisesHandler adds promise support to {@link RecordRTC}. Try a {@link https://github.com/muaz-khan/RecordRTC/blob/master/simple-demos/RecordRTCPromisesHandler.html|demo here}
 * @summary Promises for {@link RecordRTC}
 * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
 * @author {@link https://MuazKhan.com|Muaz Khan}
 * @typedef RecordRTCPromisesHandler
 * @class
 * @example
 * var recorder = new RecordRTCPromisesHandler(mediaStream, options);
 * recorder.startRecording()
 *     .then(successCB)
 *     .catch(errorCB);
 * // Note: You can access all RecordRTC API using "recorder.recordRTC" e.g.
 * recorder.recordRTC.onStateChanged = function(state) {};
 * recorder.recordRTC.setRecordingDuration(5000);
 * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
 * @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc.
 * @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}
 * @throws Will throw an error if the "new" keyword is not used to initiate "RecordRTCPromisesHandler". Also throws an error if the first argument "MediaStream" is missing.
* @requires {@link RecordRTC}
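 *
 * A minimal async/await sketch, assuming only the promise-returning methods implemented
 * below (each one resolves once the underlying RecordRTC call completes):
 * async function recordFiveSeconds(mediaStream) {
 *     var recorder = new RecordRTCPromisesHandler(mediaStream, { type: 'video' });
 *     await recorder.startRecording();
 *     await new Promise(function(resolve) { setTimeout(resolve, 5000); });
 *     await recorder.stopRecording();
 *     var blob = await recorder.getBlob();
 *     return blob;
 * }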
*/function $(I,D){if(!this)throw'Use "new RecordRTCPromisesHandler()"';if(typeof I>"u")throw'First argument "MediaStream" is required.';var O=this;O.recordRTC=new t(I,D),this.startRecording=function(){return new Promise(function(he,te){try{O.recordRTC.startRecording(),he()}catch(ae){te(ae)}})},this.stopRecording=function(){return new Promise(function(he,te){try{O.recordRTC.stopRecording(function(ae){if(O.blob=O.recordRTC.getBlob(),!O.blob||!O.blob.size){te("Empty blob.",O.blob);return}he(ae)})}catch(ae){te(ae)}})},this.pauseRecording=function(){return new Promise(function(he,te){try{O.recordRTC.pauseRecording(),he()}catch(ae){te(ae)}})},this.resumeRecording=function(){return new Promise(function(he,te){try{O.recordRTC.resumeRecording(),he()}catch(ae){te(ae)}})},this.getDataURL=function(he){return new Promise(function(te,ae){try{O.recordRTC.getDataURL(function(Ae){te(Ae)})}catch(Ae){ae(Ae)}})},this.getBlob=function(){return new Promise(function(he,te){try{he(O.recordRTC.getBlob())}catch(ae){te(ae)}})},this.getInternalRecorder=function(){return new Promise(function(he,te){try{he(O.recordRTC.getInternalRecorder())}catch(ae){te(ae)}})},this.reset=function(){return new Promise(function(he,te){try{he(O.recordRTC.reset())}catch(ae){te(ae)}})},this.destroy=function(){return new Promise(function(he,te){try{he(O.recordRTC.destroy())}catch(ae){te(ae)}})},this.getState=function(){return new Promise(function(he,te){try{he(O.recordRTC.getState())}catch(ae){te(ae)}})},this.blob=null,this.version="5.6.2"}typeof t<"u"&&(t.RecordRTCPromisesHandler=$);/**
 * WebAssemblyRecorder lets you create WebM videos in JavaScript via WebAssembly. The library consumes raw RGBA32 buffers (4 bytes per pixel) and turns them into a WebM video with the given framerate and quality. This makes it compatible out-of-the-box with ImageData from a <canvas>. With realtime mode you can also use webm-wasm for streaming WebM videos.
 * @summary Video recording feature in Chrome, Firefox and maybe Edge.
 * @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
 * @author {@link https://MuazKhan.com|Muaz Khan}
 * @typedef WebAssemblyRecorder
 * @class
 * @example
 * var recorder = new WebAssemblyRecorder(mediaStream);
 * recorder.record();
 * recorder.stop(function(blob) {
 *     video.src = URL.createObjectURL(blob);
 * });
 * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
 * @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {webAssemblyPath:'webm-wasm.wasm',workerPath: 'webm-worker.js', frameRate: 30, width: 1920, height: 1080, bitrate: 1024, realtime: true}
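 *
 * A minimal sketch with self-hosted worker and wasm files, assuming only the options
 * documented above (the two paths are placeholders for wherever webm-worker.js and
 * webm-wasm.wasm are actually served from):
 * var recorder = new WebAssemblyRecorder(mediaStream, {
 *     workerPath: '/libs/webm-worker.js',
 *     webAssemblyPath: '/libs/webm-wasm.wasm',
 *     width: 1280,
 *     height: 720,
 *     frameRate: 30,
 *     bitrate: 1024
 * });
 * recorder.record();
 * recorder.stop(function(blob) {
 *     video.src = URL.createObjectURL(blob);
 * });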
*/function q(I,D){(typeof ReadableStream>"u"||typeof WritableStream>"u")&&console.error("Following polyfill is strongly recommended: https://unpkg.com/@mattiasbuelens/web-streams-polyfill/dist/polyfill.min.js"),D=D||{},D.width=D.width||640,D.height=D.height||480,D.frameRate=D.frameRate||30,D.bitrate=D.bitrate||1200,D.realtime=D.realtime||!0;var O;function he(){return new ReadableStream({start:function(we){var ue=document.createElement("canvas"),ke=document.createElement("video"),G=!0;ke.srcObject=I,ke.muted=!0,ke.height=D.height,ke.width=D.width,ke.volume=0,ke.onplaying=function(){ue.width=D.width,ue.height=D.height;var K=ue.getContext("2d"),re=1e3/D.frameRate,Ce=setInterval(function(){if(O&&(clearInterval(Ce),we.close()),G&&(G=!1,D.onVideoProcessStarted&&D.onVideoProcessStarted()),K.drawImage(ke,0,0),we._controlledReadableStream.state!=="closed")try{we.enqueue(K.getImageData(0,0,D.width,D.height))}catch{}},re)},ke.play()}})}var te;function ae(we,ue){if(!D.workerPath&&!ue){O=!1,fetch("https://unpkg.com/webm-wasm@latest/dist/webm-worker.js").then(function(G){G.arrayBuffer().then(function(K){ae(we,K)})});return}if(!D.workerPath&&ue instanceof ArrayBuffer){var ke=new Blob([ue],{type:"text/javascript"});D.workerPath=f.createObjectURL(ke)}D.workerPath||console.error("workerPath parameter is missing."),te=new Worker(D.workerPath),te.postMessage(D.webAssemblyPath||"https://unpkg.com/webm-wasm@latest/dist/webm-wasm.wasm"),te.addEventListener("message",function(G){G.data==="READY"?(te.postMessage({width:D.width,height:D.height,bitrate:D.bitrate||1200,timebaseDen:D.frameRate||30,realtime:D.realtime}),he().pipeTo(new WritableStream({write:function(K){if(O){console.error("Got image, but recorder is finished!");return}te.postMessage(K.data.buffer,[K.data.buffer])}}))):G.data&&(Ae||ve.push(G.data))})}this.record=function(){ve=[],Ae=!1,this.blob=null,ae(I),typeof D.initCallback=="function"&&D.initCallback()};var Ae;this.pause=function(){Ae=!0},this.resume=function(){Ae=!1};function X(we){if(!te){we&&we();return}te.addEventListener("message",function(ue){ue.data===null&&(te.terminate(),te=null,we&&we())}),te.postMessage(null)}var ve=[];this.stop=function(we){O=!0;var ue=this;X(function(){ue.blob=new Blob(ve,{type:"video/webm"}),we(ue.blob)})},this.name="WebAssemblyRecorder",this.toString=function(){return this.name},this.clearRecordedData=function(){ve=[],Ae=!1,this.blob=null},this.blob=null}typeof t<"u"&&(t.WebAssemblyRecorder=q)}(up)),up.exports}var eK=$W();const 
R1=F1(eK),tK={name:"MessageContactInfo",components:{ContactsAddContact:kA,Icon:He},computed:{...J(Kt,["contacts"]),...J(oe,["config","IntlString","blockedNumbers"])},data(){return{phoneNumber:null,contact:null}},methods:{...Te(Ei,["setBrowsePicture"]),blockContact(){if(this.blockCooldown){M.onshow_notification({data:{img:"/public/img/Apps/light_mode/message.webp",apptitle:"APP_MESSAGES_NAME",title:"CONTACT_APP_CONTACT_VIEW_BLOCK_COOLDOWN",message:""}});return}if(this.blockCooldown=!0,setTimeout(()=>{this.blockCooldown=!1},5e3),this.blockedNumbers.includes(this.phoneNumber)){M.post("removeBlock",{number:this.phoneNumber});return}M.post("blockContact",{number:this.phoneNumber})},startVideoCall(){M.startVideoCall(this.phoneNumber)},closeContactInfo(){document.getElementById("contact-addcontact-box").classList.add("addcontact-out-class"),setTimeout(()=>{this.phoneNumber=null},180),Xe().emit("message_brightness",{brightness:!1})},addContact(){Xe().emit("addContact",{number:this.phoneNumber})},sendGPSLocation(){M.sendMessage(this.phoneNumber,"%pos%"),this.closeContactInfo()},startCall(){M.startCall(this.phoneNumber)},openMail(e){xe.push("/mail/list/"+e)}},mounted(){this.emitter.on("openMessageContactInfo",e=>{if(!e.number){xe.push("/messages/list");return}this.phoneNumber=e.number;let t=this.contacts.filter(i=>i.number==e.number);t.length>0&&(this.contact=t[0]),Xe().emit("message_brightness",{brightness:!0})})},beforeUnmount(){this.emitter.off("openMessageContactInfo")}},iK={key:0,class:"contact-addcontact-box",id:"contact-addcontact-box"},nK=["src"],aK={class:"message-group-edit
/**
 * Leaflet 1.9.4, a JS library for interactive maps. https://leafletjs.com
 * (c) 2010-2023 Vladimir Agafonkin, (c) 2010-2011 CloudMade
*/var bme=Fu.exports,vv;function vme(){return vv||(vv=1,function(e,t){(function(i,n){n(t)})(bme,function(i){var n="1.9.4";function a(c){var g,v,k,P;for(v=1,k=arguments.length;v<k;v++){P=arguments[v];for(g in P)c[g]=P[g]}return c}var s=Object.create||function(){function c(){}return function(g){return c.prototype=g,new c}}();function r(c,g){var v=Array.prototype.slice;if(c.bind)return c.bind.apply(c,v.call(arguments,1));var k=v.call(arguments,2);return function(){return c.apply(g,k.length?k.concat(v.call(arguments)):arguments)}}var o=0;function l(c){return"_leaflet_id"in c||(c._leaflet_id=++o),c._leaflet_id}function u(c,g,v){var k,P,j,pe;return pe=function(){k=!1,P&&(j.apply(v,P),P=!1)},j=function(){k?P=arguments:(c.apply(v,arguments),setTimeout(pe,g),k=!0)},j}function f(c,g,v){var k=g[1],P=g[0],j=k-P;return c===k&&v?c:((c-P)%j+j)%j+P}function h(){return!1}function A(c,g){if(g===!1)return c;var v=Math.pow(10,g===void 0?6:g);return Math.round(c*v)/v}function p(c){return c.trim?c.trim():c.replace(/^\s+|\s+$/g,"")}function F(c){return p(c).split(/\s+/)}function y(c,g){Object.prototype.hasOwnProperty.call(c,"options")||(c.options=c.options?s(c.options):{});for(var v in g)c.options[v]=g[v];return c.options}function E(c,g,v){var k=[];for(var P in c)k.push(encodeURIComponent(v?P.toUpperCase():P)+"="+encodeURIComponent(c[P]));return(!g||g.indexOf("?")===-1?"?":"&")+k.join("&")}var w=/\{ *([\w_ -]+) *\}/g;function C(c,g){return c.replace(w,function(v,k){var P=g[k];if(P===void 0)throw new Error("No value provided for variable "+v);return typeof P=="function"&&(P=P(g)),P})}var B=Array.isArray||function(c){return Object.prototype.toString.call(c)==="[object Array]"};function S(c,g){for(var v=0;v<c.length;v++)if(c[v]===g)return v;return-1}var U="data:image/gif;base64,R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs=";function N(c){return window["webkit"+c]||window["moz"+c]||window["ms"+c]}var z=0;function Q(c){var g=+new Date,v=Math.max(0,16-(g-z));return z=g+v,window.setTimeout(c,v)}var R=window.requestAnimationFrame||N("RequestAnimationFrame")||Q,W=window.cancelAnimationFrame||N("CancelAnimationFrame")||N("CancelRequestAnimationFrame")||function(c){window.clearTimeout(c)};function Z(c,g,v){if(v&&R===Q)c.call(g);else return R.call(window,r(c,g))}function de(c){c&&W.call(window,c)}var Fe={__proto__:null,extend:a,create:s,bind:r,get lastId(){return o},stamp:l,throttle:u,wrapNum:f,falseFn:h,formatNum:A,trim:p,splitWords:F,setOptions:y,getParamString:E,template:C,isArray:B,indexOf:S,emptyImageUrl:U,requestFn:R,cancelFn:W,requestAnimFrame:Z,cancelAnimFrame:de};function ge(){}ge.extend=function(c){var g=function(){y(this),this.initialize&&this.initialize.apply(this,arguments),this.callInitHooks()},v=g.__super__=this.prototype,k=s(v);k.constructor=g,g.prototype=k;for(var P in this)Object.prototype.hasOwnProperty.call(this,P)&&P!=="prototype"&&P!=="__super__"&&(g[P]=this[P]);return c.statics&&a(g,c.statics),c.includes&&(le(c.includes),a.apply(null,[k].concat(c.includes))),a(k,c),delete k.statics,delete k.includes,k.options&&(k.options=v.options?s(v.options):{},a(k.options,c.options)),k._initHooks=[],k.callInitHooks=function(){if(!this._initHooksCalled){v.callInitHooks&&v.callInitHooks.call(this),this._initHooksCalled=!0;for(var j=0,pe=k._initHooks.length;j<pe;j++)k._initHooks[j].call(this)}},g},ge.include=function(c){var g=this.prototype.options;return a(this.prototype,c),c.options&&(this.prototype.options=g,this.mergeOptions(c.options)),this},ge.mergeOptions=function(c){return 
a(this.prototype.options,c),this},ge.addInitHook=function(c){var g=Array.prototype.slice.call(arguments,1),v=typeof c=="function"?c:function(){this[c].apply(this,g)};return this.prototype._initHooks=this.prototype._initHooks||[],this.prototype._initHooks.push(v),this};function le(c){if(!(typeof L>"u"||!L||!L.Mixin)){c=B(c)?c:[c];for(var g=0;g<c.length;g++)c[g]===L.Mixin.Events&&console.warn("Deprecated include of L.Mixin.Events: this property will be removed in future releases, please inherit from L.Evented instead.",new Error().stack)}}var se={on:function(c,g,v){if(typeof c=="ob
`,isFavourite:1,isDeleted:0,time:new Date},{id:4,identifier:"hdewhew",picture:"https://wallpapers.com/images/hd/fivem-9z6vdna3pkg05t7w.jpg",isFavourite:0,isDeleted:1,time:new Date}]}testYellowPage(){const t=Kr();t.posts=[{id:1,title:"Tes434343rr43r43rrrrrrrrrr7843874r3867r436784r3687543876t",text:"Te 43243 43r43r434r3r43r43 4r3434354535345433545435433454r3r43r43434343rst",number:122121},{id:2,title:"Auto zu verkaufen 223e32 32 32r323 r332",text:"Hey ich biete hier meinen Lamborghini an zu verkaufen Preis: 500 euro schnapper gönnt euch.",image:"https://www.lamborghini.com/sites/it-en/files/DAM/lamborghini/facelift_2019/models_gw/2023/03_29_revuelto/gate_models_s_02_m.jpg",number:211}]}testNewsPosts(){const t=Yr();t.posts=[{id:1,title:"Eröffnungs Cafe",text:"Hey wir eröffnen heute unser cafe in der blumenstraße!",number:122121,image:"https://cdn.discordapp.com/attachments/880552660447658007/1228016373020753930/screenshot.png?ex=662a825e&is=66180d5e&hm=bece92667d5374dbd805499697a43a0abfeb8617089cd9a3d73698ccb1f15438&"},{id:2,title:"Eröffnungs Cafe",text:"Hey wir eröffnen heute unser cafe in der blumenstraße!",number:122121,image:"https://www.swr.de/wissen/1000-antworten/1676037642957%2Cblumenwiese-118~_v-16x9@2dM_-ad6791ade5eb8b5c935dd377130b903c4b5781d8.jpg"}]}camera_open(){return this.post("camera_open")}onTakePhoto(){Xe().emit("takePhoto")}twitter_login(t,i){return this.post("twitter_login",{username:t,password:i})}twitter_postComment(t,i,n,a){return this.post("twitter_postComment",{username:t,password:i,post:n,comment:a})}ontwitter_addComment(t){Xe().emit("addTwitterComment",t.comment)}twitter_changePassword(t,i,n){return this.post("twitter_changePassword",{username:t,password:i,newPassword:n})}twitter_createAccount(t,i,n){return this.post("twitter_createAccount",{username:t,password:i,avatarUrl:n})}twitter_postTweet(t,i,n,a){this.post("twitter_postTweet",{username:t,password:i,message:n,image:a}).then(()=>{xe.push("/twitter/home")})}twitter_postTweetImg(t,i,n){return this.post("twitter_postTweetImg",{username:t,password:i,message:n})}twitter_toggleLikeTweet(t,i,n){return this.post("twitter_toggleLikeTweet",{username:t,password:i,tweetId:n})}twitter_setAvatar(t,i,n){return this.post("twitter_setAvatarUrl",{username:t,password:i,avatarUrl:n})}twitter_getTweets(t,i){return this.post("twitter_getTweets",{username:t,password:i})}twitter_getUserTweets(t,i){return this.post("twitter_getUserTweets",{username:t,password:i})}twitter_deleteTweet(t,i,n){return this.post("twitter_userssDeleteTweet",{username:t,password:i,tweetId:n})}twitter_logout(){return yi().twitterLogout(),this.post("twitter_logout")}async ontwitter_tweets(t){let i=[],n=[];t.tweets.forEach(s=>{n.push(new Promise((r,o)=>{let l=new Image;l.onerror=function(){s.authorIcon="/public/img/user.png",i.push(s),r()},l.onload=function(){i.push(s),r()},l.src=s.authorIcon}))}),await Promise.all(n);const a=yi();a.tweets=i}ontwitter_newTweet(t){const i=yi(),n=i.tweets;i.tweets=[t.tweet,...n]}ontwitter_newpost(t){const i=yi(),n=oe();i.twitterNotification===1&&i.twitterUsername!==t.post.author&&i.twitterUsername!=null&&localStorage.roadphone_app_twitter_app==="1"&&(this.onshow_notification({data:{apptitle:n.IntlString("APP_TWITTER_NOTIF_NEW_POST_TITLE"),message:"",title:t.post.author+" "+n.IntlString("APP_TWITTER_NOTIF_NEW_POST"),img:"/public/img/Apps/light_mode/tweetwave.webp"}}),this.onsetLockscreenNotify({apptitle:n.IntlString("APP_TWITTER_NOTIF_NEW_POST_TITLE"),title:t.post.author+" 
"+n.IntlString("APP_TWITTER_NOTIF_NEW_POST"),message:"",img:"/public/img/Apps/light_mode/tweetwave.webp",app:"twitter"}))}ontwitter_setAccount(t){yi().setAccount(t)}ontwitter_updateTweetLikes(t){const i=yi(),n=i.tweets.findIndex(s=>s.id===t.tweetId);n!==-1&&(i.tweets[n].likes=t.likes);const a=i.userTweets.findIndex(s=>s.id===t.tweetId);a!==-1&&(i.userTweets[a].likes=t.likes)}ontwitter_setTweetLikes(t){const i=yi(),n=i.tweets.findIndex(s=>s.id===t.tweetId);n!==-1&&(i.tweets[n].isLiked=t.isLiked);const a=i.userTweets.findIndex(s=>s.id===t.tweetId);a!==-1&&(i.userTweets[a].isLiked=t.isLike
* html2canvas 1.4.1 <https://html2canvas.hertzen.com>
 * Copyright (c) 2022 Niklas von Hertzen <https://hertzen.com>
 * Released under MIT License
 *//*! *****************************************************************************
Copyright (c) Microsoft Corporation.

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */var s_=function(e,t){return s_=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(i,n){i.__proto__=n}||function(i,n){for(var a in n)Object.prototype.hasOwnProperty.call(n,a)&&(i[a]=n[a])},s_(e,t)};function Ta(e,t){if(typeof t!="function"&&t!==null)throw new TypeError("Class extends value "+String(t)+" is not a constructor or null");s_(e,t);function i(){this.constructor=e}e.prototype=t===null?Object.create(t):(i.prototype=t.prototype,new i)}var r_=function(){return r_=Object.assign||function(t){for(var i,n=1,a=arguments.length;n<a;n++){i=arguments[n];for(var s in i)Object.prototype.hasOwnProperty.call(i,s)&&(t[s]=i[s])}return t},r_.apply(this,arguments)};function wn(e,t,i,n){function a(s){return s instanceof i?s:new i(function(r){r(s)})}return new(i||(i=Promise))(function(s,r){function o(f){try{u(n.next(f))}catch(h){r(h)}}function l(f){try{u(n.throw(f))}catch(h){r(h)}}function u(f){f.done?s(f.value):a(f.value).then(o,l)}u((n=n.apply(e,[])).next())})}function dn(e,t){var i={label:0,sent:function(){if(s[0]&1)throw s[1];return s[1]},trys:[],ops:[]},n,a,s,r;return r={next:o(0),throw:o(1),return:o(2)},typeof Symbol=="function"&&(r[Symbol.iterator]=function(){return this}),r;function o(u){return function(f){return l([u,f])}}function l(u){if(n)throw new TypeError("Generator is already executing.");for(;i;)try{if(n=1,a&&(s=u[0]&2?a.return:u[0]?a.throw||((s=a.return)&&s.call(a),0):a.next)&&!(s=s.call(a,u[1])).done)return s;switch(a=0,s&&(u=[u[0]&2,s.value]),u[0]){case 0:case 1:s=u;break;case 4:return i.label++,{value:u[1],done:!1};case 5:i.label++,a=u[1],u=[0];continue;case 7:u=i.ops.pop(),i.trys.pop();continue;default:if(s=i.trys,!(s=s.length>0&&s[s.length-1])&&(u[0]===6||u[0]===2)){i=0;continue}if(u[0]===3&&(!s||u[1]>s[0]&&u[1]<s[3])){i.label=u[1];break}if(u[0]===6&&i.label<s[1]){i.label=s[1],s=u;break}if(s&&i.label<s[2]){i.label=s[2],i.ops.push(u);break}s[2]&&i.ops.pop(),i.trys.pop();continue}u=t.call(e,i)}catch(f){u=[6,f],a=0}finally{n=s=0}if(u[0]&5)throw u[1];return{value:u[0]?u[1]:void 0,done:!0}}}function Hd(e,t,i){if(arguments.length===2)for(var n=0,a=t.length,s;n<a;n++)(s||!(n in t))&&(s||(s=Array.prototype.slice.call(t,0,n)),s[n]=t[n]);return e.concat(s||t)}var Ms=function(){function e(t,i,n,a){this.left=t,this.top=i,this.width=n,this.height=a}return e.prototype.add=function(t,i,n,a){return new e(this.left+t,this.top+i,this.width+n,this.height+a)},e.fromClientRect=function(t,i){return new e(i.left+t.windowBounds.left,i.top+t.windowBounds.top,i.width,i.height)},e.fromDOMRectList=function(t,i){var n=Array.from(i).find(function(a){return a.width!==0});return n?new e(n.left+t.windowBounds.left,n.top+t.windowBounds.top,n.width,n.height):e.EMPTY},e.EMPTY=new e(0,0,0,0),e}(),RA=function(e,t){return Ms.fromClientRect(e,t.getBoundingClientRect())},A3e=function(e){var t=e.body,i=e.documentElement;if(!t||!i)throw new Error("Unable to get document size");var n=Math.max(Math.max(t.scrollWidth,i.scrollWidth),Math.max(t.offsetWidth,i.offsetWidth),Math.max(t.clientWidth,i.clientWidth)),a=Math.max(Math.max(t.scrollHeight,i.scrollHeight),Math.max(t.offsetHeight,i.offsetHeight),Math.max(t.clientHeight,i.clientHeight));return new Ms(0,0,n,a)},UA=function(e){for(var t=[],i=0,n=e.length;i<n;){var a=e.charCodeAt(i++);if(a>=55296&&a<=56319&&i<n){var s=e.charCodeAt(i++);(s&64512)===56320?t.push(((a&1023)<<10)+(s&1023)+65536):(t.push(a),i--)}else t.push(a)}return t},Li=function(){for(var 
e=[],t=0;t<arguments.length;t++)e[t]=arguments[t];if(String.fromCodePoint)return String.fromCodePoint.apply(String,e);var i=e.length;if(!i)return"";for(var n=[],a=-1,s="";++a<i;){var r=e[a];r<=65535?n.push(r):(r-=65536,n.push((r>>10)+55296,r%1024+56320)),(a+1===i||n.length>16384)&&(s+=String.fromCharCode.apply(String,n),n.length=0)}return s},yv="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",g3e=typeof Uint8Array>"u"?[]:new Uint8Array(256);for(var Qd=0;Qd<yv.length;Qd++)g3e[yv.charCodeAt(Qd)]=Qd;var wv="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvw
content: "" !important;
display: none !important;
}`,$ke=function(e){eBe(e,"."+E_+Jke+m2+`
.`+k_+Zke+m2)},eBe=function(e,t){var i=e.ownerDocument;if(i){var n=i.createElement("style");n.textContent=t,e.appendChild(n)}},tE=function(){function e(){}return e.getOrigin=function(t){var i=e._link;return i?(i.href=t,i.href=i.href,i.protocol+i.hostname+i.port):"about:blank"},e.isSameOrigin=function(t){return e.getOrigin(t)===e._origin},e.setContext=function(t){e._link=t.document.createElement("a"),e._origin=e.getOrigin(t.location.href)},e._origin="about:blank",e}(),tBe=function(){function e(t,i){this.context=t,this._options=i,this._cache={}}return e.prototype.addImage=function(t){var i=Promise.resolve();return this.has(t)||(Tp(t)||sBe(t))&&(this._cache[t]=this.loadImage(t)).catch(function(){}),i},e.prototype.match=function(t){return this._cache[t]},e.prototype.loadImage=function(t){return wn(this,void 0,void 0,function(){var i,n,a,s,r=this;return dn(this,function(o){switch(o.label){case 0:return i=tE.isSameOrigin(t),n=!xp(t)&&this._options.useCORS===!0&&an.SUPPORT_CORS_IMAGES&&!i,a=!xp(t)&&!i&&!Tp(t)&&typeof this._options.proxy=="string"&&an.SUPPORT_CORS_XHR&&!n,!i&&this._options.allowTaint===!1&&!xp(t)&&!Tp(t)&&!a&&!n?[2]:(s=t,a?[4,this.proxy(s)]:[3,2]);case 1:s=o.sent(),o.label=2;case 2:return this.context.logger.debug("Added image "+t.substring(0,256)),[4,new Promise(function(l,u){var f=new Image;f.onload=function(){return l(f)},f.onerror=u,(rBe(s)||n)&&(f.crossOrigin="anonymous"),f.src=s,f.complete===!0&&setTimeout(function(){return l(f)},500),r._options.imageTimeout>0&&setTimeout(function(){return u("Timed out ("+r._options.imageTimeout+"ms) loading image")},r._options.imageTimeout)})];case 3:return[2,o.sent()]}})})},e.prototype.has=function(t){return typeof this._cache[t]<"u"},e.prototype.keys=function(){return Promise.resolve(Object.keys(this._cache))},e.prototype.proxy=function(t){var i=this,n=this._options.proxy;if(!n)throw new Error("No proxy defined");var a=t.substring(0,256);return new Promise(function(s,r){var o=an.SUPPORT_RESPONSE_TYPE?"blob":"text",l=new XMLHttpRequest;l.onload=function(){if(l.status===200)if(o==="text")s(l.response);else{var h=new FileReader;h.addEventListener("load",function(){return s(h.result)},!1),h.addEventListener("error",function(A){return r(A)},!1),h.readAsDataURL(l.response)}else r("Failed to proxy resource "+a+" with status code "+l.status)},l.onerror=r;var u=n.indexOf("?")>-1?"&":"?";if(l.open("GET",""+n+u+"url="+encodeURIComponent(t)+"&responseType="+o),o!=="text"&&l instanceof XMLHttpRequest&&(l.responseType=o),i._options.imageTimeout){var f=i._options.imageTimeout;l.timeout=f,l.ontimeout=function(){return r("Timed out ("+f+"ms) proxying "+a)}}l.send()})},e}(),iBe=/^data:image\/svg\+xml/i,nBe=/^data:image\/.*;base64,/i,aBe=/^data:image\/.*/i,sBe=function(e){return an.SUPPORT_SVG_DRAWING||!oBe(e)},xp=function(e){return aBe.test(e)},rBe=function(e){return nBe.test(e)},Tp=function(e){return e.substr(0,4)==="blob"},oBe=function(e){return e.substr(-3).toLowerCase()==="svg"||iBe.test(e)},at=function(){function e(t,i){this.type=0,this.x=t,this.y=i}return e.prototype.add=function(t,i){return new e(this.x+t,this.y+i)},e}(),zo=function(e,t,i){return new at(e.x+(t.x-e.x)*i,e.y+(t.y-e.y)*i)},rf=function(){function e(t,i,n,a){this.type=1,this.start=t,this.startControl=i,this.endControl=n,this.end=a}return e.prototype.subdivide=function(t,i){var n=zo(this.start,this.startControl,t),a=zo(this.startControl,this.endControl,t),s=zo(this.endControl,this.end,t),r=zo(n,a,t),o=zo(a,s,t),l=zo(r,o,t);return i?new e(this.start,n,r,l):new 
e(l,o,s,this.end)},e.prototype.add=function(t,i){return new e(this.start.add(t,i),this.startControl.add(t,i),this.endControl.add(t,i),this.end.add(t,i))},e.prototype.reverse=function(){return new e(this.end,this.endControl,this.startControl,this.start)},e}(),la=function(e){return e.type===1},lBe=function(){function e(t){var i=t.styles,n=t.bounds,a=Cu(i.borderTopLeftRadius,n.width,n.height),s=a[0],r=a[1],o=Cu(i.borderTopRightRadius,n.width,n.height),l=o[0],u=o[1],f=Cu(i.borderBottomRightRadius,n.width,n.height),h=f[0],A=f[1],p=Cu(i.borderBottomLeftRadius,n
* Font Awesome Free 6.7.2 by @fontawesome - https://fontawesome.com
 * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
 * Copyright 2024 Fonticons, Inc.
*/function v6e(e,t,i){return(t=w6e(t))in e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}function v2(e,t){var i=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter(function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable})),i.push.apply(i,n)}return i}function Ke(e){for(var t=1;t<arguments.length;t++){var i=arguments[t]!=null?arguments[t]:{};t%2?v2(Object(i),!0).forEach(function(n){v6e(e,n,i[n])}):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(i)):v2(Object(i)).forEach(function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(i,n))})}return e}function y6e(e,t){if(typeof e!="object"||!e)return e;var i=e[Symbol.toPrimitive];if(i!==void 0){var n=i.call(e,t);if(typeof n!="object")return n;throw new TypeError("@@toPrimitive must return a primitive value.")}return(t==="string"?String:Number)(e)}function w6e(e){var t=y6e(e,"string");return typeof t=="symbol"?t:t+""}const y2=()=>{};let X1={},lE={},uE=null,cE={mark:y2,measure:y2};try{typeof window<"u"&&(X1=window),typeof document<"u"&&(lE=document),typeof MutationObserver<"u"&&(uE=MutationObserver),typeof performance<"u"&&(cE=performance)}catch{}const{userAgent:w2=""}=X1.navigator||{},Cr=X1,_i=lE,C2=uE,cf=cE;Cr.document;const Hs=!!_i.documentElement&&!!_i.head&&typeof _i.addEventListener=="function"&&typeof _i.createElement=="function",dE=~w2.indexOf("MSIE")||~w2.indexOf("Trident/");var C6e=/fa(s|r|l|t|d|dr|dl|dt|b|k|kd|ss|sr|sl|st|sds|sdr|sdl|sdt)?[\-\ ]/,E6e=/Font ?Awesome ?([56 ]*)(Solid|Regular|Light|Thin|Duotone|Brands|Free|Pro|Sharp Duotone|Sharp|Kit)?.*/i,fE={classic:{fa:"solid",fas:"solid","fa-solid":"solid",far:"regular","fa-regular":"regular",fal:"light","fa-light":"light",fat:"thin","fa-thin":"thin",fab:"brands","fa-brands":"brands"},duotone:{fa:"solid",fad:"solid","fa-solid":"solid","fa-duotone":"solid",fadr:"regular","fa-regular":"regular",fadl:"light","fa-light":"light",fadt:"thin","fa-thin":"thin"},sharp:{fa:"solid",fass:"solid","fa-solid":"solid",fasr:"regular","fa-regular":"regular",fasl:"light","fa-light":"light",fast:"thin","fa-thin":"thin"},"sharp-duotone":{fa:"solid",fasds:"solid","fa-solid":"solid",fasdr:"regular","fa-regular":"regular",fasdl:"light","fa-light":"light",fasdt:"thin","fa-thin":"thin"}},k6e={GROUP:"duotone-group",PRIMARY:"primary",SECONDARY:"secondary"},hE=["fa-classic","fa-duotone","fa-sharp","fa-sharp-duotone"],bn="classic",VA="duotone",B6e="sharp",S6e="sharp-duotone",AE=[bn,VA,B6e,S6e],D6e={classic:{900:"fas",400:"far",normal:"far",300:"fal",100:"fat"},duotone:{900:"fad",400:"fadr",300:"fadl",100:"fadt"},sharp:{900:"fass",400:"fasr",300:"fasl",100:"fast"},"sharp-duotone":{900:"fasds",400:"fasdr",300:"fasdl",100:"fasdt"}},x6e={"Font Awesome 6 Free":{900:"fas",400:"far"},"Font Awesome 6 Pro":{900:"fas",400:"far",normal:"far",300:"fal",100:"fat"},"Font Awesome 6 Brands":{400:"fab",normal:"fab"},"Font Awesome 6 Duotone":{900:"fad",400:"fadr",normal:"fadr",300:"fadl",100:"fadt"},"Font Awesome 6 Sharp":{900:"fass",400:"fasr",normal:"fasr",300:"fasl",100:"fast"},"Font Awesome 6 Sharp Duotone":{900:"fasds",400:"fasdr",normal:"fasdr",300:"fasdl",100:"fasdt"}},T6e=new 
Map([["classic",{defaultShortPrefixId:"fas",defaultStyleId:"solid",styleIds:["solid","regular","light","thin","brands"],futureStyleIds:[],defaultFontWeight:900}],["sharp",{defaultShortPrefixId:"fass",defaultStyleId:"solid",styleIds:["solid","regular","light","thin"],futureStyleIds:[],defaultFontWeight:900}],["duotone",{defaultShortPrefixId:"fad",defaultStyleId:"solid",styleIds:["solid","regular","light","thin"],futureStyleIds:[],defaultFontWeight:900}],["sharp-duotone",{defaultShortPrefixId:"fasds",defaultStyleId:"solid",styleIds:["solid","regular","light","thin"],futureStyleIds:[],defaultFontWeight:900}]]),I6e={classic:{solid:"fas",regular:"far",light:"fal",thin:"fat",brands:"fab"},duotone:{solid:"fad",regular:"fadr",light:"fadl",thin:"fadt"},sharp:{solid:"fass",regular:"fasr",light:"fasl",thin:"fast"},"sharp-duotone":{solid:"fasd
  --fa-font-solid: normal 900 1em/1 "Font Awesome 6 Free";
  --fa-font-regular: normal 400 1em/1 "Font Awesome 6 Free";
  --fa-font-light: normal 300 1em/1 "Font Awesome 6 Pro";
  --fa-font-thin: normal 100 1em/1 "Font Awesome 6 Pro";
  --fa-font-duotone: normal 900 1em/1 "Font Awesome 6 Duotone";
  --fa-font-duotone-regular: normal 400 1em/1 "Font Awesome 6 Duotone";
  --fa-font-duotone-light: normal 300 1em/1 "Font Awesome 6 Duotone";
  --fa-font-duotone-thin: normal 100 1em/1 "Font Awesome 6 Duotone";
  --fa-font-brands: normal 400 1em/1 "Font Awesome 6 Brands";
  --fa-font-sharp-solid: normal 900 1em/1 "Font Awesome 6 Sharp";
  --fa-font-sharp-regular: normal 400 1em/1 "Font Awesome 6 Sharp";
  --fa-font-sharp-light: normal 300 1em/1 "Font Awesome 6 Sharp";
  --fa-font-sharp-thin: normal 100 1em/1 "Font Awesome 6 Sharp";
  --fa-font-sharp-duotone-solid: normal 900 1em/1 "Font Awesome 6 Sharp Duotone";
  --fa-font-sharp-duotone-regular: normal 400 1em/1 "Font Awesome 6 Sharp Duotone";
  --fa-font-sharp-duotone-light: normal 300 1em/1 "Font Awesome 6 Sharp Duotone";
  --fa-font-sharp-duotone-thin: normal 100 1em/1 "Font Awesome 6 Sharp Duotone";
}

svg:not(:root).svg-inline--fa, svg:not(:host).svg-inline--fa {
  overflow: visible;
  box-sizing: content-box;
}

.svg-inline--fa {
  display: var(--fa-display, inline-block);
  height: 1em;
  overflow: visible;
  vertical-align: -0.125em;
}
.svg-inline--fa.fa-2xs {
  vertical-align: 0.1em;
}
.svg-inline--fa.fa-xs {
  vertical-align: 0em;
}
.svg-inline--fa.fa-sm {
  vertical-align: -0.0714285705em;
}
.svg-inline--fa.fa-lg {
  vertical-align: -0.2em;
}
.svg-inline--fa.fa-xl {
  vertical-align: -0.25em;
}
.svg-inline--fa.fa-2xl {
  vertical-align: -0.3125em;
}
.svg-inline--fa.fa-pull-left {
  margin-right: var(--fa-pull-margin, 0.3em);
  width: auto;
}
.svg-inline--fa.fa-pull-right {
  margin-left: var(--fa-pull-margin, 0.3em);
  width: auto;
}
.svg-inline--fa.fa-li {
  width: var(--fa-li-width, 2em);
  top: 0.25em;
}
.svg-inline--fa.fa-fw {
  width: var(--fa-fw-width, 1.25em);
}

.fa-layers svg.svg-inline--fa {
  bottom: 0;
  left: 0;
  margin: auto;
  position: absolute;
  right: 0;
  top: 0;
}

.fa-layers-counter, .fa-layers-text {
  display: inline-block;
  position: absolute;
  text-align: center;
}

.fa-layers {
  display: inline-block;
  height: 1em;
  position: relative;
  text-align: center;
  vertical-align: -0.125em;
  width: 1em;
}
.fa-layers svg.svg-inline--fa {
  transform-origin: center center;
}

.fa-layers-text {
  left: 50%;
  top: 50%;
  transform: translate(-50%, -50%);
  transform-origin: center center;
}

.fa-layers-counter {
  background-color: var(--fa-counter-background-color, #ff253a);
  border-radius: var(--fa-counter-border-radius, 1em);
  box-sizing: border-box;
  color: var(--fa-inverse, #fff);
  line-height: var(--fa-counter-line-height, 1);
  max-width: var(--fa-counter-max-width, 5em);
  min-width: var(--fa-counter-min-width, 1.5em);
  overflow: hidden;
  padding: var(--fa-counter-padding, 0.25em 0.5em);
  right: var(--fa-right, 0);
  text-overflow: ellipsis;
  top: var(--fa-top, 0);
  transform: scale(var(--fa-counter-scale, 0.25));
  transform-origin: top right;
}

.fa-layers-bottom-right {
  bottom: var(--fa-bottom, 0);
  right: var(--fa-right, 0);
  top: auto;
  transform: scale(var(--fa-layers-scale, 0.25));
  transform-origin: bottom right;
}

.fa-layers-bottom-left {
  bottom: var(--fa-bottom, 0);
  left: var(--fa-left, 0);
  right: auto;
  top: auto;
  transform: scale(var(--fa-layers-scale, 0.25));
  transform-origin: bottom left;
}

.fa-layers-top-right {
  top: var(--fa-top, 0);
  right: var(--fa-right, 0);
  transform: scale(var(--fa-layers-scale, 0.25));
  transform-origin: top right;
}

.fa-layers-top-left {
  left: var(--fa-left, 0);
  right: auto;
  top: var(--fa-top, 0);
  transform: scale(var(--fa-layers-scale, 0.25));
  transform-origin: top left;
}

.fa-1x {
  font-size: 1em;
}

.fa-2x {
  font-size: 2em;
}

.fa-3x {
  font-size: 3em;
}

.fa-4x {
  font-size: 4em;
}

.fa-5x {
  font-size: 5em;
}

.fa-6x {
  font-size: 6em;
}

.fa-7x {
  font-size: 7em;
}

.fa-8x {
  font-size: 8em;
}

.fa-9x {
  font-size: 9em;
}

.fa-10x {
  font-size: 10em;
}

.fa-2xs {
  font-size: 0.625em;
  line-height: 0.1em;
  vertical-align: 0.225em;
}

.fa-xs {
  font-size: 0.75em;
  line-height: 0.0833333337em;
  vertical-align: 0.125em;
}

.fa-sm {
  font-size: 0.875em;
  line-height: 0.0714285718em;
  vertical-align: 0.0535714295em;
}

.fa-lg {
  font-size: 1.25em;
  line-height: 0.05em;
  vertical-align: -0.075em;
}

.fa-xl {
  font-size: 1.5em;
  line-height: 0.0416666682em;
  vertical-align: -0.125em;
}

.fa-2xl {
  font-size: 2em;
  line-height: 0.03125em;
  vertical-align: -0.1875em;
}

.fa-fw {
  text-align: center;
  width: 1.25em;
}

.fa-ul {
  list-style-type: none;
  margin-left: var(--fa-li-margin, 2.5em);
  padding-left: 0;
}
.fa-ul > li {
  position: relative;
}

.fa-li {
  left: calc(-1 * var(--fa-li-width, 2em));
  position: absolute;
  text-align: center;
  width: var(--fa-li-width, 2em);
  line-height: inherit;
}

.fa-border {
  border-color: var(--fa-border-color, #eee);
  border-radius: var(--fa-border-radius, 0.1em);
  border-style: var(--fa-border-style, solid);
  border-width: var(--fa-border-width, 0.08em);
  padding: var(--fa-border-padding, 0.2em 0.25em 0.15em);
}

.fa-pull-left {
  float: left;
  margin-right: var(--fa-pull-margin, 0.3em);
}

.fa-pull-right {
  float: right;
  margin-left: var(--fa-pull-margin, 0.3em);
}

.fa-beat {
  animation-name: fa-beat;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, ease-in-out);
}

.fa-bounce {
  animation-name: fa-bounce;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, cubic-bezier(0.28, 0.84, 0.42, 1));
}

.fa-fade {
  animation-name: fa-fade;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, cubic-bezier(0.4, 0, 0.6, 1));
}

.fa-beat-fade {
  animation-name: fa-beat-fade;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, cubic-bezier(0.4, 0, 0.6, 1));
}

.fa-flip {
  animation-name: fa-flip;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, ease-in-out);
}

.fa-shake {
  animation-name: fa-shake;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, linear);
}

.fa-spin {
  animation-name: fa-spin;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 2s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, linear);
}

.fa-spin-reverse {
  --fa-animation-direction: reverse;
}

.fa-pulse,
.fa-spin-pulse {
  animation-name: fa-spin;
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, steps(8));
}

@media (prefers-reduced-motion: reduce) {
  .fa-beat,
  .fa-bounce,
  .fa-fade,
  .fa-beat-fade,
  .fa-flip,
  .fa-pulse,
  .fa-shake,
  .fa-spin,
  .fa-spin-pulse {
    animation-delay: -1ms;
    animation-duration: 1ms;
    animation-iteration-count: 1;
    transition-delay: 0s;
    transition-duration: 0s;
  }
}
@keyframes fa-beat {
  0%, 90% {
    transform: scale(1);
  }
  45% {
    transform: scale(var(--fa-beat-scale, 1.25));
  }
}
@keyframes fa-bounce {
  0% {
    transform: scale(1, 1) translateY(0);
  }
  10% {
    transform: scale(var(--fa-bounce-start-scale-x, 1.1), var(--fa-bounce-start-scale-y, 0.9)) translateY(0);
  }
  30% {
    transform: scale(var(--fa-bounce-jump-scale-x, 0.9), var(--fa-bounce-jump-scale-y, 1.1)) translateY(var(--fa-bounce-height, -0.5em));
  }
  50% {
    transform: scale(var(--fa-bounce-land-scale-x, 1.05), var(--fa-bounce-land-scale-y, 0.95)) translateY(0);
  }
  57% {
    transform: scale(1, 1) translateY(var(--fa-bounce-rebound, -0.125em));
  }
  64% {
    transform: scale(1, 1) translateY(0);
  }
  100% {
    transform: scale(1, 1) translateY(0);
  }
}
@keyframes fa-fade {
  50% {
    opacity: var(--fa-fade-opacity, 0.4);
  }
}
@keyframes fa-beat-fade {
  0%, 100% {
    opacity: var(--fa-beat-fade-opacity, 0.4);
    transform: scale(1);
  }
  50% {
    opacity: 1;
    transform: scale(var(--fa-beat-fade-scale, 1.125));
  }
}
@keyframes fa-flip {
  50% {
    transform: rotate3d(var(--fa-flip-x, 0), var(--fa-flip-y, 1), var(--fa-flip-z, 0), var(--fa-flip-angle, -180deg));
  }
}
@keyframes fa-shake {
  0% {
    transform: rotate(-15deg);
  }
  4% {
    transform: rotate(15deg);
  }
  8%, 24% {
    transform: rotate(-18deg);
  }
  12%, 28% {
    transform: rotate(18deg);
  }
  16% {
    transform: rotate(-22deg);
  }
  20% {
    transform: rotate(22deg);
  }
  32% {
    transform: rotate(-12deg);
  }
  36% {
    transform: rotate(12deg);
  }
  40%, 100% {
    transform: rotate(0deg);
  }
}
@keyframes fa-spin {
  0% {
    transform: rotate(0deg);
  }
  100% {
    transform: rotate(360deg);
  }
}
.fa-rotate-90 {
  transform: rotate(90deg);
}

.fa-rotate-180 {
  transform: rotate(180deg);
}

.fa-rotate-270 {
  transform: rotate(270deg);
}

.fa-flip-horizontal {
  transform: scale(-1, 1);
}

.fa-flip-vertical {
  transform: scale(1, -1);
}

.fa-flip-both,
.fa-flip-horizontal.fa-flip-vertical {
  transform: scale(-1, -1);
}

.fa-rotate-by {
  transform: rotate(var(--fa-rotate-angle, 0));
}

.fa-stack {
  display: inline-block;
  vertical-align: middle;
  height: 2em;
  position: relative;
  width: 2.5em;
}

.fa-stack-1x,
.fa-stack-2x {
  bottom: 0;
  left: 0;
  margin: auto;
  position: absolute;
  right: 0;
  top: 0;
  z-index: var(--fa-stack-z-index, auto);
}

.svg-inline--fa.fa-stack-1x {
  height: 1em;
  width: 1.25em;
}
.svg-inline--fa.fa-stack-2x {
  height: 2em;
  width: 2.5em;
}

.fa-inverse {
  color: var(--fa-inverse, #fff);
}

.sr-only,
.fa-sr-only {
  position: absolute;
  width: 1px;
  height: 1px;
  padding: 0;
  margin: -1px;
  overflow: hidden;
  clip: rect(0, 0, 0, 0);
  white-space: nowrap;
  border-width: 0;
}

.sr-only-focusable:not(:focus),
.fa-sr-only-focusable:not(:focus) {
  position: absolute;
  width: 1px;
  height: 1px;
  padding: 0;
  margin: -1px;
  overflow: hidden;
  clip: rect(0, 0, 0, 0);
  white-space: nowrap;
  border-width: 0;
}

.svg-inline--fa .fa-primary {
  fill: var(--fa-primary-color, currentColor);
  opacity: var(--fa-primary-opacity, 1);
}

.svg-inline--fa .fa-secondary {
  fill: var(--fa-secondary-color, currentColor);
  opacity: var(--fa-secondary-opacity, 0.4);
}

.svg-inline--fa.fa-swap-opacity .fa-primary {
  opacity: var(--fa-secondary-opacity, 0.4);
}

.svg-inline--fa.fa-swap-opacity .fa-secondary {
  opacity: var(--fa-primary-opacity, 1);
}

.svg-inline--fa mask .fa-primary,
.svg-inline--fa mask .fa-secondary {
  fill: black;
}`;function wE(){const e=pE,t=mE,i=mt.cssPrefix,n=mt.replacementClass;let a=dDe;if(i!==e||n!==t){const s=new RegExp("\\.".concat(e,"\\-"),"g"),r=new RegExp("\\--".concat(e,"\\-"),"g"),o=new RegExp("\\.".concat(t),"g");a=a.replace(s,".".concat(i,"-")).replace(r,"--".concat(i,"-")).replace(o,".".concat(n))}return a}let D2=!1;function Mp(){mt.autoAddCss&&!D2&&(rDe(wE()),D2=!0)}var fDe={mixout(){return{dom:{css:wE,insertCss:Mp}}},hooks(){return{beforeDOMElementCreation(){Mp()},beforeI2svg(){Mp()}}}};const Rs=Cr||{};Rs[Ls]||(Rs[Ls]={});Rs[Ls].styles||(Rs[Ls].styles={});Rs[Ls].hooks||(Rs[Ls].hooks={});Rs[Ls].shims||(Rs[Ls].shims=[]);var Va=Rs[Ls];const CE=[],EE=function(){_i.removeEventListener("DOMContentLoaded",EE),Zh=1,CE.map(e=>e())};let Zh=!1;Hs&&(Zh=(_i.documentElement.doScroll?/^loaded|^c/:/^loaded|^i|^c/).test(_i.readyState),Zh||_i.addEventListener("DOMContentLoaded",EE));function hDe(e){Hs&&(Zh?setTimeout(e,0):CE.push(e))}function dd(e){const{tag:t,attributes:i={},children:n=[]}=e;return typeof e=="string"?yE(e):"<".concat(t," ").concat(lDe(i),">").concat(n.map(dd).join(""),"</").concat(t,">")}function x2(e,t,i){if(e&&e[t]&&e[t][i])return{prefix:t,iconName:i,icon:e[t][i]}}var Lp=function(t,i,n,a){var s=Object.keys(t),r=s.length,o=i,l,u,f;for(n===void 0?(l=1,f=t[s[0]]):(l=0,f=n);l<r;l++)u=s[l],f=o(f,t[u],u,t);return f};function ADe(e){const t=[];let i=0;const n=e.length;for(;i<n;){const a=e.charCodeAt(i++);if(a>=55296&&a<=56319&&i<n){const s=e.charCodeAt(i++);(s&64512)==56320?t.push(((a&1023)<<10)+(s&1023)+65536):(t.push(a),i--)}else t.push(a)}return t}function L_(e){const t=ADe(e);return t.length===1?t[0].toString(16):null}function gDe(e,t){const i=e.length;let n=e.charCodeAt(t),a;return n>=55296&&n<=56319&&i>t+1&&(a=e.charCodeAt(t+1),a>=56320&&a<=57343)?(n-55296)*1024+a-56320+65536:n}function T2(e){return Object.keys(e).reduce((t,i)=>{const n=e[i];return!!n.icon?t[n.iconName]=n.icon:t[i]=n,t},{})}function R_(e,t){let i=arguments.length>2&&arguments[2]!==void 0?arguments[2]:{};const{skipHooks:n=!1}=i,a=T2(t);typeof Va.hooks.addPack=="function"&&!n?Va.hooks.addPack(e,T2(t)):Va.styles[e]=Ke(Ke({},Va.styles[e]||{}),a),e==="fas"&&R_("fa",t)}const{styles:Yc,shims:pDe}=Va,kE=Object.keys($1),mDe=kE.reduce((e,t)=>(e[t]=Object.keys($1[t]),e),{});let i0=null,BE={},SE={},DE={},xE={},TE={};function _De(e){return~iDe.indexOf(e)}function FDe(e,t){const i=t.split("-"),n=i[0],a=i.slice(1).join("-");return n===e&&a!==""&&!_De(a)?a:null}const IE=()=>{const e=n=>Lp(Yc,(a,s,r)=>(a[r]=Lp(s,n,{}),a),{});BE=e((n,a,s)=>(a[3]&&(n[a[3]]=s),a[2]&&a[2].filter(o=>typeof o=="number").forEach(o=>{n[o.toString(16)]=s}),n)),SE=e((n,a,s)=>(n[s]=s,a[2]&&a[2].filter(o=>typeof o=="string").forEach(o=>{n[o]=s}),n)),TE=e((n,a,s)=>{const r=a[2];return n[s]=s,r.forEach(o=>{n[o]=s}),n});const t="far"in Yc||mt.autoFetchSvg,i=Lp(pDe,(n,a)=>{const s=a[0];let r=a[1];const o=a[2];return r==="far"&&!t&&(r="fas"),typeof s=="string"&&(n.names[s]={prefix:r,iconName:o}),typeof s=="number"&&(n.unicodes[s.toString(16)]={prefix:r,iconName:o}),n},{names:{},unicodes:{}});DE=i.names,xE=i.unicodes,i0=KA(mt.styleDefault,{family:mt.familyDefault})};sDe(e=>{i0=KA(e.styleDefault,{family:mt.familyDefault})});IE();function n0(e,t){return(BE[e]||{})[t]}function bDe(e,t){return(SE[e]||{})[t]}function $r(e,t){return(TE[e]||{})[t]}function PE(e){return DE[e]||{prefix:null,iconName:null}}function vDe(e){const t=xE[e],i=n0("fas",e);return t||(i?{prefix:"fas",iconName:i}:null)||{prefix:null,iconName:null}}function Er(){return i0}const 
ME=()=>({prefix:null,iconName:null,rest:[]});function yDe(e){let t=bn;const i=kE.reduce((n,a)=>(n[a]="".concat(mt.cssPrefix,"-").concat(a),n),{});return AE.forEach(n=>{(e.includes(i[n])||e.some(a=>mDe[n].includes(a)))&&(t=n)}),t}function KA(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{};const{family:i=bn}=t,n=J6e[i][e];if(i===VA&&!e)return"fad";const a=S2[i][e]||S2[i][n],s=e in Va.styles?e:null;return a||s||null}function wDe(e){let t=[],i=null;return e.forEach(n=>{const a=FDe(mt.cssPrefix,n);a?i=a:n&&t.push(n)}),{iconName:i,rest:t}}funct
`);t.setAttribute(bo,""),t.innerHTML=a}};function U2(e){e()}function OE(e,t){const i=typeof t=="function"?t:xf;if(e.length===0)i();else{let n=U2;mt.mutateApproach===Y6e&&(n=Cr.requestAnimationFrame||U2),n(()=>{const a=GDe(),s=s0.begin("mutate");e.map(a),s(),i()})}}let r0=!1;function NE(){r0=!0}function j_(){r0=!1}let $h=null;function O2(e){if(!C2||!mt.observeMutations)return;const{treeCallback:t=xf,nodeCallback:i=xf,pseudoElementsCallback:n=xf,observeMutationsRoot:a=_i}=e;$h=new C2(s=>{if(r0)return;const r=Er();Vl(s).forEach(o=>{if(o.type==="childList"&&o.addedNodes.length>0&&!R2(o.addedNodes[0])&&(mt.searchPseudoElements&&n(o.target),t(o.target)),o.type==="attributes"&&o.target.parentNode&&mt.searchPseudoElements&&n(o.target.parentNode),o.type==="attributes"&&R2(o.target)&&~tDe.indexOf(o.attributeName))if(o.attributeName==="class"&&QDe(o.target)){const{prefix:l,iconName:u}=YA(e0(o.target));o.target.setAttribute(J1,l||r),u&&o.target.setAttribute(Z1,u)}else jDe(o.target)&&i(o.target)})}),Hs&&$h.observe(a,{childList:!0,attributes:!0,characterData:!0,subtree:!0})}function WDe(){$h&&$h.disconnect()}function KDe(e){const t=e.getAttribute("style");let i=[];return t&&(i=t.split(";").reduce((n,a)=>{const s=a.split(":"),r=s[0],o=s.slice(1);return r&&o.length>0&&(n[r]=o.join(":").trim()),n},{})),i}function YDe(e){const t=e.getAttribute("data-prefix"),i=e.getAttribute("data-icon"),n=e.innerText!==void 0?e.innerText.trim():"";let a=YA(e0(e));return a.prefix||(a.prefix=Er()),t&&i&&(a.prefix=t,a.iconName=i),a.iconName&&a.prefix||(a.prefix&&n.length>0&&(a.iconName=bDe(a.prefix,e.innerText)||n0(a.prefix,L_(e.innerText))),!a.iconName&&mt.autoFetchSvg&&e.firstChild&&e.firstChild.nodeType===Node.TEXT_NODE&&(a.iconName=e.firstChild.data)),a}function XDe(e){const t=Vl(e.attributes).reduce((a,s)=>(a.name!=="class"&&a.name!=="style"&&(a[s.name]=s.value),a),{}),i=e.getAttribute("title"),n=e.getAttribute("data-fa-title-id");return mt.autoA11y&&(i?t["aria-labelledby"]="".concat(mt.replacementClass,"-title-").concat(n||Kc()):(t["aria-hidden"]="true",t.focusable="false")),t}function JDe(){return{iconName:null,title:null,titleId:null,prefix:null,transform:qa,symbol:!1,mask:{iconName:null,prefix:null,rest:[]},maskId:null,extra:{classes:[],styles:{},attributes:{}}}}function N2(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{styleParser:!0};const{iconName:i,prefix:n,rest:a}=YDe(e),s=XDe(e),r=U_("parseNodeAttributes",{},e);let o=t.styleParser?KDe(e):[];return Ke({iconName:i,title:e.getAttribute("title"),titleId:e.getAttribute("data-fa-title-id"),prefix:n,transform:qa,mask:{iconName:null,prefix:null,rest:[]},maskId:null,symbol:!1,extra:{classes:a,styles:o,attributes:s}},r)}const{styles:ZDe}=Va;function HE(e){const t=mt.autoReplaceSvg==="nest"?N2(e,{styleParser:!1}):N2(e);return~t.extra.classes.indexOf(bE)?kr("generateLayersText",e,t):kr("generateSvgReplacementMutation",e,t)}function $De(){return[...P6e,...D_]}function H2(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:null;if(!Hs)return Promise.resolve();const i=_i.documentElement.classList,n=f=>i.add("".concat(B2,"-").concat(f)),a=f=>i.remove("".concat(B2,"-").concat(f)),s=mt.autoFetchSvg?$De():hE.concat(Object.keys(ZDe));s.includes("fa")||s.push("fa");const r=[".".concat(bE,":not([").concat(bo,"])")].concat(s.map(f=>".".concat(f,":not([").concat(bo,"])"))).join(", ");if(r.length===0)return Promise.resolve();let o=[];try{o=Vl(e.querySelectorAll(r))}catch{}if(o.length>0)n("pending"),a("complete");else return Promise.resolve();const 
l=s0.begin("onTree"),u=o.reduce((f,h)=>{try{const A=HE(h);A&&f.push(A)}catch(A){_E||A.name==="MissingIcon"&&console.error(A)}return f},[]);return new Promise((f,h)=>{Promise.all(u).then(A=>{OE(A,()=>{n("active"),n("complete"),a("pending"),typeof t=="function"&&t(),l(),f()})}).catch(A=>{l(),h(A)})})}function exe(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:null;HE(e).then(i=>{i&&OE([i],t)})}function txe(e){return function(t){let i=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{};const n=(t||{}).icon?t:O_(t||{});let{m
`),e.removeAttribute(i),n()}).catch(a)}else n()}else n()})}function dxe(e){return Promise.all([G2(e,"::before"),G2(e,"::after")])}function fxe(e){return e.parentNode!==document.head&&!~X6e.indexOf(e.tagName.toUpperCase())&&!e.getAttribute(T_)&&(!e.parentNode||e.parentNode.tagName!=="svg")}function z2(e){if(Hs)return new Promise((t,i)=>{const n=Vl(e.querySelectorAll("*")).filter(fxe).map(dxe),a=s0.begin("searchPseudoElements");NE(),Promise.all(n).then(()=>{a(),j_(),t()}).catch(()=>{a(),j_(),i()})})}var hxe={hooks(){return{mutationObserverCallbacks(e){return e.pseudoElementsCallback=z2,e}}},provides(e){e.pseudoElements2svg=function(t){const{node:i=_i}=t;mt.searchPseudoElements&&z2(i)}}};let q2=!1;var Axe={mixout(){return{dom:{unwatch(){NE(),q2=!0}}}},hooks(){return{bootstrap(){O2(U_("mutationObserverCallbacks",{}))},noAuto(){WDe()},watch(e){const{observeMutationsRoot:t}=e;q2?j_():O2(U_("mutationObserverCallbacks",{observeMutationsRoot:t}))}}}};const V2=e=>{let t={size:16,x:0,y:0,flipX:!1,flipY:!1,rotate:0};return e.toLowerCase().split(" ").reduce((i,n)=>{const a=n.toLowerCase().split("-"),s=a[0];let r=a.slice(1).join("-");if(s&&r==="h")return i.flipX=!0,i;if(s&&r==="v")return i.flipY=!0,i;if(r=parseFloat(r),isNaN(r))return i;switch(s){case"grow":i.size=i.size+r;break;case"shrink":i.size=i.size-r;break;case"left":i.x=i.x-r;break;case"right":i.x=i.x+r;break;case"up":i.y=i.y-r;break;case"down":i.y=i.y+r;break;case"rotate":i.rotate=i.rotate+r;break}return i},t)};var gxe={mixout(){return{parse:{transform:e=>V2(e)}}},hooks(){return{parseNodeAttributes(e,t){const i=t.getAttribute("data-fa-transform");return i&&(e.transform=V2(i)),e}}},provides(e){e.generateAbstractTransformGrouping=function(t){let{main:i,transform:n,containerWidth:a,iconWidth:s}=t;const r={transform:"translate(".concat(a/2," 256)")},o="translate(".concat(n.x*32,", ").concat(n.y*32,") "),l="scale(".concat(n.size/16*(n.flipX?-1:1),", ").concat(n.size/16*(n.flipY?-1:1),") "),u="rotate(".concat(n.rotate," 0 0)"),f={transform:"".concat(o," ").concat(l," ").concat(u)},h={transform:"translate(".concat(s/2*-1," -256)")},A={outer:r,inner:f,path:h};return{tag:"g",attributes:Ke({},A.outer),children:[{tag:"g",attributes:Ke({},A.inner),children:[{tag:i.icon.tag,children:i.icon.children,attributes:Ke(Ke({},i.icon.attributes),A.path)}]}]}}}};const Up={x:0,y:0,width:"100%",height:"100%"};function W2(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:!0;return e.attributes&&(e.attributes.fill||t)&&(e.attributes.fill="black"),e}function pxe(e){return e.tag==="g"?e.children:[e]}var mxe={hooks(){return{parseNodeAttributes(e,t){const i=t.getAttribute("data-fa-mask"),n=i?YA(i.split(" ").map(a=>a.trim())):ME();return n.prefix||(n.prefix=Er()),e.mask=n,e.maskId=t.getAttribute("data-fa-mask-id"),e}}},provides(e){e.generateAbstractMask=function(t){let{children:i,attributes:n,main:a,mask:s,maskId:r,transform:o}=t;const{width:l,icon:u}=a,{width:f,icon:h}=s,A=uDe({transform:o,containerWidth:f,iconWidth:l}),p={tag:"rect",attributes:Ke(Ke({},Up),{},{fill:"white"})},F=u.children?{children:u.children.map(W2)}:{},y={tag:"g",attributes:Ke({},A.inner),children:[W2(Ke({tag:u.tag,attributes:Ke(Ke({},u.attributes),A.path)},F))]},E={tag:"g",attributes:Ke({},A.outer),children:[y]},w="mask-".concat(r||Kc()),C="clip-".concat(r||Kc()),B={tag:"mask",attributes:Ke(Ke({},Up),{},{id:w,maskUnits:"userSpaceOnUse",maskContentUnits:"userSpaceOnUse"}),children:[p,E]},S={tag:"defs",children:[{tag:"clipPath",attributes:{id:C},children:pxe(h)},B]};return 
i.push(S,{tag:"rect",attributes:Ke({fill:"currentColor","clip-path":"url(#".concat(C,")"),mask:"url(#".concat(w,")")},Up)}),{children:i,attributes:n}}}},_xe={provides(e){let t=!1;Cr.matchMedia&&(t=Cr.matchMedia("(prefers-reduced-motion: reduce)").matches),e.missingIconAbstract=function(){const i=[],n={fill:"currentColor"},a={attributeType:"XML",repeatCount:"indefinite",dur:"2s"};i.push({tag:"path",attributes:Ke(Ke({},n),{},{d:"M156.5,447.7l-12.6,29.5c-18.7-9.5-35.9-21.2-51.5-34.9l22.7-22.7C127.6,430.5,141.5,440,156.5,447.7z M40.6,272H8.5 c1.4,21.2,5.4,41
 * Font Awesome Free 6.7.2 by @fontawesome - https://fontawesome.com
 * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
 * Copyright 2024 Fonticons, Inc.
*/const Lxe={prefix:"fas",iconName:"forward-step",icon:[320,512,["step-forward"],"f051","M52.5 440.6c-9.5 7.9-22.8 9.7-34.1 4.4S0 428.4 0 416L0 96C0 83.6 7.2 72.3 18.4 67s24.5-3.6 34.1 4.4l192 160L256 241l0-145c0-17.7 14.3-32 32-32s32 14.3 32 32l0 320c0 17.7-14.3 32-32 32s-32-14.3-32-32l0-145-11.5 9.6-192 160z"]},Rxe=Lxe,Uxe={prefix:"fas",iconName:"address-book",icon:[512,512,[62138,"contact-book"],"f2b9","M96 0C60.7 0 32 28.7 32 64l0 384c0 35.3 28.7 64 64 64l288 0c35.3 0 64-28.7 64-64l0-384c0-35.3-28.7-64-64-64L96 0zM208 288l64 0c44.2 0 80 35.8 80 80c0 8.8-7.2 16-16 16l-192 0c-8.8 0-16-7.2-16-16c0-44.2 35.8-80 80-80zm-32-96a64 64 0 1 1 128 0 64 64 0 1 1 -128 0zM512 80c0-8.8-7.2-16-16-16s-16 7.2-16 16l0 64c0 8.8 7.2 16 16 16s16-7.2 16-16l0-64zM496 192c-8.8 0-16 7.2-16 16l0 64c0 8.8 7.2 16 16 16s16-7.2 16-16l0-64c0-8.8-7.2-16-16-16zm16 144c0-8.8-7.2-16-16-16s-16 7.2-16 16l0 64c0 8.8 7.2 16 16 16s16-7.2 16-16l0-64z"]},Oxe={prefix:"fas",iconName:"backward",icon:[512,512,[9194],"f04a","M459.5 440.6c9.5 7.9 22.8 9.7 34.1 4.4s18.4-16.6 18.4-29l0-320c0-12.4-7.2-23.7-18.4-29s-24.5-3.6-34.1 4.4L288 214.3l0 41.7 0 41.7L459.5 440.6zM256 352l0-96 0-128 0-32c0-12.4-7.2-23.7-18.4-29s-24.5-3.6-34.1 4.4l-192 160C4.2 237.5 0 246.5 0 256s4.2 18.5 11.5 24.6l192 160c9.5 7.9 22.8 9.7 34.1 4.4s18.4-16.6 18.4-29l0-64z"]},Nxe={prefix:"fas",iconName:"volume-low",icon:[448,512,[128264,"volume-down"],"f027","M301.1 34.8C312.6 40 320 51.4 320 64l0 384c0 12.6-7.4 24-18.9 29.2s-25 3.1-34.4-5.3L131.8 352 64 352c-35.3 0-64-28.7-64-64l0-64c0-35.3 28.7-64 64-64l67.8 0L266.7 40.1c9.4-8.4 22.9-10.4 34.4-5.3zM412.6 181.5C434.1 199.1 448 225.9 448 256s-13.9 56.9-35.4 74.5c-10.3 8.4-25.4 6.8-33.8-3.5s-6.8-25.4 3.5-33.8C393.1 284.4 400 271 400 256s-6.9-28.4-17.7-37.3c-10.3-8.4-11.8-23.5-3.5-33.8s23.5-11.8 33.8-3.5z"]},Hxe={prefix:"fas",iconName:"lock",icon:[448,512,[128274],"f023","M144 144l0 48 160 0 0-48c0-44.2-35.8-80-80-80s-80 35.8-80 80zM80 192l0-48C80 64.5 144.5 0 224 0s144 64.5 144 144l0 48 16 0c35.3 0 64 28.7 64 64l0 192c0 35.3-28.7 64-64 64L64 512c-35.3 0-64-28.7-64-64L0 256c0-35.3 28.7-64 64-64l16 0z"]},Qxe={prefix:"fas",iconName:"angle-right",icon:[320,512,[8250],"f105","M278.6 233.4c12.5 12.5 12.5 32.8 0 45.3l-160 160c-12.5 12.5-32.8 12.5-45.3 0s-12.5-32.8 0-45.3L210.7 256 73.4 118.6c-12.5-12.5-12.5-32.8 0-45.3s32.8-12.5 45.3 0l160 160z"]},jxe={prefix:"fas",iconName:"globe",icon:[512,512,[127760],"f0ac","M352 256c0 22.2-1.2 43.6-3.3 64l-185.3 0c-2.2-20.4-3.3-41.8-3.3-64s1.2-43.6 3.3-64l185.3 0c2.2 20.4 3.3 41.8 3.3 64zm28.8-64l123.1 0c5.3 20.5 8.1 41.9 8.1 64s-2.8 43.5-8.1 64l-123.1 0c2.1-20.6 3.2-42 3.2-64s-1.1-43.4-3.2-64zm112.6-32l-116.7 0c-10-63.9-29.8-117.4-55.3-151.6c78.3 20.7 142 77.5 171.9 151.6zm-149.1 0l-176.6 0c6.1-36.4 15.5-68.6 27-94.7c10.5-23.6 22.2-40.7 33.5-51.5C239.4 3.2 248.7 0 256 0s16.6 3.2 27.8 13.8c11.3 10.8 23 27.9 33.5 51.5c11.6 26 20.9 58.2 27 94.7zm-209 0L18.6 160C48.6 85.9 112.2 29.1 190.6 8.4C165.1 42.6 145.3 96.1 135.3 160zM8.1 192l123.1 0c-2.1 20.6-3.2 42-3.2 64s1.1 43.4 3.2 64L8.1 320C2.8 299.5 0 278.1 0 256s2.8-43.5 8.1-64zM194.7 446.6c-11.6-26-20.9-58.2-27-94.6l176.6 0c-6.1 36.4-15.5 68.6-27 94.6c-10.5 23.6-22.2 40.7-33.5 51.5C272.6 508.8 263.3 512 256 512s-16.6-3.2-27.8-13.8c-11.3-10.8-23-27.9-33.5-51.5zM135.3 352c10 63.9 29.8 117.4 55.3 151.6C112.2 482.9 48.6 426.1 18.6 352l116.7 0zm358.1 0c-30 74.1-93.6 130.9-171.9 151.6c25.5-34.2 45.2-87.7 55.3-151.6l116.7 0z"]},Gxe={prefix:"fas",iconName:"server",icon:[512,512,[],"f233","M64 32C28.7 32 0 60.7 0 96l0 64c0 35.3 28.7 64 64 64l384 
0c35.3 0 64-28.7 64-64l0-64c0-35.3-28.7-64-64-64L64 32zm280 72a24 24 0 1 1 0 48 24 24 0 1 1 0-48zm48 24a24 24 0 1 1 48 0 24 24 0 1 1 -48 0zM64 288c-35.3 0-64 28.7-64 64l0 64c0 35.3 28.7 64 64 64l384 0c35.3 0 64-28.7 64-64l0-64c0-35.3-28.7-64-64-64L64 288zm280 72a24 24 0 1 1 0 48 24 24 0 1 1 0-48zm56 24a24 24 0 1 1 48 0 24 24 0 1 1 -48 0z"]},zxe={prefix:"fas",iconName:"pause",icon:[320,512,[9208],"f04c","M48 64C21.5 64 0 85.5 0 112L0 400c0 26.5 21.5 48 48 48l32 0c26.5 0 48-21.5 48-48l0-288c0-26.5-21.5-48-48-48L48 64zm192 0c-26.5 0-48 21.