forked from Simnation/Main
4872 lines
3 MiB
JavaScript
(function(){const t=document.createElement("link").relList;if(t&&t.supports&&t.supports("modulepreload"))return;for(const a of document.querySelectorAll('link[rel="modulepreload"]'))n(a);new MutationObserver(a=>{for(const s of a)if(s.type==="childList")for(const r of s.addedNodes)r.tagName==="LINK"&&r.rel==="modulepreload"&&n(r)}).observe(document,{childList:!0,subtree:!0});function i(a){const s={};return a.integrity&&(s.integrity=a.integrity),a.referrerPolicy&&(s.referrerPolicy=a.referrerPolicy),a.crossOrigin==="use-credentials"?s.credentials="include":a.crossOrigin==="anonymous"?s.credentials="omit":s.credentials="same-origin",s}function n(a){if(a.ep)return;a.ep=!0;const s=i(a);fetch(a.href,s)}})();/**
* @vue/shared v3.5.17
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**//*! #__NO_SIDE_EFFECTS__ */function Y_(e){const t=Object.create(null);for(const i of e.split(","))t[i]=1;return i=>i in t}const oi={},nl=[],Ya=()=>{},QE=()=>!1,aA=e=>e.charCodeAt(0)===111&&e.charCodeAt(1)===110&&(e.charCodeAt(2)>122||e.charCodeAt(2)<97),X_=e=>e.startsWith("onUpdate:"),tn=Object.assign,J_=(e,t)=>{const i=e.indexOf(t);i>-1&&e.splice(i,1)},jE=Object.prototype.hasOwnProperty,ei=(e,t)=>jE.call(e,t),vt=Array.isArray,al=e=>sA(e)==="[object Map]",e2=e=>sA(e)==="[object Set]",Et=e=>typeof e=="function",Pi=e=>typeof e=="string",Qs=e=>typeof e=="symbol",bi=e=>e!==null&&typeof e=="object",t2=e=>(bi(e)||Et(e))&&Et(e.then)&&Et(e.catch),i2=Object.prototype.toString,sA=e=>i2.call(e),GE=e=>sA(e).slice(8,-1),n2=e=>sA(e)==="[object Object]",Z_=e=>Pi(e)&&e!=="NaN"&&e[0]!=="-"&&""+parseInt(e,10)===e,xu=Y_(",key,ref,ref_for,ref_key,onVnodeBeforeMount,onVnodeMounted,onVnodeBeforeUpdate,onVnodeUpdated,onVnodeBeforeUnmount,onVnodeUnmounted"),rA=e=>{const t=Object.create(null);return i=>t[i]||(t[i]=e(i))},zE=/-(\w)/g,ga=rA(e=>e.replace(zE,(t,i)=>i?i.toUpperCase():"")),qE=/\B([A-Z])/g,Tr=rA(e=>e.replace(qE,"-$1").toLowerCase()),oA=rA(e=>e.charAt(0).toUpperCase()+e.slice(1)),kg=rA(e=>e?`on${oA(e)}`:""),Ar=(e,t)=>!Object.is(e,t),gf=(e,...t)=>{for(let i=0;i<e.length;i++)e[i](...t)},Gm=(e,t,i,n=!1)=>{Object.defineProperty(e,t,{configurable:!0,enumerable:!1,writable:n,value:i})},zm=e=>{const t=parseFloat(e);return isNaN(t)?e:t},VE=e=>{const t=Pi(e)?Number(e):NaN;return isNaN(t)?e:t};let Z0;const lA=()=>Z0||(Z0=typeof globalThis<"u"?globalThis:typeof self<"u"?self:typeof window<"u"?window:typeof global<"u"?global:{});function et(e){if(vt(e)){const t={};for(let i=0;i<e.length;i++){const n=e[i],a=Pi(n)?XE(n):et(n);if(a)for(const s in a)t[s]=a[s]}return t}else if(Pi(e)||bi(e))return e}const WE=/;(?![^(]*\))/g,KE=/:([^]+)/,YE=/\/\*[^]*?\*\//g;function XE(e){const t={};return e.replace(YE,"").split(WE).forEach(i=>{if(i){const n=i.split(KE);n.length>1&&(t[n[0].trim()]=n[1].trim())}}),t}function H(e){let t="";if(Pi(e))t=e;else if(vt(e))for(let i=0;i<e.length;i++){const n=H(e[i]);n&&(t+=n+" ")}else if(bi(e))for(const i in e)e[i]&&(t+=i+" ");return t.trim()}const JE="itemscope,allowfullscreen,formnovalidate,ismap,nomodule,novalidate,readonly",ZE=Y_(JE);function a2(e){return!!e||e===""}const s2=e=>!!(e&&e.__v_isRef===!0),_=e=>Pi(e)?e:e==null?"":vt(e)||bi(e)&&(e.toString===i2||!Et(e.toString))?s2(e)?_(e.value):JSON.stringify(e,r2,2):String(e),r2=(e,t)=>s2(t)?r2(e,t.value):al(t)?{[`Map(${t.size})`]:[...t.entries()].reduce((i,[n,a],s)=>(i[Eg(n,s)+" =>"]=a,i),{})}:e2(t)?{[`Set(${t.size})`]:[...t.values()].map(i=>Eg(i))}:Qs(t)?Eg(t):bi(t)&&!vt(t)&&!n2(t)?String(t):t,Eg=(e,t="")=>{var i;return Qs(e)?`Symbol(${(i=e.description)!=null?i:t})`:e};/**
* @vue/reactivity v3.5.17
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**/let An;class o2{constructor(t=!1){this.detached=t,this._active=!0,this._on=0,this.effects=[],this.cleanups=[],this._isPaused=!1,this.parent=An,!t&&An&&(this.index=(An.scopes||(An.scopes=[])).push(this)-1)}get active(){return this._active}pause(){if(this._active){this._isPaused=!0;let t,i;if(this.scopes)for(t=0,i=this.scopes.length;t<i;t++)this.scopes[t].pause();for(t=0,i=this.effects.length;t<i;t++)this.effects[t].pause()}}resume(){if(this._active&&this._isPaused){this._isPaused=!1;let t,i;if(this.scopes)for(t=0,i=this.scopes.length;t<i;t++)this.scopes[t].resume();for(t=0,i=this.effects.length;t<i;t++)this.effects[t].resume()}}run(t){if(this._active){const i=An;try{return An=this,t()}finally{An=i}}}on(){++this._on===1&&(this.prevScope=An,An=this)}off(){this._on>0&&--this._on===0&&(An=this.prevScope,this.prevScope=void 0)}stop(t){if(this._active){this._active=!1;let i,n;for(i=0,n=this.effects.length;i<n;i++)this.effects[i].stop();for(this.effects.length=0,i=0,n=this.cleanups.length;i<n;i++)this.cleanups[i]();if(this.cleanups.length=0,this.scopes){for(i=0,n=this.scopes.length;i<n;i++)this.scopes[i].stop(!0);this.scopes.length=0}if(!this.detached&&this.parent&&!t){const a=this.parent.scopes.pop();a&&a!==this&&(this.parent.scopes[this.index]=a,a.index=this.index)}this.parent=void 0}}}function l2(e){return new o2(e)}function u2(){return An}function $E(e,t=!1){An&&An.cleanups.push(e)}let hi;const Bg=new WeakSet;class c2{constructor(t){this.fn=t,this.deps=void 0,this.depsTail=void 0,this.flags=5,this.next=void 0,this.cleanup=void 0,this.scheduler=void 0,An&&An.active&&An.effects.push(this)}pause(){this.flags|=64}resume(){this.flags&64&&(this.flags&=-65,Bg.has(this)&&(Bg.delete(this),this.trigger()))}notify(){this.flags&2&&!(this.flags&32)||this.flags&8||f2(this)}run(){if(!(this.flags&1))return this.fn();this.flags|=2,$0(this),h2(this);const t=hi,i=Ba;hi=this,Ba=!0;try{return this.fn()}finally{A2(this),hi=t,Ba=i,this.flags&=-3}}stop(){if(this.flags&1){for(let t=this.deps;t;t=t.nextDep)t1(t);this.deps=this.depsTail=void 0,$0(this),this.onStop&&this.onStop(),this.flags&=-2}}trigger(){this.flags&64?Bg.add(this):this.scheduler?this.scheduler():this.runIfDirty()}runIfDirty(){qm(this)&&this.run()}get dirty(){return qm(this)}}let d2=0,Tu,Iu;function f2(e,t=!1){if(e.flags|=8,t){e.next=Iu,Iu=e;return}e.next=Tu,Tu=e}function $_(){d2++}function e1(){if(--d2>0)return;if(Iu){let t=Iu;for(Iu=void 0;t;){const i=t.next;t.next=void 0,t.flags&=-9,t=i}}let e;for(;Tu;){let t=Tu;for(Tu=void 0;t;){const i=t.next;if(t.next=void 0,t.flags&=-9,t.flags&1)try{t.trigger()}catch(n){e||(e=n)}t=i}}if(e)throw e}function h2(e){for(let t=e.deps;t;t=t.nextDep)t.version=-1,t.prevActiveLink=t.dep.activeLink,t.dep.activeLink=t}function A2(e){let t,i=e.depsTail,n=i;for(;n;){const a=n.prevDep;n.version===-1?(n===i&&(i=a),t1(n),eB(n)):t=n,n.dep.activeLink=n.prevActiveLink,n.prevActiveLink=void 0,n=a}e.deps=t,e.depsTail=i}function qm(e){for(let t=e.deps;t;t=t.nextDep)if(t.dep.version!==t.version||t.dep.computed&&(g2(t.dep.computed)||t.dep.version!==t.version))return!0;return!!e._dirty}function g2(e){if(e.flags&4&&!(e.flags&16)||(e.flags&=-17,e.globalVersion===Ju)||(e.globalVersion=Ju,!e.isSSR&&e.flags&128&&(!e.deps&&!e._dirty||!qm(e))))return;e.flags|=2;const t=e.dep,i=hi,n=Ba;hi=e,Ba=!0;try{h2(e);const a=e.fn(e._value);(t.version===0||Ar(a,e._value))&&(e.flags|=128,e._value=a,t.version++)}catch(a){throw t.version++,a}finally{hi=i,Ba=n,A2(e),e.flags&=-3}}function 
t1(e,t=!1){const{dep:i,prevSub:n,nextSub:a}=e;if(n&&(n.nextSub=a,e.prevSub=void 0),a&&(a.prevSub=n,e.nextSub=void 0),i.subs===e&&(i.subs=n,!n&&i.computed)){i.computed.flags&=-5;for(let s=i.computed.deps;s;s=s.nextDep)t1(s,!0)}!t&&!--i.sc&&i.map&&i.map.delete(i.key)}function eB(e){const{prevDep:t,nextDep:i}=e;t&&(t.nextDep=i,e.prevDep=void 0),i&&(i.prevDep=t,e.nextDep=void 0)}let Ba=!0;const m2=[];function Ds(){m2.push(Ba),Ba=!1}function xs(){const e=m2.pop();Ba=e===void 0?!0:e}function $0(e){const{cleanup:t}=e;if(e.cleanup=void 0,t){const i=hi;hi=void 0;try{t()}finally{hi=i}}}let Ju=0;class tB{constructor(t
* @vue/runtime-core v3.5.17
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**/function ed(e,t,i,n){try{return n?e(...n):e()}catch(a){cA(a,t,i)}}function Da(e,t,i,n){if(Et(e)){const a=ed(e,t,i,n);return a&&t2(a)&&a.catch(s=>{cA(s,t,i)}),a}if(vt(e)){const a=[];for(let s=0;s<e.length;s++)a.push(Da(e[s],t,i,n));return a}}function cA(e,t,i,n=!0){const a=t?t.vnode:null,{errorHandler:s,throwUnhandledErrorInProduction:r}=t&&t.appContext.config||oi;if(t){let o=t.parent;const l=t.proxy,c=`https://vuejs.org/error-reference/#runtime-${i}`;for(;o;){const f=o.ec;if(f){for(let h=0;h<f.length;h++)if(f[h](e,l,c)===!1)return}o=o.parent}if(s){Ds(),ed(s,null,10,[e,l,c]),xs();return}}BB(e,i,a,n,r)}function BB(e,t,i,n=!0,a=!1){if(a)throw e;console.error(e)}const Bn=[];let ja=-1;const sl=[];let ir=null,Wo=0;const x2=Promise.resolve();let Nf=null;function dA(e){const t=Nf||x2;return e?t.then(this?e.bind(this):e):t}function SB(e){let t=ja+1,i=Bn.length;for(;t<i;){const n=t+i>>>1,a=Bn[n],s=$u(a);s<e||s===e&&a.flags&2?t=n+1:i=n}return t}function o1(e){if(!(e.flags&1)){const t=$u(e),i=Bn[Bn.length-1];!i||!(e.flags&2)&&t>=$u(i)?Bn.push(e):Bn.splice(SB(t),0,e),e.flags|=1,T2()}}function T2(){Nf||(Nf=x2.then(P2))}function DB(e){vt(e)?sl.push(...e):ir&&e.id===-1?ir.splice(Wo+1,0,e):e.flags&1||(sl.push(e),e.flags|=1),T2()}function tF(e,t,i=ja+1){for(;i<Bn.length;i++){const n=Bn[i];if(n&&n.flags&2){if(e&&n.id!==e.uid)continue;Bn.splice(i,1),i--,n.flags&4&&(n.flags&=-2),n(),n.flags&4||(n.flags&=-2)}}}function I2(e){if(sl.length){const t=[...new Set(sl)].sort((i,n)=>$u(i)-$u(n));if(sl.length=0,ir){ir.push(...t);return}for(ir=t,Wo=0;Wo<ir.length;Wo++){const i=ir[Wo];i.flags&4&&(i.flags&=-2),i.flags&8||i(),i.flags&=-2}ir=null,Wo=0}}const $u=e=>e.id==null?e.flags&2?-1:1/0:e.id;function P2(e){try{for(ja=0;ja<Bn.length;ja++){const t=Bn[ja];t&&!(t.flags&8)&&(t.flags&4&&(t.flags&=-2),ed(t,t.i,t.i?15:14),t.flags&4||(t.flags&=-2))}}finally{for(;ja<Bn.length;ja++){const t=Bn[ja];t&&(t.flags&=-2)}ja=-1,Bn.length=0,I2(),Nf=null,(Bn.length||sl.length)&&P2()}}let Ki=null,M2=null;function Hf(e){const t=Ki;return Ki=e,M2=e&&e.type.__scopeId||null,t}function It(e,t=Ki,i){if(!t||e._n)return e;const n=(...a)=>{n._d&&fF(-1);const s=Hf(t);let r;try{r=e(...a)}finally{Hf(s),n._d&&fF(1)}return r};return n._n=!0,n._c=!0,n._d=!0,n}function Re(e,t){if(Ki===null)return e;const i=pA(Ki),n=e.dirs||(e.dirs=[]);for(let a=0;a<t.length;a++){let[s,r,o,l=oi]=t[a];s&&(Et(s)&&(s={mounted:s,updated:s}),s.deep&&ks(r),n.push({dir:s,instance:i,value:r,oldValue:void 0,arg:o,modifiers:l}))}return e}function Qr(e,t,i,n){const a=e.dirs,s=t&&t.dirs;for(let r=0;r<a.length;r++){const o=a[r];s&&(o.oldValue=s[r].value);let l=o.dir[n];l&&(Ds(),Da(l,i,8,[e.el,o,e,t]),xs())}}const xB=Symbol("_vte"),L2=e=>e.__isTeleport,nr=Symbol("_leaveCb"),Ld=Symbol("_enterCb");function TB(){const e={isMounted:!1,isLeaving:!1,isUnmounting:!1,leavingVNodes:new Map};return Fa(()=>{e.isMounted=!0}),as(()=>{e.isUnmounting=!0}),e}const ra=[Function,Array],R2={mode:String,appear:Boolean,persisted:Boolean,onBeforeEnter:ra,onEnter:ra,onAfterEnter:ra,onEnterCancelled:ra,onBeforeLeave:ra,onLeave:ra,onAfterLeave:ra,onLeaveCancelled:ra,onBeforeAppear:ra,onAppear:ra,onAfterAppear:ra,onAppearCancelled:ra},U2=e=>{const t=e.subTree;return t.component?U2(t.component):t},IB={name:"BaseTransition",props:R2,setup(e,{slots:t}){const i=C6(),n=TB();return()=>{const a=t.default&&H2(t.default(),!0);if(!a||!a.length)return;const s=O2(a),r=qt(e),{mode:o}=r;if(n.isLeaving)return xg(s);const l=iF(s);if(!l)return xg(s);let c=Km(l,r,n,i,h=>c=h);l.type!==pn&&ec(l,c);let 
f=i.subTree&&iF(i.subTree);if(f&&f.type!==pn&&!Wr(l,f)&&U2(i).type!==pn){let h=Km(f,r,n,i);if(ec(f,h),o==="out-in"&&l.type!==pn)return n.isLeaving=!0,h.afterLeave=()=>{n.isLeaving=!1,i.job.flags&8||i.update(),delete h.afterLeave,f=void 0},xg(s);o==="in-out"&&l.type!==pn?h.delayLeave=(A,m,F)=>{const y=N2(n,f);y[String(f.key)]=f,A[nr]=()=>{m(),A[nr]=void 0,delete c.delayedLeave,f=void 0},c.delayedLeave=()=>{F(),delete c.delayedLeave,f=void 0}}:f=void 0}else f&&(f=void 0);return s}}};function O2(e){let t=e[0];if(e.length>1){for(const i of e)if(i.type!==pn){t=i;break}
* @vue/runtime-dom v3.5.17
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**/let ep;const gF=typeof window<"u"&&window.trustedTypes;if(gF)try{ep=gF.createPolicy("vue",{createHTML:e=>e})}catch{}const mw=ep?e=>ep.createHTML(e):e=>e,I6="http://www.w3.org/2000/svg",P6="http://www.w3.org/1998/Math/MathML",bs=typeof document<"u"?document:null,mF=bs&&bs.createElement("template"),M6={insert:(e,t,i)=>{t.insertBefore(e,i||null)},remove:e=>{const t=e.parentNode;t&&t.removeChild(e)},createElement:(e,t,i,n)=>{const a=t==="svg"?bs.createElementNS(I6,e):t==="mathml"?bs.createElementNS(P6,e):i?bs.createElement(e,{is:i}):bs.createElement(e);return e==="select"&&n&&n.multiple!=null&&a.setAttribute("multiple",n.multiple),a},createText:e=>bs.createTextNode(e),createComment:e=>bs.createComment(e),setText:(e,t)=>{e.nodeValue=t},setElementText:(e,t)=>{e.textContent=t},parentNode:e=>e.parentNode,nextSibling:e=>e.nextSibling,querySelector:e=>bs.querySelector(e),setScopeId(e,t){e.setAttribute(t,"")},insertStaticContent(e,t,i,n,a,s){const r=i?i.previousSibling:t.lastChild;if(a&&(a===s||a.nextSibling))for(;t.insertBefore(a.cloneNode(!0),i),!(a===s||!(a=a.nextSibling)););else{mF.innerHTML=mw(n==="svg"?`<svg>${e}</svg>`:n==="mathml"?`<math>${e}</math>`:e);const o=mF.content;if(n==="svg"||n==="mathml"){const l=o.firstChild;for(;l.firstChild;)o.appendChild(l.firstChild);o.removeChild(l)}t.insertBefore(o,i)}return[r?r.nextSibling:t.firstChild,i?i.previousSibling:t.lastChild]}},Ys="transition",lu="animation",ac=Symbol("_vtc"),pw={name:String,type:String,css:{type:Boolean,default:!0},duration:[String,Number,Object],enterFromClass:String,enterActiveClass:String,enterToClass:String,appearFromClass:String,appearActiveClass:String,appearToClass:String,leaveFromClass:String,leaveActiveClass:String,leaveToClass:String},L6=tn({},R2,pw),R6=e=>(e.displayName="Transition",e.props=L6,e),Zs=R6((e,{slots:t})=>ai(PB,U6(e),t)),Gr=(e,t=[])=>{vt(e)?e.forEach(i=>i(...t)):e&&e(...t)},pF=e=>e?vt(e)?e.some(t=>t.length>1):e.length>1:!1;function U6(e){const t={};for(const ae in e)ae in pw||(t[ae]=e[ae]);if(e.css===!1)return t;const{name:i="v",type:n,duration:a,enterFromClass:s=`${i}-enter-from`,enterActiveClass:r=`${i}-enter-active`,enterToClass:o=`${i}-enter-to`,appearFromClass:l=s,appearActiveClass:c=r,appearToClass:f=o,leaveFromClass:h=`${i}-leave-from`,leaveActiveClass:A=`${i}-leave-active`,leaveToClass:m=`${i}-leave-to`}=e,F=O6(a),y=F&&F[0],k=F&&F[1],{onBeforeEnter:C,onEnter:w,onEnterCancelled:B,onLeave:S,onLeaveCancelled:U,onBeforeAppear:N=C,onAppear:z=w,onAppearCancelled:Q=B}=t,R=(ae,Fe,ge,le)=>{ae._enterCancelled=le,zr(ae,Fe?f:o),zr(ae,Fe?c:r),ge&&ge()},q=(ae,Fe)=>{ae._isLeaving=!1,zr(ae,h),zr(ae,m),zr(ae,A),Fe&&Fe()},J=ae=>(Fe,ge)=>{const le=ae?z:w,re=()=>R(Fe,ae,ge);Gr(le,[Fe,re]),_F(()=>{zr(Fe,ae?l:s),As(Fe,ae?f:o),pF(le)||FF(Fe,n,y,re)})};return tn(t,{onBeforeEnter(ae){Gr(C,[ae]),As(ae,s),As(ae,r)},onBeforeAppear(ae){Gr(N,[ae]),As(ae,l),As(ae,c)},onEnter:J(!1),onAppear:J(!0),onLeave(ae,Fe){ae._isLeaving=!0;const ge=()=>q(ae,Fe);As(ae,h),ae._enterCancelled?(As(ae,A),yF()):(yF(),As(ae,A)),_F(()=>{ae._isLeaving&&(zr(ae,h),As(ae,m),pF(S)||FF(ae,n,k,ge))}),Gr(S,[ae,ge])},onEnterCancelled(ae){R(ae,!1,void 0,!0),Gr(B,[ae])},onAppearCancelled(ae){R(ae,!0,void 0,!0),Gr(Q,[ae])},onLeaveCancelled(ae){q(ae),Gr(U,[ae])}})}function O6(e){if(e==null)return null;if(bi(e))return[Pg(e.enter),Pg(e.leave)];{const t=Pg(e);return[t,t]}}function Pg(e){return VE(e)}function As(e,t){t.split(/\s+/).forEach(i=>i&&e.classList.add(i)),(e[ac]||(e[ac]=new Set)).add(t)}function 
zr(e,t){t.split(/\s+/).forEach(n=>n&&e.classList.remove(n));const i=e[ac];i&&(i.delete(t),i.size||(e[ac]=void 0))}function _F(e){requestAnimationFrame(()=>{requestAnimationFrame(e)})}let N6=0;function FF(e,t,i,n){const a=e._endId=++N6,s=()=>{a===e._endId&&n()};if(i!=null)return setTimeout(s,i);const{type:r,timeout:o,propCount:l}=H6(e,t);if(!r)return n();const c=r+"end";let f=0;const h=()=>{e.removeEventListener(c,A),s()},A=m=>{m.target===e&&++f>=l&&h()};setTimeout(()=>{f<l&&h()},o+1),e.addEventListener(c,A)}function H6(e,t){const i=window.getComputedStyle(e),n=F=>(i[F]||"").split(", "),a=n(`${Ys
* pinia v2.3.1
* (c) 2025 Eduardo San Martin Morote
* @license MIT
*/let Fw;const _A=e=>Fw=e,bw=Symbol();function tp(e){return e&&typeof e=="object"&&Object.prototype.toString.call(e)==="[object Object]"&&typeof e.toJSON!="function"}var Ru;(function(e){e.direct="direct",e.patchObject="patch object",e.patchFunction="patch function"})(Ru||(Ru={}));function dS(){const e=l2(!0),t=e.run(()=>Yt({}));let i=[],n=[];const a=r1({install(s){_A(a),a._a=s,s.provide(bw,a),s.config.globalProperties.$pinia=a,n.forEach(r=>i.push(r)),n=[]},use(s){return this._a?i.push(s):n.push(s),this},_p:i,_a:null,_e:e,_s:new Map,state:t});return a}const vw=()=>{};function MF(e,t,i,n=vw){e.push(t);const a=()=>{const s=e.indexOf(t);s>-1&&(e.splice(s,1),n())};return!i&&u2()&&$E(a),a}function Oo(e,...t){e.slice().forEach(i=>{i(...t)})}const fS=e=>e(),LF=Symbol(),Og=Symbol();function ip(e,t){e instanceof Map&&t instanceof Map?t.forEach((i,n)=>e.set(n,i)):e instanceof Set&&t instanceof Set&&t.forEach(e.add,e);for(const i in t){if(!t.hasOwnProperty(i))continue;const n=t[i],a=e[i];tp(a)&&tp(n)&&e.hasOwnProperty(i)&&!Di(n)&&!gr(n)?e[i]=ip(a,n):e[i]=n}return e}const hS=Symbol();function AS(e){return!tp(e)||!e.hasOwnProperty(hS)}const{assign:$s}=Object;function gS(e){return!!(Di(e)&&e.effect)}function mS(e,t,i,n){const{state:a,actions:s,getters:r}=t,o=i.state.value[e];let l;function c(){o||(i.state.value[e]=a?a():{});const f=D2(i.state.value[e]);return $s(f,s,Object.keys(r||{}).reduce((h,A)=>(h[A]=r1(Si(()=>{_A(i);const m=i._s.get(e);return r[A].call(m,m)})),h),{}))}return l=yw(e,c,t,i,n,!0),l}function yw(e,t,i={},n,a,s){let r;const o=$s({actions:{}},i),l={deep:!0};let c,f,h=[],A=[],m;const F=n.state.value[e];!s&&!F&&(n.state.value[e]={}),Yt({});let y;function k(Q){let R;c=f=!1,typeof Q=="function"?(Q(n.state.value[e]),R={type:Ru.patchFunction,storeId:e,events:m}):(ip(n.state.value[e],Q),R={type:Ru.patchObject,payload:Q,storeId:e,events:m});const q=y=Symbol();dA().then(()=>{y===q&&(c=!0)}),f=!0,Oo(h,R,n.state.value[e])}const C=s?function(){const{state:R}=i,q=R?R():{};this.$patch(J=>{$s(J,q)})}:vw;function w(){r.stop(),h=[],A=[],n._s.delete(e)}const B=(Q,R="")=>{if(LF in Q)return Q[Og]=R,Q;const q=function(){_A(n);const J=Array.from(arguments),ae=[],Fe=[];function ge(pe){ae.push(pe)}function le(pe){Fe.push(pe)}Oo(A,{args:J,name:q[Og],store:U,after:ge,onError:le});let re;try{re=Q.apply(this&&this.$id===e?this:U,J)}catch(pe){throw Oo(Fe,pe),pe}return re instanceof Promise?re.then(pe=>(Oo(ae,pe),pe)).catch(pe=>(Oo(Fe,pe),Promise.reject(pe))):(Oo(ae,re),re)};return q[LF]=!0,q[Og]=R,q},S={_p:n,$id:e,$onAction:MF.bind(null,A),$patch:k,$reset:C,$subscribe(Q,R={}){const q=MF(h,Q,R.detached,()=>J()),J=r.run(()=>mr(()=>n.state.value[e],ae=>{(R.flush==="sync"?f:c)&&Q({storeId:e,type:Ru.direct,events:m},ae)},$s({},l,R)));return q},$dispose:w},U=$c(S);n._s.set(e,U);const z=(n._a&&n._a.runWithContext||fS)(()=>n._e.run(()=>(r=l2()).run(()=>t({action:B}))));for(const Q in z){const R=z[Q];if(Di(R)&&!gS(R)||gr(R))s||(F&&AS(R)&&(Di(R)?R.value=F[Q]:ip(R,F[Q])),n.state.value[e][Q]=R);else if(typeof R=="function"){const q=B(R,Q);z[Q]=q,o.actions[Q]=R}}return $s(U,z),$s(qt(U),z),Object.defineProperty(U,"$state",{get:()=>n.state.value[e],set:Q=>{k(R=>{$s(R,Q)})}}),n._p.forEach(Q=>{$s(U,r.run(()=>Q({store:U,app:n._a,pinia:n,options:o})))}),F&&s&&i.hydrate&&i.hydrate(U.$state,F),c=!0,f=!0,U}/*! 
#__NO_SIDE_EFFECTS__ */function ti(e,t,i){let n,a;const s=typeof t=="function";typeof e=="string"?(n=e,a=s?i:t):(a=e,n=e.id);function r(o,l){const c=JB();return o=o||(c?Sa(bw,null):null),o&&_A(o),o=Fw,o._s.has(n)||(s?yw(n,t,a,o):mS(n,a,o)),o._s.get(n)}return r.$id=n,r}function Z(e,t){return Array.isArray(t)?t.reduce((i,n)=>(i[n]=function(){return e(this.$pinia)[n]},i),{}):Object.keys(t).reduce((i,n)=>(i[n]=function(){const a=e(this.$pinia),s=t[n];return typeof s=="function"?s.call(this,a):a[s]},i),{})}function xe(e,t){return Array.isArray(t)?t.reduce((i,n)=>(i[n]=function(...a){return e(this.$pinia)[n](...a)},i),{}):Object.keys(t).reduce((i,n)=>(i[n]=function(...a){return e(this.$pinia)[t[n]](...a)},i),{})}const ue=ti("phone",{state:()=>({show:!1,notifyshow:!1,la
`).forEach(function(r){a=r.indexOf(":"),i=r.substring(0,a).trim().toLowerCase(),n=r.substring(a+1).trim(),!(!i||t[i]&&px[i])&&(i==="set-cookie"?t[i]?t[i].push(n):t[i]=[n]:t[i]=t[i]?t[i]+", "+n:n)}),t},ZF=Symbol("internals");function fu(e){return e&&String(e).trim().toLowerCase()}function Cf(e){return e===!1||e==null?e:Le.isArray(e)?e.map(Cf):String(e)}function Fx(e){const t=Object.create(null),i=/([^\s,;=]+)\s*(?:=\s*([^,;]+))?/g;let n;for(;n=i.exec(e);)t[n[1]]=n[2];return t}const bx=e=>/^[-_a-zA-Z0-9^`|~,!#$%&'*+.]+$/.test(e.trim());function Qg(e,t,i,n,a){if(Le.isFunction(n))return n.call(this,t,i);if(a&&(t=i),!!Le.isString(t)){if(Le.isString(n))return t.indexOf(n)!==-1;if(Le.isRegExp(n))return n.test(t)}}function vx(e){return e.trim().toLowerCase().replace(/([a-z\d])(\w*)/g,(t,i,n)=>i.toUpperCase()+n)}function yx(e,t){const i=Le.toCamelCase(" "+t);["get","set","has"].forEach(n=>{Object.defineProperty(e,n+i,{value:function(a,s,r){return this[n].call(this,t,a,s,r)},configurable:!0})})}let Hn=class{constructor(t){t&&this.set(t)}set(t,i,n){const a=this;function s(o,l,c){const f=fu(l);if(!f)throw new Error("header name must be a non-empty string");const h=Le.findKey(a,f);(!h||a[h]===void 0||c===!0||c===void 0&&a[h]!==!1)&&(a[h||l]=Cf(o))}const r=(o,l)=>Le.forEach(o,(c,f)=>s(c,f,l));if(Le.isPlainObject(t)||t instanceof this.constructor)r(t,i);else if(Le.isString(t)&&(t=t.trim())&&!bx(t))r(_x(t),i);else if(Le.isObject(t)&&Le.isIterable(t)){let o={},l,c;for(const f of t){if(!Le.isArray(f))throw TypeError("Object iterator must return a key-value pair");o[c=f[0]]=(l=o[c])?Le.isArray(l)?[...l,f[1]]:[l,f[1]]:f[1]}r(o,i)}else t!=null&&s(i,t,n);return this}get(t,i){if(t=fu(t),t){const n=Le.findKey(this,t);if(n){const a=this[n];if(!i)return a;if(i===!0)return Fx(a);if(Le.isFunction(i))return i.call(this,a,n);if(Le.isRegExp(i))return i.exec(a);throw new TypeError("parser must be boolean|regexp|function")}}}has(t,i){if(t=fu(t),t){const n=Le.findKey(this,t);return!!(n&&this[n]!==void 0&&(!i||Qg(this,this[n],n,i)))}return!1}delete(t,i){const n=this;let a=!1;function s(r){if(r=fu(r),r){const o=Le.findKey(n,r);o&&(!i||Qg(n,n[o],o,i))&&(delete n[o],a=!0)}}return Le.isArray(t)?t.forEach(s):s(t),a}clear(t){const i=Object.keys(this);let n=i.length,a=!1;for(;n--;){const s=i[n];(!t||Qg(this,this[s],s,t,!0))&&(delete this[s],a=!0)}return a}normalize(t){const i=this,n={};return Le.forEach(this,(a,s)=>{const r=Le.findKey(n,s);if(r){i[r]=Cf(a),delete i[s];return}const o=t?vx(s):String(s).trim();o!==s&&delete i[s],i[o]=Cf(a),n[o]=!0}),this}concat(...t){return this.constructor.concat(this,...t)}toJSON(t){const i=Object.create(null);return Le.forEach(this,(n,a)=>{n!=null&&n!==!1&&(i[a]=t&&Le.isArray(n)?n.join(", "):n)}),i}[Symbol.iterator](){return Object.entries(this.toJSON())[Symbol.iterator]()}toString(){return Object.entries(this.toJSON()).map(([t,i])=>t+": "+i).join(`
`)}getSetCookie(){return this.get("set-cookie")||[]}get[Symbol.toStringTag](){return"AxiosHeaders"}static from(t){return t instanceof this?t:new this(t)}static concat(t,...i){const n=new this(t);return i.forEach(a=>n.set(a)),n}static accessor(t){const n=(this[ZF]=this[ZF]={accessors:{}}).accessors,a=this.prototype;function s(r){const o=fu(r);n[o]||(yx(a,r),n[o]=!0)}return Le.isArray(t)?t.forEach(s):s(t),this}};Hn.accessor(["Content-Type","Content-Length","Accept","Accept-Encoding","User-Agent","Authorization"]);Le.reduceDescriptors(Hn.prototype,({value:e},t)=>{let i=t[0].toUpperCase()+t.slice(1);return{get:()=>e,set(n){this[i]=n}}});Le.freezeMethods(Hn);function jg(e,t){const i=this||ad,n=t||i,a=Hn.from(n.headers);let s=n.data;return Le.forEach(e,function(o){s=o.call(i,s,a.normalize(),t?t.status:void 0)}),a.normalize(),s}function Xw(e){return!!(e&&e.__CANCEL__)}function Ql(e,t,i){xt.call(this,e??"canceled",xt.ERR_CANCELED,t,i),this.name="CanceledError"}Le.inherits(Ql,xt,{__CANCEL__:!0});function Jw(e,t,i){const n=i.config.validateStatus;!i.status||!n||n(i.status)?e(i):t(new xt("Request failed with status code "+i.status,[xt.ERR_BAD_REQUEST,xt.ERR_BAD_RESPONSE][Math.floor(i.status/100)-4],i.config,i.request,i))}function wx(e){const t=/^([-+\w]{1,25})(:?\/\/|:)/.exec(e);return t&&t[1]||""}function Cx(e,t){e=e||10;const i=new Array(e),n=new Array(e);let a=0,s=0,r;return t=t!==void 0?t:1e3,function(l){const c=Date.now(),f=n[s];r||(r=c),i[a]=l,n[a]=c;let h=s,A=0;for(;h!==a;)A+=i[h++],h=h%e;if(a=(a+1)%e,a===s&&(s=(s+1)%e),c-r<t)return;const m=f&&c-f;return m?Math.round(A*1e3/m):void 0}}function kx(e,t){let i=0,n=1e3/t,a,s;const r=(c,f=Date.now())=>{i=f,a=null,s&&(clearTimeout(s),s=null),e.apply(null,c)};return[(...c)=>{const f=Date.now(),h=f-i;h>=n?r(c,f):(a=c,s||(s=setTimeout(()=>{s=null,r(a)},n-h)))},()=>a&&r(a)]}const qf=(e,t,i=3)=>{let n=0;const a=Cx(50,250);return kx(s=>{const r=s.loaded,o=s.lengthComputable?s.total:void 0,l=r-n,c=a(l),f=r<=o;n=r;const h={loaded:r,total:o,progress:o?r/o:void 0,bytes:l,rate:c||void 0,estimated:c&&o&&f?(o-r)/c:void 0,event:s,lengthComputable:o!=null,[t?"download":"upload"]:!0};e(h)},i)},$F=(e,t)=>{const i=e!=null;return[n=>t[0]({lengthComputable:i,total:e,loaded:n}),t[1]]},eb=e=>(...t)=>Le.asap(()=>e(...t)),Ex=_n.hasStandardBrowserEnv?((e,t)=>i=>(i=new URL(i,_n.origin),e.protocol===i.protocol&&e.host===i.host&&(t||e.port===i.port)))(new URL(_n.origin),_n.navigator&&/(msie|trident)/i.test(_n.navigator.userAgent)):()=>!0,Bx=_n.hasStandardBrowserEnv?{write(e,t,i,n,a,s){const r=[e+"="+encodeURIComponent(t)];Le.isNumber(i)&&r.push("expires="+new Date(i).toGMTString()),Le.isString(n)&&r.push("path="+n),Le.isString(a)&&r.push("domain="+a),s===!0&&r.push("secure"),document.cookie=r.join("; ")},read(e){const t=document.cookie.match(new RegExp("(^|;\\s*)("+e+")=([^;]*)"));return t?decodeURIComponent(t[3]):null},remove(e){this.write(e,"",Date.now()-864e5)}}:{write(){},read(){return null},remove(){}};function Sx(e){return/^([a-z][a-z\d+\-.]*:)?\/\//i.test(e)}function Dx(e,t){return t?e.replace(/\/?\/$/,"")+"/"+t.replace(/^\/+/,""):e}function Zw(e,t,i){let n=!Sx(t);return e&&(n||i==!1)?Dx(e,t):t}const tb=e=>e instanceof Hn?{...e}:e;function co(e,t){t=t||{};const i={};function n(c,f,h,A){return Le.isPlainObject(c)&&Le.isPlainObject(f)?Le.merge.call({caseless:A},c,f):Le.isPlainObject(f)?Le.merge({},f):Le.isArray(f)?f.slice():f}function a(c,f,h,A){if(Le.isUndefined(f)){if(!Le.isUndefined(c))return n(void 0,c,h,A)}else return n(c,f,h,A)}function 
s(c,f){if(!Le.isUndefined(f))return n(void 0,f)}function r(c,f){if(Le.isUndefined(f)){if(!Le.isUndefined(c))return n(void 0,c)}else return n(void 0,f)}function o(c,f,h){if(h in t)return n(c,f);if(h in e)return n(void 0,c)}const l={url:s,method:s,data:s,baseURL:r,transformRequest:r,transformResponse:r,paramsSerializer:r,timeout:r,timeoutMessage:r,withCredentials:r,withXSRFToken:r,adapter:r,responseType:r,xsrfCookieName:r,xsrfHeaderName:r,onUploadProgress:r,onDownloadProgress:r,decompress:r,maxContentLength:r,maxBodyLength:r,beforeRedirect:r,transport:r,httpAgent:r,h
`+s.map(ab).join(`
`):" "+ab(s[0]):"as no adapter specified";throw new xt("There is no suitable adapter to dispatch the request "+r,"ERR_NOT_SUPPORT")}return n},adapters:dp};function Gg(e){if(e.cancelToken&&e.cancelToken.throwIfRequested(),e.signal&&e.signal.aborted)throw new Ql(null,e)}function sb(e){return Gg(e),e.headers=Hn.from(e.headers),e.data=jg.call(e,e.transformRequest),["post","put","patch"].indexOf(e.method)!==-1&&e.headers.setContentType("application/x-www-form-urlencoded",!1),i3.getAdapter(e.adapter||ad.adapter)(e).then(function(n){return Gg(e),n.data=jg.call(e,e.transformResponse,n),n.headers=Hn.from(n.headers),n},function(n){return Xw(n)||(Gg(e),n&&n.response&&(n.response.data=jg.call(e,e.transformResponse,n.response),n.response.headers=Hn.from(n.response.headers))),Promise.reject(n)})}const n3="1.10.0",BA={};["object","boolean","number","function","string","symbol"].forEach((e,t)=>{BA[e]=function(n){return typeof n===e||"a"+(t<1?"n ":" ")+e}});const rb={};BA.transitional=function(t,i,n){function a(s,r){return"[Axios v"+n3+"] Transitional option '"+s+"'"+r+(n?". "+n:"")}return(s,r,o)=>{if(t===!1)throw new xt(a(r," has been removed"+(i?" in "+i:"")),xt.ERR_DEPRECATED);return i&&!rb[r]&&(rb[r]=!0,console.warn(a(r," has been deprecated since v"+i+" and will be removed in the near future"))),t?t(s,r,o):!0}};BA.spelling=function(t){return(i,n)=>(console.warn(`${n} is likely a misspelling of ${t}`),!0)};function jx(e,t,i){if(typeof e!="object")throw new xt("options must be an object",xt.ERR_BAD_OPTION_VALUE);const n=Object.keys(e);let a=n.length;for(;a-- >0;){const s=n[a],r=t[s];if(r){const o=e[s],l=o===void 0||r(o,s,e);if(l!==!0)throw new xt("option "+s+" must be "+l,xt.ERR_BAD_OPTION_VALUE);continue}if(i!==!0)throw new xt("Unknown option "+s,xt.ERR_BAD_OPTION)}}const kf={assertOptions:jx,validators:BA},Ha=kf.validators;let so=class{constructor(t){this.defaults=t||{},this.interceptors={request:new JF,response:new JF}}async request(t,i){try{return await this._request(t,i)}catch(n){if(n instanceof Error){let a={};Error.captureStackTrace?Error.captureStackTrace(a):a=new Error;const s=a.stack?a.stack.replace(/^.+\n/,""):"";try{n.stack?s&&!String(n.stack).endsWith(s.replace(/^.+\n.+\n/,""))&&(n.stack+=`
`+s):n.stack=s}catch{}}throw n}}_request(t,i){typeof t=="string"?(i=i||{},i.url=t):i=t||{},i=co(this.defaults,i);const{transitional:n,paramsSerializer:a,headers:s}=i;n!==void 0&&kf.assertOptions(n,{silentJSONParsing:Ha.transitional(Ha.boolean),forcedJSONParsing:Ha.transitional(Ha.boolean),clarifyTimeoutError:Ha.transitional(Ha.boolean)},!1),a!=null&&(Le.isFunction(a)?i.paramsSerializer={serialize:a}:kf.assertOptions(a,{encode:Ha.function,serialize:Ha.function},!0)),i.allowAbsoluteUrls!==void 0||(this.defaults.allowAbsoluteUrls!==void 0?i.allowAbsoluteUrls=this.defaults.allowAbsoluteUrls:i.allowAbsoluteUrls=!0),kf.assertOptions(i,{baseUrl:Ha.spelling("baseURL"),withXsrfToken:Ha.spelling("withXSRFToken")},!0),i.method=(i.method||this.defaults.method||"get").toLowerCase();let r=s&&Le.merge(s.common,s[i.method]);s&&Le.forEach(["delete","get","head","post","put","patch","common"],F=>{delete s[F]}),i.headers=Hn.concat(r,s);const o=[];let l=!0;this.interceptors.request.forEach(function(y){typeof y.runWhen=="function"&&y.runWhen(i)===!1||(l=l&&y.synchronous,o.unshift(y.fulfilled,y.rejected))});const c=[];this.interceptors.response.forEach(function(y){c.push(y.fulfilled,y.rejected)});let f,h=0,A;if(!l){const F=[sb.bind(this),void 0];for(F.unshift.apply(F,o),F.push.apply(F,c),A=F.length,f=Promise.resolve(i);h<A;)f=f.then(F[h++],F[h++]);return f}A=o.length;let m=i;for(h=0;h<A;){const F=o[h++],y=o[h++];try{m=F(m)}catch(k){y.call(this,k);break}}try{f=sb.call(this,m)}catch(F){return Promise.reject(F)}for(h=0,A=c.length;h<A;)f=f.then(c[h++],c[h++]);return f}getUri(t){t=co(this.defaults,t);const i=Zw(t.baseURL,t.url,t.allowAbsoluteUrls);return Ww(i,t.params,t.paramsSerializer)}};Le.forEach(["delete","get","head","options"],function(t){so.prototype[t]=function(i,n){return this.request(co(n||{},{method:t,url:i,data:(n||{}).data}))}});Le.forEach(["post","put","patch"],function(t){function i(n){return function(s,r,o){return this.request(co(o||{},{method:t,headers:n?{"Content-Type":"multipart/form-data"}:{},url:s,data:r}))}}so.prototype[t]=i(),so.prototype[t+"Form"]=i(!0)});let Gx=class a3{constructor(t){if(typeof t!="function")throw new TypeError("executor must be a function.");let i;this.promise=new Promise(function(s){i=s});const n=this;this.promise.then(a=>{if(!n._listeners)return;let s=n._listeners.length;for(;s-- >0;)n._listeners[s](a);n._listeners=null}),this.promise.then=a=>{let s;const r=new Promise(o=>{n.subscribe(o),s=o}).then(a);return r.cancel=function(){n.unsubscribe(s)},r},t(function(s,r,o){n.reason||(n.reason=new Ql(s,r,o),i(n.reason))})}throwIfRequested(){if(this.reason)throw this.reason}subscribe(t){if(this.reason){t(this.reason);return}this._listeners?this._listeners.push(t):this._listeners=[t]}unsubscribe(t){if(!this._listeners)return;const i=this._listeners.indexOf(t);i!==-1&&this._listeners.splice(i,1)}toAbortSignal(){const t=new AbortController,i=n=>{t.abort(n)};return this.subscribe(i),t.signal.unsubscribe=()=>this.unsubscribe(i),t.signal}static source(){let t;return{token:new a3(function(a){t=a}),cancel:t}}};function zx(e){return function(i){return e.apply(null,i)}}function qx(e){return Le.isObject(e)&&e.isAxiosError===!0}const 
fp={Continue:100,SwitchingProtocols:101,Processing:102,EarlyHints:103,Ok:200,Created:201,Accepted:202,NonAuthoritativeInformation:203,NoContent:204,ResetContent:205,PartialContent:206,MultiStatus:207,AlreadyReported:208,ImUsed:226,MultipleChoices:300,MovedPermanently:301,Found:302,SeeOther:303,NotModified:304,UseProxy:305,Unused:306,TemporaryRedirect:307,PermanentRedirect:308,BadRequest:400,Unauthorized:401,PaymentRequired:402,Forbidden:403,NotFound:404,MethodNotAllowed:405,NotAcceptable:406,ProxyAuthenticationRequired:407,RequestTimeout:408,Conflict:409,Gone:410,LengthRequired:411,PreconditionFailed:412,PayloadTooLarge:413,UriTooLong:414,UnsupportedMediaType:415,RangeNotSatisfiable:416,ExpectationFailed:417,ImATeapot:418,MisdirectedRequest:421,UnprocessableEntity:422,Locked:423,FailedDependency:424,TooEarly:425,UpgradeRequired:426,PreconditionRequired:428,TooManyRequests:429,RequestHeaderFie
* vue-router v4.5.1
* (c) 2025 Eduardo San Martin Morote
* @license MIT
*/const Yo=typeof document<"u";function r3(e){return typeof e=="object"||"displayName"in e||"props"in e||"__vccOpts"in e}function Vx(e){return e.__esModule||e[Symbol.toStringTag]==="Module"||e.default&&r3(e.default)}const $t=Object.assign;function zg(e,t){const i={};for(const n in t){const a=t[n];i[n]=xa(a)?a.map(e):e(a)}return i}const Uu=()=>{},xa=Array.isArray,o3=/#/g,Wx=/&/g,Kx=/\//g,Yx=/=/g,Xx=/\?/g,l3=/\+/g,Jx=/%5B/g,Zx=/%5D/g,u3=/%5E/g,$x=/%60/g,c3=/%7B/g,eT=/%7C/g,d3=/%7D/g,tT=/%20/g;function _1(e){return encodeURI(""+e).replace(eT,"|").replace(Jx,"[").replace(Zx,"]")}function iT(e){return _1(e).replace(c3,"{").replace(d3,"}").replace(u3,"^")}function hp(e){return _1(e).replace(l3,"%2B").replace(tT,"+").replace(o3,"%23").replace(Wx,"%26").replace($x,"`").replace(c3,"{").replace(d3,"}").replace(u3,"^")}function nT(e){return hp(e).replace(Yx,"%3D")}function aT(e){return _1(e).replace(o3,"%23").replace(Xx,"%3F")}function sT(e){return e==null?"":aT(e).replace(Kx,"%2F")}function oc(e){try{return decodeURIComponent(""+e)}catch{}return""+e}const rT=/\/$/,oT=e=>e.replace(rT,"");function qg(e,t,i="/"){let n,a={},s="",r="";const o=t.indexOf("#");let l=t.indexOf("?");return o<l&&o>=0&&(l=-1),l>-1&&(n=t.slice(0,l),s=t.slice(l+1,o>-1?o:t.length),a=e(s)),o>-1&&(n=n||t.slice(0,o),r=t.slice(o,t.length)),n=dT(n??t,i),{fullPath:n+(s&&"?")+s+r,path:n,query:a,hash:oc(r)}}function lT(e,t){const i=t.query?e(t.query):"";return t.path+(i&&"?")+i+(t.hash||"")}function ob(e,t){return!t||!e.toLowerCase().startsWith(t.toLowerCase())?e:e.slice(t.length)||"/"}function uT(e,t,i){const n=t.matched.length-1,a=i.matched.length-1;return n>-1&&n===a&&Al(t.matched[n],i.matched[a])&&f3(t.params,i.params)&&e(t.query)===e(i.query)&&t.hash===i.hash}function Al(e,t){return(e.aliasOf||e)===(t.aliasOf||t)}function f3(e,t){if(Object.keys(e).length!==Object.keys(t).length)return!1;for(const i in e)if(!cT(e[i],t[i]))return!1;return!0}function cT(e,t){return xa(e)?lb(e,t):xa(t)?lb(t,e):e===t}function lb(e,t){return xa(t)?e.length===t.length&&e.every((i,n)=>i===t[n]):e.length===1&&e[0]===t}function dT(e,t){if(e.startsWith("/"))return e;if(!e)return t;const i=t.split("/"),n=e.split("/"),a=n[n.length-1];(a===".."||a===".")&&n.push("");let s=i.length-1,r,o;for(r=0;r<n.length;r++)if(o=n[r],o!==".")if(o==="..")s>1&&s--;else break;return i.slice(0,s).join("/")+"/"+n.slice(r).join("/")}const Xs={path:"/",name:void 0,params:{},query:{},hash:"",fullPath:"/",matched:[],meta:{},redirectedFrom:void 0};var lc;(function(e){e.pop="pop",e.push="push"})(lc||(lc={}));var Ou;(function(e){e.back="back",e.forward="forward",e.unknown=""})(Ou||(Ou={}));function fT(e){if(!e)if(Yo){const t=document.querySelector("base");e=t&&t.getAttribute("href")||"/",e=e.replace(/^\w+:\/\/[^\/]+/,"")}else e="/";return e[0]!=="/"&&e[0]!=="#"&&(e="/"+e),oT(e)}const hT=/^[^#]+#/;function AT(e,t){return e.replace(hT,"#")+t}function gT(e,t){const i=document.documentElement.getBoundingClientRect(),n=e.getBoundingClientRect();return{behavior:t.behavior,left:n.left-i.left-(t.left||0),top:n.top-i.top-(t.top||0)}}const SA=()=>({left:window.scrollX,top:window.scrollY});function mT(e){let t;if("el"in e){const i=e.el,n=typeof i=="string"&&i.startsWith("#"),a=typeof i=="string"?n?document.getElementById(i.slice(1)):document.querySelector(i):i;if(!a)return;t=gT(a,e)}else t=e;"scrollBehavior"in document.documentElement.style?window.scrollTo(t):window.scrollTo(t.left!=null?t.left:window.scrollX,t.top!=null?t.top:window.scrollY)}function 
ub(e,t){return(history.state?history.state.position-t:-1)+e}const Ap=new Map;function pT(e,t){Ap.set(e,t)}function _T(e){const t=Ap.get(e);return Ap.delete(e),t}let FT=()=>location.protocol+"//"+location.host;function h3(e,t){const{pathname:i,search:n,hash:a}=t,s=e.indexOf("#");if(s>-1){let o=a.includes(e.slice(s))?e.slice(s).length:1,l=a.slice(o);return l[0]!=="/"&&(l="/"+l),ob(l,"")}return ob(i,e)+n+a}function bT(e,t,i,n){let a=[],s=[],r=null;const o=({state:A})=>{const m=h3(e,location),F=i.value,y=t.value;let k=0;if(A){if(i.value=m,t.value=A,r&&r===F){r=null;return}k=y?A.position
#ifdef USE_ALPHAMAP

diffuseColor.a *= texture2D( alphaMap, vUv ).g;

#endif
`,mj=`
#ifdef USE_ALPHAMAP

uniform sampler2D alphaMap;

#endif
`,pj=`
#ifdef ALPHATEST

if ( diffuseColor.a < ALPHATEST ) discard;

#endif
`,_j=`
#ifdef USE_AOMAP

// reads channel R, compatible with a combined OcclusionRoughnessMetallic (RGB) texture
float ambientOcclusion = ( texture2D( aoMap, vUv2 ).r - 1.0 ) * aoMapIntensity + 1.0;

reflectedLight.indirectDiffuse *= ambientOcclusion;

#if defined( USE_ENVMAP ) && defined( PHYSICAL )

float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );

reflectedLight.indirectSpecular *= computeSpecularOcclusion( dotNV, ambientOcclusion, material.specularRoughness );

#endif

#endif
`,Fj=`
#ifdef USE_AOMAP

uniform sampler2D aoMap;
uniform float aoMapIntensity;

#endif
`,bj=`
vec3 transformed = vec3( position );
`,vj=`
vec3 objectNormal = vec3( normal );
`,yj=`
float punctualLightIntensityToIrradianceFactor( const in float lightDistance, const in float cutoffDistance, const in float decayExponent ) {

#if defined ( PHYSICALLY_CORRECT_LIGHTS )

// based upon Frostbite 3 Moving to Physically-based Rendering
// page 32, equation 26: E[window1]
// https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
// this is intended to be used on spot and point lights who are represented as luminous intensity
// but who must be converted to luminous irradiance for surface lighting calculation
float distanceFalloff = 1.0 / max( pow( lightDistance, decayExponent ), 0.01 );

if( cutoffDistance > 0.0 ) {

distanceFalloff *= pow2( saturate( 1.0 - pow4( lightDistance / cutoffDistance ) ) );

}

return distanceFalloff;

#else

if( cutoffDistance > 0.0 && decayExponent > 0.0 ) {

return pow( saturate( -lightDistance / cutoffDistance + 1.0 ), decayExponent );

}

return 1.0;

#endif

}

vec3 BRDF_Diffuse_Lambert( const in vec3 diffuseColor ) {

return RECIPROCAL_PI * diffuseColor;

} // validated

vec3 F_Schlick( const in vec3 specularColor, const in float dotLH ) {

// Original approximation by Christophe Schlick '94
// float fresnel = pow( 1.0 - dotLH, 5.0 );

// Optimized variant (presented by Epic at SIGGRAPH '13)
// https://cdn2.unrealengine.com/Resources/files/2013SiggraphPresentationsNotes-26915738.pdf
float fresnel = exp2( ( -5.55473 * dotLH - 6.98316 ) * dotLH );

return ( 1.0 - specularColor ) * fresnel + specularColor;

} // validated

// Microfacet Models for Refraction through Rough Surfaces - equation (34)
// http://graphicrants.blogspot.com/2013/08/specular-brdf-reference.html
// alpha is "roughness squared" in Disney’s reparameterization
float G_GGX_Smith( const in float alpha, const in float dotNL, const in float dotNV ) {

// geometry term (normalized) = G(l)⋅G(v) / 4(n⋅l)(n⋅v)
// also see #12151

float a2 = pow2( alpha );

float gl = dotNL + sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) );
float gv = dotNV + sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) );

return 1.0 / ( gl * gv );

} // validated

// Moving Frostbite to Physically Based Rendering 3.0 - page 12, listing 2
// https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
float G_GGX_SmithCorrelated( const in float alpha, const in float dotNL, const in float dotNV ) {

float a2 = pow2( alpha );

// dotNL and dotNV are explicitly swapped. This is not a mistake.
float gv = dotNL * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) );
float gl = dotNV * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) );

return 0.5 / max( gv + gl, EPSILON );

}

// Microfacet Models for Refraction through Rough Surfaces - equation (33)
// http://graphicrants.blogspot.com/2013/08/specular-brdf-reference.html
// alpha is "roughness squared" in Disney’s reparameterization
float D_GGX( const in float alpha, const in float dotNH ) {

float a2 = pow2( alpha );

float denom = pow2( dotNH ) * ( a2 - 1.0 ) + 1.0; // avoid alpha = 0 with dotNH = 1

return RECIPROCAL_PI * a2 / pow2( denom );

}

// GGX Distribution, Schlick Fresnel, GGX-Smith Visibility
vec3 BRDF_Specular_GGX( const in IncidentLight incidentLight, const in GeometricContext geometry, const in vec3 specularColor, const in float roughness ) {

float alpha = pow2( roughness ); // UE4's roughness

vec3 halfDir = normalize( incidentLight.direction + geometry.viewDir );

float dotNL = saturate( dot( geometry.normal, incidentLight.direction ) );
float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
float dotNH = saturate( dot( geometry.normal, halfDir ) );
float dotLH = saturate( dot( incidentLight.direction, halfDir ) );

vec3 F = F_Schlick( specularColor, dotLH );

float G = G_GGX_SmithCorrelated( alpha, dotNL, dotNV );

float D = D_GGX( alpha, dotNH );

return F * ( G * D );

} // validated

// Rect Area Light

// Real-Time Polygonal-Light Shading with Linearly Transformed Cosines
// by Eric Heitz, Jonathan Dupuy, Stephen Hill and David Neubelt
// code: https://github.com/selfshadow/ltc_code/

vec2 LTC_Uv( const in vec3 N, const in vec3 V, const in float roughness ) {

const float LUT_SIZE = 64.0;
const float LUT_SCALE = ( LUT_SIZE - 1.0 ) / LUT_SIZE;
const float LUT_BIAS = 0.5 / LUT_SIZE;

float dotNV = saturate( dot( N, V ) );

// texture parameterized by sqrt( GGX alpha ) and sqrt( 1 - cos( theta ) )
vec2 uv = vec2( roughness, sqrt( 1.0 - dotNV ) );

uv = uv * LUT_SCALE + LUT_BIAS;

return uv;

}

float LTC_ClippedSphereFormFactor( const in vec3 f ) {

// Real-Time Area Lighting: a Journey from Research to Production (p.102)
// An approximation of the form factor of a horizon-clipped rectangle.

float l = length( f );

return max( ( l * l + f.z ) / ( l + 1.0 ), 0.0 );

}

vec3 LTC_EdgeVectorFormFactor( const in vec3 v1, const in vec3 v2 ) {

float x = dot( v1, v2 );

float y = abs( x );

// rational polynomial approximation to theta / sin( theta ) / 2PI
float a = 0.8543985 + ( 0.4965155 + 0.0145206 * y ) * y;
float b = 3.4175940 + ( 4.1616724 + y ) * y;
float v = a / b;

float theta_sintheta = ( x > 0.0 ) ? v : 0.5 * inversesqrt( max( 1.0 - x * x, 1e-7 ) ) - v;

return cross( v1, v2 ) * theta_sintheta;

}

vec3 LTC_Evaluate( const in vec3 N, const in vec3 V, const in vec3 P, const in mat3 mInv, const in vec3 rectCoords[ 4 ] ) {

// bail if point is on back side of plane of light
// assumes ccw winding order of light vertices
vec3 v1 = rectCoords[ 1 ] - rectCoords[ 0 ];
vec3 v2 = rectCoords[ 3 ] - rectCoords[ 0 ];
vec3 lightNormal = cross( v1, v2 );

if( dot( lightNormal, P - rectCoords[ 0 ] ) < 0.0 ) return vec3( 0.0 );

// construct orthonormal basis around N
vec3 T1, T2;
T1 = normalize( V - N * dot( V, N ) );
T2 = - cross( N, T1 ); // negated from paper; possibly due to a different handedness of world coordinate system

// compute transform
mat3 mat = mInv * transposeMat3( mat3( T1, T2, N ) );

// transform rect
vec3 coords[ 4 ];
coords[ 0 ] = mat * ( rectCoords[ 0 ] - P );
coords[ 1 ] = mat * ( rectCoords[ 1 ] - P );
coords[ 2 ] = mat * ( rectCoords[ 2 ] - P );
coords[ 3 ] = mat * ( rectCoords[ 3 ] - P );

// project rect onto sphere
coords[ 0 ] = normalize( coords[ 0 ] );
coords[ 1 ] = normalize( coords[ 1 ] );
coords[ 2 ] = normalize( coords[ 2 ] );
coords[ 3 ] = normalize( coords[ 3 ] );

// calculate vector form factor
vec3 vectorFormFactor = vec3( 0.0 );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 0 ], coords[ 1 ] );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 1 ], coords[ 2 ] );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 2 ], coords[ 3 ] );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 3 ], coords[ 0 ] );

// adjust for horizon clipping
float result = LTC_ClippedSphereFormFactor( vectorFormFactor );

/*
// alternate method of adjusting for horizon clipping (see referece)
// refactoring required
float len = length( vectorFormFactor );
float z = vectorFormFactor.z / len;

const float LUT_SIZE = 64.0;
const float LUT_SCALE = ( LUT_SIZE - 1.0 ) / LUT_SIZE;
const float LUT_BIAS = 0.5 / LUT_SIZE;

// tabulated horizon-clipped sphere, apparently...
vec2 uv = vec2( z * 0.5 + 0.5, len );
uv = uv * LUT_SCALE + LUT_BIAS;

float scale = texture2D( ltc_2, uv ).w;

float result = len * scale;
*/

return vec3( result );

}

// End Rect Area Light

// ref: https://www.unrealengine.com/blog/physically-based-shading-on-mobile - environmentBRDF for GGX on mobile
vec3 BRDF_Specular_GGX_Environment( const in GeometricContext geometry, const in vec3 specularColor, const in float roughness ) {

float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );

const vec4 c0 = vec4( - 1, - 0.0275, - 0.572, 0.022 );

const vec4 c1 = vec4( 1, 0.0425, 1.04, - 0.04 );

vec4 r = roughness * c0 + c1;

float a004 = min( r.x * r.x, exp2( - 9.28 * dotNV ) ) * r.x + r.y;

vec2 AB = vec2( -1.04, 1.04 ) * a004 + r.zw;

return specularColor * AB.x + AB.y;

} // validated


float G_BlinnPhong_Implicit( /* const in float dotNL, const in float dotNV */ ) {

// geometry term is (n dot l)(n dot v) / 4(n dot l)(n dot v)
return 0.25;

}

float D_BlinnPhong( const in float shininess, const in float dotNH ) {

return RECIPROCAL_PI * ( shininess * 0.5 + 1.0 ) * pow( dotNH, shininess );

}

vec3 BRDF_Specular_BlinnPhong( const in IncidentLight incidentLight, const in GeometricContext geometry, const in vec3 specularColor, const in float shininess ) {

vec3 halfDir = normalize( incidentLight.direction + geometry.viewDir );

//float dotNL = saturate( dot( geometry.normal, incidentLight.direction ) );
//float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
float dotNH = saturate( dot( geometry.normal, halfDir ) );
float dotLH = saturate( dot( incidentLight.direction, halfDir ) );

vec3 F = F_Schlick( specularColor, dotLH );

float G = G_BlinnPhong_Implicit( /* dotNL, dotNV */ );

float D = D_BlinnPhong( shininess, dotNH );

return F * ( G * D );

} // validated

// source: http://simonstechblog.blogspot.ca/2011/12/microfacet-brdf.html
float GGXRoughnessToBlinnExponent( const in float ggxRoughness ) {
return ( 2.0 / pow2( ggxRoughness + 0.0001 ) - 2.0 );
}

float BlinnExponentToGGXRoughness( const in float blinnExponent ) {
return sqrt( 2.0 / ( blinnExponent + 2.0 ) );
}
`,wj=`
#ifdef USE_BUMPMAP

uniform sampler2D bumpMap;
uniform float bumpScale;

// Bump Mapping Unparametrized Surfaces on the GPU by Morten S. Mikkelsen
// http://api.unrealengine.com/attachments/Engine/Rendering/LightingAndShadows/BumpMappingWithoutTangentSpace/mm_sfgrad_bump.pdf

// Evaluate the derivative of the height w.r.t. screen-space using forward differencing (listing 2)

vec2 dHdxy_fwd() {

vec2 dSTdx = dFdx( vUv );
vec2 dSTdy = dFdy( vUv );

float Hll = bumpScale * texture2D( bumpMap, vUv ).x;
float dBx = bumpScale * texture2D( bumpMap, vUv + dSTdx ).x - Hll;
float dBy = bumpScale * texture2D( bumpMap, vUv + dSTdy ).x - Hll;

return vec2( dBx, dBy );

}

vec3 perturbNormalArb( vec3 surf_pos, vec3 surf_norm, vec2 dHdxy ) {

// Workaround for Adreno 3XX dFd*( vec3 ) bug. See #9988

vec3 vSigmaX = vec3( dFdx( surf_pos.x ), dFdx( surf_pos.y ), dFdx( surf_pos.z ) );
vec3 vSigmaY = vec3( dFdy( surf_pos.x ), dFdy( surf_pos.y ), dFdy( surf_pos.z ) );
vec3 vN = surf_norm; // normalized

vec3 R1 = cross( vSigmaY, vN );
vec3 R2 = cross( vN, vSigmaX );

float fDet = dot( vSigmaX, R1 );

fDet *= ( float( gl_FrontFacing ) * 2.0 - 1.0 );

vec3 vGrad = sign( fDet ) * ( dHdxy.x * R1 + dHdxy.y * R2 );
return normalize( abs( fDet ) * surf_norm - vGrad );

}

#endif
`,Cj=`
#if NUM_CLIPPING_PLANES > 0

vec4 plane;

#pragma unroll_loop
for ( int i = 0; i < UNION_CLIPPING_PLANES; i ++ ) {

plane = clippingPlanes[ i ];
if ( dot( vViewPosition, plane.xyz ) > plane.w ) discard;

}

#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES

bool clipped = true;

#pragma unroll_loop
for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {

plane = clippingPlanes[ i ];
clipped = ( dot( vViewPosition, plane.xyz ) > plane.w ) && clipped;

}

if ( clipped ) discard;

#endif

#endif
`,kj=`
#if NUM_CLIPPING_PLANES > 0

#if ! defined( PHYSICAL ) && ! defined( PHONG ) && ! defined( MATCAP )
varying vec3 vViewPosition;
#endif

uniform vec4 clippingPlanes[ NUM_CLIPPING_PLANES ];

#endif
`,Ej=`
#if NUM_CLIPPING_PLANES > 0 && ! defined( PHYSICAL ) && ! defined( PHONG ) && ! defined( MATCAP )
varying vec3 vViewPosition;
#endif
`,Bj=`
#if NUM_CLIPPING_PLANES > 0 && ! defined( PHYSICAL ) && ! defined( PHONG ) && ! defined( MATCAP )
vViewPosition = - mvPosition.xyz;
#endif
`,Sj=`
#ifdef USE_COLOR

diffuseColor.rgb *= vColor;

#endif
`,Dj=`
#ifdef USE_COLOR

varying vec3 vColor;

#endif
`,xj=`
#ifdef USE_COLOR

varying vec3 vColor;

#endif
`,Tj=`
#ifdef USE_COLOR

vColor.xyz = color.xyz;

#endif
`,Ij=`
|
|||
|
#define PI 3.14159265359
|
|||
|
#define PI2 6.28318530718
|
|||
|
#define PI_HALF 1.5707963267949
|
|||
|
#define RECIPROCAL_PI 0.31830988618
|
|||
|
#define RECIPROCAL_PI2 0.15915494
|
|||
|
#define LOG2 1.442695
|
|||
|
#define EPSILON 1e-6
|
|||
|
|
|||
|
#define saturate(a) clamp( a, 0.0, 1.0 )
|
|||
|
#define whiteCompliment(a) ( 1.0 - saturate( a ) )
|
|||
|
|
|||
|
float pow2( const in float x ) { return x*x; }
|
|||
|
float pow3( const in float x ) { return x*x*x; }
|
|||
|
float pow4( const in float x ) { float x2 = x*x; return x2*x2; }
|
|||
|
float average( const in vec3 color ) { return dot( color, vec3( 0.3333 ) ); }
|
|||
|
// expects values in the range of [0,1]x[0,1], returns values in the [0,1] range.
|
|||
|
// do not collapse into a single function per: http://byteblacksmith.com/improvements-to-the-canonical-one-liner-glsl-rand-for-opengl-es-2-0/
|
|||
|
highp float rand( const in vec2 uv ) {
|
|||
|
const highp float a = 12.9898, b = 78.233, c = 43758.5453;
|
|||
|
highp float dt = dot( uv.xy, vec2( a,b ) ), sn = mod( dt, PI );
|
|||
|
return fract(sin(sn) * c);
|
|||
|
}
|
|||
|
|
|||
|
struct IncidentLight {
|
|||
|
vec3 color;
|
|||
|
vec3 direction;
|
|||
|
bool visible;
|
|||
|
};
|
|||
|
|
|||
|
struct ReflectedLight {
|
|||
|
vec3 directDiffuse;
|
|||
|
vec3 directSpecular;
|
|||
|
vec3 indirectDiffuse;
|
|||
|
vec3 indirectSpecular;
|
|||
|
};
|
|||
|
|
|||
|
struct GeometricContext {
|
|||
|
vec3 position;
|
|||
|
vec3 normal;
|
|||
|
vec3 viewDir;
|
|||
|
};
|
|||
|
|
|||
|
vec3 transformDirection( in vec3 dir, in mat4 matrix ) {
|
|||
|
|
|||
|
return normalize( ( matrix * vec4( dir, 0.0 ) ).xyz );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// http://en.wikibooks.org/wiki/GLSL_Programming/Applying_Matrix_Transformations
|
|||
|
vec3 inverseTransformDirection( in vec3 dir, in mat4 matrix ) {
|
|||
|
|
|||
|
return normalize( ( vec4( dir, 0.0 ) * matrix ).xyz );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
vec3 projectOnPlane(in vec3 point, in vec3 pointOnPlane, in vec3 planeNormal ) {
|
|||
|
|
|||
|
float distance = dot( planeNormal, point - pointOnPlane );
|
|||
|
|
|||
|
return - distance * planeNormal + point;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
float sideOfPlane( in vec3 point, in vec3 pointOnPlane, in vec3 planeNormal ) {
|
|||
|
|
|||
|
return sign( dot( point - pointOnPlane, planeNormal ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
vec3 linePlaneIntersect( in vec3 pointOnLine, in vec3 lineDirection, in vec3 pointOnPlane, in vec3 planeNormal ) {
|
|||
|
|
|||
|
return lineDirection * ( dot( planeNormal, pointOnPlane - pointOnLine ) / dot( planeNormal, lineDirection ) ) + pointOnLine;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
mat3 transposeMat3( const in mat3 m ) {
|
|||
|
|
|||
|
mat3 tmp;
|
|||
|
|
|||
|
tmp[ 0 ] = vec3( m[ 0 ].x, m[ 1 ].x, m[ 2 ].x );
|
|||
|
tmp[ 1 ] = vec3( m[ 0 ].y, m[ 1 ].y, m[ 2 ].y );
|
|||
|
tmp[ 2 ] = vec3( m[ 0 ].z, m[ 1 ].z, m[ 2 ].z );
|
|||
|
|
|||
|
return tmp;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// https://en.wikipedia.org/wiki/Relative_luminance
|
|||
|
float linearToRelativeLuminance( const in vec3 color ) {
|
|||
|
|
|||
|
vec3 weights = vec3( 0.2126, 0.7152, 0.0722 );
|
|||
|
|
|||
|
return dot( weights, color.rgb );
|
|||
|
|
|||
|
}
|
|||
|
`,Pj=`
#ifdef ENVMAP_TYPE_CUBE_UV

#define cubeUV_textureSize (1024.0)

int getFaceFromDirection(vec3 direction) {
vec3 absDirection = abs(direction);
int face = -1;
if( absDirection.x > absDirection.z ) {
if(absDirection.x > absDirection.y )
face = direction.x > 0.0 ? 0 : 3;
else
face = direction.y > 0.0 ? 1 : 4;
}
else {
if(absDirection.z > absDirection.y )
face = direction.z > 0.0 ? 2 : 5;
else
face = direction.y > 0.0 ? 1 : 4;
}
return face;
}

#define cubeUV_maxLods1 (log2(cubeUV_textureSize*0.25) - 1.0)
#define cubeUV_rangeClamp (exp2((6.0 - 1.0) * 2.0))

vec2 MipLevelInfo( vec3 vec, float roughnessLevel, float roughness ) {
float scale = exp2(cubeUV_maxLods1 - roughnessLevel);
float dxRoughness = dFdx(roughness);
float dyRoughness = dFdy(roughness);
vec3 dx = dFdx( vec * scale * dxRoughness );
vec3 dy = dFdy( vec * scale * dyRoughness );
float d = max( dot( dx, dx ), dot( dy, dy ) );
// Clamp the value to the max mip level counts. hard coded to 6 mips
d = clamp(d, 1.0, cubeUV_rangeClamp);
float mipLevel = 0.5 * log2(d);
return vec2(floor(mipLevel), fract(mipLevel));
}

#define cubeUV_maxLods2 (log2(cubeUV_textureSize*0.25) - 2.0)
#define cubeUV_rcpTextureSize (1.0 / cubeUV_textureSize)

vec2 getCubeUV(vec3 direction, float roughnessLevel, float mipLevel) {
mipLevel = roughnessLevel > cubeUV_maxLods2 - 3.0 ? 0.0 : mipLevel;
float a = 16.0 * cubeUV_rcpTextureSize;

vec2 exp2_packed = exp2( vec2( roughnessLevel, mipLevel ) );
vec2 rcp_exp2_packed = vec2( 1.0 ) / exp2_packed;
// float powScale = exp2(roughnessLevel + mipLevel);
float powScale = exp2_packed.x * exp2_packed.y;
// float scale = 1.0 / exp2(roughnessLevel + 2.0 + mipLevel);
float scale = rcp_exp2_packed.x * rcp_exp2_packed.y * 0.25;
// float mipOffset = 0.75*(1.0 - 1.0/exp2(mipLevel))/exp2(roughnessLevel);
float mipOffset = 0.75*(1.0 - rcp_exp2_packed.y) * rcp_exp2_packed.x;

bool bRes = mipLevel == 0.0;
scale = bRes && (scale < a) ? a : scale;

vec3 r;
vec2 offset;
int face = getFaceFromDirection(direction);

float rcpPowScale = 1.0 / powScale;

if( face == 0) {
r = vec3(direction.x, -direction.z, direction.y);
offset = vec2(0.0+mipOffset,0.75 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? a : offset.y;
}
else if( face == 1) {
r = vec3(direction.y, direction.x, direction.z);
offset = vec2(scale+mipOffset, 0.75 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? a : offset.y;
}
else if( face == 2) {
r = vec3(direction.z, direction.x, direction.y);
offset = vec2(2.0*scale+mipOffset, 0.75 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? a : offset.y;
}
else if( face == 3) {
r = vec3(direction.x, direction.z, direction.y);
offset = vec2(0.0+mipOffset,0.5 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? 0.0 : offset.y;
}
else if( face == 4) {
r = vec3(direction.y, direction.x, -direction.z);
offset = vec2(scale+mipOffset, 0.5 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? 0.0 : offset.y;
}
else {
r = vec3(direction.z, -direction.x, direction.y);
offset = vec2(2.0*scale+mipOffset, 0.5 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? 0.0 : offset.y;
}
r = normalize(r);
float texelOffset = 0.5 * cubeUV_rcpTextureSize;
vec2 s = ( r.yz / abs( r.x ) + vec2( 1.0 ) ) * 0.5;
vec2 base = offset + vec2( texelOffset );
return base + s * ( scale - 2.0 * texelOffset );
}

#define cubeUV_maxLods3 (log2(cubeUV_textureSize*0.25) - 3.0)

vec4 textureCubeUV( sampler2D envMap, vec3 reflectedDirection, float roughness ) {
float roughnessVal = roughness* cubeUV_maxLods3;
float r1 = floor(roughnessVal);
float r2 = r1 + 1.0;
float t = fract(roughnessVal);
vec2 mipInfo = MipLevelInfo(reflectedDirection, r1, roughness);
float s = mipInfo.y;
float level0 = mipInfo.x;
float level1 = level0 + 1.0;
level1 = level1 > 5.0 ? 5.0 : level1;

// round to nearest mipmap if we are not interpolating.
level0 += min( floor( s + 0.5 ), 5.0 );

// Tri linear interpolation.
vec2 uv_10 = getCubeUV(reflectedDirection, r1, level0);
vec4 color10 = envMapTexelToLinear(texture2D(envMap, uv_10));

vec2 uv_20 = getCubeUV(reflectedDirection, r2, level0);
vec4 color20 = envMapTexelToLinear(texture2D(envMap, uv_20));

vec4 result = mix(color10, color20, t);

return vec4(result.rgb, 1.0);
}

#endif
`,Mj=`
vec3 transformedNormal = normalMatrix * objectNormal;
#ifdef FLIP_SIDED
transformedNormal = - transformedNormal;
#endif
`,Lj=`
#ifdef USE_DISPLACEMENTMAP
uniform sampler2D displacementMap;
uniform float displacementScale;
uniform float displacementBias;
#endif
`,Rj=`
#ifdef USE_DISPLACEMENTMAP
transformed += normalize( objectNormal ) * ( texture2D( displacementMap, uv ).x * displacementScale + displacementBias );
#endif
`,Uj=`
#ifdef USE_EMISSIVEMAP
vec4 emissiveColor = texture2D( emissiveMap, vUv );
emissiveColor.rgb = emissiveMapTexelToLinear( emissiveColor ).rgb;
totalEmissiveRadiance *= emissiveColor.rgb;
#endif
`,Oj=`
#ifdef USE_EMISSIVEMAP
uniform sampler2D emissiveMap;
#endif
`,Nj=`
gl_FragColor = linearToOutputTexel( gl_FragColor );
`,Hj=`
// For a discussion of what this is, please read this: http://lousodrome.net/blog/light/2013/05/26/gamma-correct-and-hdr-rendering-in-a-32-bits-buffer/

vec4 LinearToLinear( in vec4 value ) {
return value;
}

vec4 GammaToLinear( in vec4 value, in float gammaFactor ) {
return vec4( pow( value.rgb, vec3( gammaFactor ) ), value.a );
}

vec4 LinearToGamma( in vec4 value, in float gammaFactor ) {
return vec4( pow( value.rgb, vec3( 1.0 / gammaFactor ) ), value.a );
}

vec4 sRGBToLinear( in vec4 value ) {
return vec4( mix( pow( value.rgb * 0.9478672986 + vec3( 0.0521327014 ), vec3( 2.4 ) ), value.rgb * 0.0773993808, vec3( lessThanEqual( value.rgb, vec3( 0.04045 ) ) ) ), value.a );
}

vec4 LinearTosRGB( in vec4 value ) {
return vec4( mix( pow( value.rgb, vec3( 0.41666 ) ) * 1.055 - vec3( 0.055 ), value.rgb * 12.92, vec3( lessThanEqual( value.rgb, vec3( 0.0031308 ) ) ) ), value.a );
}

vec4 RGBEToLinear( in vec4 value ) {
return vec4( value.rgb * exp2( value.a * 255.0 - 128.0 ), 1.0 );
}

vec4 LinearToRGBE( in vec4 value ) {
float maxComponent = max( max( value.r, value.g ), value.b );
float fExp = clamp( ceil( log2( maxComponent ) ), -128.0, 127.0 );
return vec4( value.rgb / exp2( fExp ), ( fExp + 128.0 ) / 255.0 );
// return vec4( value.brg, ( 3.0 + 128.0 ) / 256.0 );
}

// reference: http://iwasbeingirony.blogspot.ca/2010/06/difference-between-rgbm-and-rgbd.html
vec4 RGBMToLinear( in vec4 value, in float maxRange ) {
return vec4( value.rgb * value.a * maxRange, 1.0 );
}

vec4 LinearToRGBM( in vec4 value, in float maxRange ) {
float maxRGB = max( value.r, max( value.g, value.b ) );
float M = clamp( maxRGB / maxRange, 0.0, 1.0 );
M = ceil( M * 255.0 ) / 255.0;
return vec4( value.rgb / ( M * maxRange ), M );
}

// reference: http://iwasbeingirony.blogspot.ca/2010/06/difference-between-rgbm-and-rgbd.html
vec4 RGBDToLinear( in vec4 value, in float maxRange ) {
return vec4( value.rgb * ( ( maxRange / 255.0 ) / value.a ), 1.0 );
}

vec4 LinearToRGBD( in vec4 value, in float maxRange ) {
float maxRGB = max( value.r, max( value.g, value.b ) );
float D = max( maxRange / maxRGB, 1.0 );
D = min( floor( D ) / 255.0, 1.0 );
return vec4( value.rgb * ( D * ( 255.0 / maxRange ) ), D );
}

// LogLuv reference: http://graphicrants.blogspot.ca/2009/04/rgbm-color-encoding.html

// M matrix, for encoding
const mat3 cLogLuvM = mat3( 0.2209, 0.3390, 0.4184, 0.1138, 0.6780, 0.7319, 0.0102, 0.1130, 0.2969 );
vec4 LinearToLogLuv( in vec4 value ) {
vec3 Xp_Y_XYZp = value.rgb * cLogLuvM;
Xp_Y_XYZp = max( Xp_Y_XYZp, vec3( 1e-6, 1e-6, 1e-6 ) );
vec4 vResult;
vResult.xy = Xp_Y_XYZp.xy / Xp_Y_XYZp.z;
float Le = 2.0 * log2(Xp_Y_XYZp.y) + 127.0;
vResult.w = fract( Le );
vResult.z = ( Le - ( floor( vResult.w * 255.0 ) ) / 255.0 ) / 255.0;
return vResult;
}

// Inverse M matrix, for decoding
const mat3 cLogLuvInverseM = mat3( 6.0014, -2.7008, -1.7996, -1.3320, 3.1029, -5.7721, 0.3008, -1.0882, 5.6268 );
vec4 LogLuvToLinear( in vec4 value ) {
float Le = value.z * 255.0 + value.w;
vec3 Xp_Y_XYZp;
Xp_Y_XYZp.y = exp2( ( Le - 127.0 ) / 2.0 );
Xp_Y_XYZp.z = Xp_Y_XYZp.y / value.y;
Xp_Y_XYZp.x = value.x * Xp_Y_XYZp.z;
vec3 vRGB = Xp_Y_XYZp.rgb * cLogLuvInverseM;
return vec4( max( vRGB, 0.0 ), 1.0 );
}
`,Qj=`
|
|||
|
#ifdef USE_ENVMAP
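// Environment map sampling: pick a per-fragment reflect/refract vector (or reuse the vertex-computed vReflect), sample by ENVMAP_TYPE_*, then blend into outgoingLight per ENVMAP_BLENDING_*.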
|
|||
|
|
|||
|
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )
|
|||
|
|
|||
|
vec3 cameraToVertex = normalize( vWorldPosition - cameraPosition );
|
|||
|
|
|||
|
// Transforming Normal Vectors with the Inverse Transformation
|
|||
|
vec3 worldNormal = inverseTransformDirection( normal, viewMatrix );
|
|||
|
|
|||
|
#ifdef ENVMAP_MODE_REFLECTION
|
|||
|
|
|||
|
vec3 reflectVec = reflect( cameraToVertex, worldNormal );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec3 reflectVec = refract( cameraToVertex, worldNormal, refractionRatio );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec3 reflectVec = vReflect;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#ifdef ENVMAP_TYPE_CUBE
|
|||
|
|
|||
|
vec4 envColor = textureCube( envMap, vec3( flipEnvMap * reflectVec.x, reflectVec.yz ) );
|
|||
|
|
|||
|
#elif defined( ENVMAP_TYPE_EQUIREC )
|
|||
|
|
|||
|
vec2 sampleUV;
|
|||
|
|
|||
|
reflectVec = normalize( reflectVec );
|
|||
|
|
|||
|
sampleUV.y = asin( clamp( reflectVec.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
|
|||
|
|
|||
|
sampleUV.x = atan( reflectVec.z, reflectVec.x ) * RECIPROCAL_PI2 + 0.5;
|
|||
|
|
|||
|
vec4 envColor = texture2D( envMap, sampleUV );
|
|||
|
|
|||
|
#elif defined( ENVMAP_TYPE_SPHERE )
|
|||
|
|
|||
|
reflectVec = normalize( reflectVec );
|
|||
|
|
|||
|
vec3 reflectView = normalize( ( viewMatrix * vec4( reflectVec, 0.0 ) ).xyz + vec3( 0.0, 0.0, 1.0 ) );
|
|||
|
|
|||
|
vec4 envColor = texture2D( envMap, reflectView.xy * 0.5 + 0.5 );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec4 envColor = vec4( 0.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
envColor = envMapTexelToLinear( envColor );
|
|||
|
|
|||
|
#ifdef ENVMAP_BLENDING_MULTIPLY
|
|||
|
|
|||
|
outgoingLight = mix( outgoingLight, outgoingLight * envColor.xyz, specularStrength * reflectivity );
|
|||
|
|
|||
|
#elif defined( ENVMAP_BLENDING_MIX )
|
|||
|
|
|||
|
outgoingLight = mix( outgoingLight, envColor.xyz, specularStrength * reflectivity );
|
|||
|
|
|||
|
#elif defined( ENVMAP_BLENDING_ADD )
|
|||
|
|
|||
|
outgoingLight += envColor.xyz * specularStrength * reflectivity;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,jj=`
|
|||
|
#if defined( USE_ENVMAP ) || defined( PHYSICAL )
|
|||
|
uniform float reflectivity;
|
|||
|
uniform float envMapIntensity;
|
|||
|
#endif
|
|||
|
|
|||
|
#ifdef USE_ENVMAP
|
|||
|
|
|||
|
#if ! defined( PHYSICAL ) && ( defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG ) )
|
|||
|
varying vec3 vWorldPosition;
|
|||
|
#endif
|
|||
|
|
|||
|
#ifdef ENVMAP_TYPE_CUBE
|
|||
|
uniform samplerCube envMap;
|
|||
|
#else
|
|||
|
uniform sampler2D envMap;
|
|||
|
#endif
|
|||
|
uniform float flipEnvMap;
|
|||
|
uniform int maxMipLevel;
|
|||
|
|
|||
|
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG ) || defined( PHYSICAL )
|
|||
|
uniform float refractionRatio;
|
|||
|
#else
|
|||
|
varying vec3 vReflect;
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,Gj=`
|
|||
|
#ifdef USE_ENVMAP
|
|||
|
|
|||
|
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )
|
|||
|
varying vec3 vWorldPosition;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
varying vec3 vReflect;
|
|||
|
uniform float refractionRatio;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,zj=`
|
|||
|
#ifdef USE_ENVMAP
|
|||
|
|
|||
|
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )
|
|||
|
|
|||
|
vWorldPosition = worldPosition.xyz;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec3 cameraToVertex = normalize( worldPosition.xyz - cameraPosition );
|
|||
|
|
|||
|
vec3 worldNormal = inverseTransformDirection( transformedNormal, viewMatrix );
|
|||
|
|
|||
|
#ifdef ENVMAP_MODE_REFLECTION
|
|||
|
|
|||
|
vReflect = reflect( cameraToVertex, worldNormal );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vReflect = refract( cameraToVertex, worldNormal, refractionRatio );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,qj=`
#ifdef USE_FOG
fogDepth = -mvPosition.z;
#endif
`,Vj=`
#ifdef USE_FOG
varying float fogDepth;
#endif
`,Wj=`
#ifdef USE_FOG
#ifdef FOG_EXP2
float fogFactor = whiteCompliment( exp2( - fogDensity * fogDensity * fogDepth * fogDepth * LOG2 ) );
#else
float fogFactor = smoothstep( fogNear, fogFar, fogDepth );
#endif
gl_FragColor.rgb = mix( gl_FragColor.rgb, fogColor, fogFactor );
#endif
`,Kj=`
#ifdef USE_FOG
uniform vec3 fogColor;
varying float fogDepth;
#ifdef FOG_EXP2
uniform float fogDensity;
#else
uniform float fogNear;
uniform float fogFar;
#endif
#endif
`,Yj=`
|
|||
|
#ifdef TOON
|
|||
|
|
|||
|
uniform sampler2D gradientMap;
|
|||
|
|
|||
|
vec3 getGradientIrradiance( vec3 normal, vec3 lightDirection ) {
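// Toon ramp: remap dotNL from [-1,1] to a [0,1] texture coordinate and sample gradientMap, or fall back to a fixed two-tone ramp when no gradient map is bound.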
|
|||
|
|
|||
|
// dotNL will be from -1.0 to 1.0
|
|||
|
float dotNL = dot( normal, lightDirection );
|
|||
|
vec2 coord = vec2( dotNL * 0.5 + 0.5, 0.0 );
|
|||
|
|
|||
|
#ifdef USE_GRADIENTMAP
|
|||
|
|
|||
|
return texture2D( gradientMap, coord ).rgb;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
return ( coord.x < 0.7 ) ? vec3( 0.7 ) : vec3( 1.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
`,Xj=`
|
|||
|
#ifdef USE_LIGHTMAP
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse += PI * texture2D( lightMap, vUv2 ).xyz * lightMapIntensity; // factor of PI should not be present; included here to prevent breakage
|
|||
|
|
|||
|
#endif
|
|||
|
`,Jj=`
|
|||
|
#ifdef USE_LIGHTMAP
|
|||
|
|
|||
|
uniform sampler2D lightMap;
|
|||
|
uniform float lightMapIntensity;
|
|||
|
|
|||
|
#endif
|
|||
|
`,Zj=`
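// Per-vertex Lambert lighting: accumulate saturate( dot( N, L ) ) * PI * lightColor into vLightFront (and vLightBack when DOUBLE_SIDED) for point, spot, directional and hemisphere lights.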
|
|||
|
vec3 diffuse = vec3( 1.0 );
|
|||
|
|
|||
|
GeometricContext geometry;
|
|||
|
geometry.position = mvPosition.xyz;
|
|||
|
geometry.normal = normalize( transformedNormal );
|
|||
|
geometry.viewDir = normalize( -mvPosition.xyz );
|
|||
|
|
|||
|
GeometricContext backGeometry;
|
|||
|
backGeometry.position = geometry.position;
|
|||
|
backGeometry.normal = -geometry.normal;
|
|||
|
backGeometry.viewDir = geometry.viewDir;
|
|||
|
|
|||
|
vLightFront = vec3( 0.0 );
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
vLightBack = vec3( 0.0 );
|
|||
|
#endif
|
|||
|
|
|||
|
IncidentLight directLight;
|
|||
|
float dotNL;
|
|||
|
vec3 directLightColor_Diffuse;
|
|||
|
|
|||
|
#if NUM_POINT_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
getPointDirectLightIrradiance( pointLights[ i ], geometry, directLight );
|
|||
|
|
|||
|
dotNL = dot( geometry.normal, directLight.direction );
|
|||
|
directLightColor_Diffuse = PI * directLight.color;
|
|||
|
|
|||
|
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
vLightBack += saturate( -dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_SPOT_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
getSpotDirectLightIrradiance( spotLights[ i ], geometry, directLight );
|
|||
|
|
|||
|
dotNL = dot( geometry.normal, directLight.direction );
|
|||
|
directLightColor_Diffuse = PI * directLight.color;
|
|||
|
|
|||
|
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
vLightBack += saturate( -dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#endif
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
/*
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
for ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
// TODO (abelnation): implement
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
*/
|
|||
|
|
|||
|
#if NUM_DIR_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
getDirectionalDirectLightIrradiance( directionalLights[ i ], geometry, directLight );
|
|||
|
|
|||
|
dotNL = dot( geometry.normal, directLight.direction );
|
|||
|
directLightColor_Diffuse = PI * directLight.color;
|
|||
|
|
|||
|
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
vLightBack += saturate( -dotNL ) * directLightColor_Diffuse;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_HEMI_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
vLightFront += getHemisphereLightIrradiance( hemisphereLights[ i ], geometry );
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
vLightBack += getHemisphereLightIrradiance( hemisphereLights[ i ], backGeometry );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
`,$j=`
|
|||
|
uniform vec3 ambientLightColor;
|
|||
|
|
|||
|
vec3 getAmbientLightIrradiance( const in vec3 ambientLightColor ) {
|
|||
|
|
|||
|
vec3 irradiance = ambientLightColor;
|
|||
|
|
|||
|
#ifndef PHYSICALLY_CORRECT_LIGHTS
|
|||
|
|
|||
|
irradiance *= PI;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
return irradiance;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#if NUM_DIR_LIGHTS > 0
|
|||
|
|
|||
|
struct DirectionalLight {
|
|||
|
vec3 direction;
|
|||
|
vec3 color;
|
|||
|
|
|||
|
int shadow;
|
|||
|
float shadowBias;
|
|||
|
float shadowRadius;
|
|||
|
vec2 shadowMapSize;
|
|||
|
};
|
|||
|
|
|||
|
uniform DirectionalLight directionalLights[ NUM_DIR_LIGHTS ];
|
|||
|
|
|||
|
void getDirectionalDirectLightIrradiance( const in DirectionalLight directionalLight, const in GeometricContext geometry, out IncidentLight directLight ) {
|
|||
|
|
|||
|
directLight.color = directionalLight.color;
|
|||
|
directLight.direction = directionalLight.direction;
|
|||
|
directLight.visible = true;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
|
|||
|
#if NUM_POINT_LIGHTS > 0
|
|||
|
|
|||
|
struct PointLight {
|
|||
|
vec3 position;
|
|||
|
vec3 color;
|
|||
|
float distance;
|
|||
|
float decay;
|
|||
|
|
|||
|
int shadow;
|
|||
|
float shadowBias;
|
|||
|
float shadowRadius;
|
|||
|
vec2 shadowMapSize;
|
|||
|
float shadowCameraNear;
|
|||
|
float shadowCameraFar;
|
|||
|
};
|
|||
|
|
|||
|
uniform PointLight pointLights[ NUM_POINT_LIGHTS ];
|
|||
|
|
|||
|
// directLight is an out parameter as having it as a return value caused compiler errors on some devices
|
|||
|
void getPointDirectLightIrradiance( const in PointLight pointLight, const in GeometricContext geometry, out IncidentLight directLight ) {
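// Point light irradiance: direction and distance from the lit point, with falloff from punctualLightIntensityToIrradianceFactor( lightDistance, pointLight.distance, pointLight.decay ).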
|
|||
|
|
|||
|
vec3 lVector = pointLight.position - geometry.position;
|
|||
|
directLight.direction = normalize( lVector );
|
|||
|
|
|||
|
float lightDistance = length( lVector );
|
|||
|
|
|||
|
directLight.color = pointLight.color;
|
|||
|
directLight.color *= punctualLightIntensityToIrradianceFactor( lightDistance, pointLight.distance, pointLight.decay );
|
|||
|
directLight.visible = ( directLight.color != vec3( 0.0 ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
|
|||
|
#if NUM_SPOT_LIGHTS > 0
|
|||
|
|
|||
|
struct SpotLight {
|
|||
|
vec3 position;
|
|||
|
vec3 direction;
|
|||
|
vec3 color;
|
|||
|
float distance;
|
|||
|
float decay;
|
|||
|
float coneCos;
|
|||
|
float penumbraCos;
|
|||
|
|
|||
|
int shadow;
|
|||
|
float shadowBias;
|
|||
|
float shadowRadius;
|
|||
|
vec2 shadowMapSize;
|
|||
|
};
|
|||
|
|
|||
|
uniform SpotLight spotLights[ NUM_SPOT_LIGHTS ];
|
|||
|
|
|||
|
// directLight is an out parameter as having it as a return value caused compiler errors on some devices
|
|||
|
void getSpotDirectLightIrradiance( const in SpotLight spotLight, const in GeometricContext geometry, out IncidentLight directLight ) {
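// Spot light irradiance: same distance falloff as point lights, scaled by a smoothstep penumbra between coneCos and penumbraCos; fragments outside the cone receive no light.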
|
|||
|
|
|||
|
vec3 lVector = spotLight.position - geometry.position;
|
|||
|
directLight.direction = normalize( lVector );
|
|||
|
|
|||
|
float lightDistance = length( lVector );
|
|||
|
float angleCos = dot( directLight.direction, spotLight.direction );
|
|||
|
|
|||
|
if ( angleCos > spotLight.coneCos ) {
|
|||
|
|
|||
|
float spotEffect = smoothstep( spotLight.coneCos, spotLight.penumbraCos, angleCos );
|
|||
|
|
|||
|
directLight.color = spotLight.color;
|
|||
|
directLight.color *= spotEffect * punctualLightIntensityToIrradianceFactor( lightDistance, spotLight.distance, spotLight.decay );
|
|||
|
directLight.visible = true;
|
|||
|
|
|||
|
} else {
|
|||
|
|
|||
|
directLight.color = vec3( 0.0 );
|
|||
|
directLight.visible = false;
|
|||
|
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
struct RectAreaLight {
|
|||
|
vec3 color;
|
|||
|
vec3 position;
|
|||
|
vec3 halfWidth;
|
|||
|
vec3 halfHeight;
|
|||
|
};
|
|||
|
|
|||
|
// Pre-computed values of LinearTransformedCosine approximation of BRDF
|
|||
|
// BRDF approximation Texture is 64x64
|
|||
|
uniform sampler2D ltc_1; // RGBA Float
|
|||
|
uniform sampler2D ltc_2; // RGBA Float
|
|||
|
|
|||
|
uniform RectAreaLight rectAreaLights[ NUM_RECT_AREA_LIGHTS ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
|
|||
|
#if NUM_HEMI_LIGHTS > 0
|
|||
|
|
|||
|
struct HemisphereLight {
|
|||
|
vec3 direction;
|
|||
|
vec3 skyColor;
|
|||
|
vec3 groundColor;
|
|||
|
};
|
|||
|
|
|||
|
uniform HemisphereLight hemisphereLights[ NUM_HEMI_LIGHTS ];
|
|||
|
|
|||
|
vec3 getHemisphereLightIrradiance( const in HemisphereLight hemiLight, const in GeometricContext geometry ) {
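// Hemisphere light: blend groundColor and skyColor by 0.5 * dot( N, lightDirection ) + 0.5.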
|
|||
|
|
|||
|
float dotNL = dot( geometry.normal, hemiLight.direction );
|
|||
|
float hemiDiffuseWeight = 0.5 * dotNL + 0.5;
|
|||
|
|
|||
|
vec3 irradiance = mix( hemiLight.groundColor, hemiLight.skyColor, hemiDiffuseWeight );
|
|||
|
|
|||
|
#ifndef PHYSICALLY_CORRECT_LIGHTS
|
|||
|
|
|||
|
irradiance *= PI;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
return irradiance;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
`,eG=`
|
|||
|
#if defined( USE_ENVMAP ) && defined( PHYSICAL )
|
|||
|
|
|||
|
vec3 getLightProbeIndirectIrradiance( /*const in SpecularLightProbe specularLightProbe,*/ const in GeometricContext geometry, const in int maxMIPLevel ) {
|
|||
|
|
|||
|
vec3 worldNormal = inverseTransformDirection( geometry.normal, viewMatrix );
|
|||
|
|
|||
|
#ifdef ENVMAP_TYPE_CUBE
|
|||
|
|
|||
|
vec3 queryVec = vec3( flipEnvMap * worldNormal.x, worldNormal.yz );
|
|||
|
|
|||
|
// TODO: replace with properly filtered cubemaps and access the irradiance LOD level, be it the last LOD level
|
|||
|
// of a specular cubemap, or just the default level of a specially created irradiance cubemap.
|
|||
|
|
|||
|
#ifdef TEXTURE_LOD_EXT
|
|||
|
|
|||
|
vec4 envMapColor = textureCubeLodEXT( envMap, queryVec, float( maxMIPLevel ) );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
// force the bias high to get the last LOD level as it is the most blurred.
|
|||
|
vec4 envMapColor = textureCube( envMap, queryVec, float( maxMIPLevel ) );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
|
|||
|
|
|||
|
#elif defined( ENVMAP_TYPE_CUBE_UV )
|
|||
|
|
|||
|
vec3 queryVec = vec3( flipEnvMap * worldNormal.x, worldNormal.yz );
|
|||
|
vec4 envMapColor = textureCubeUV( envMap, queryVec, 1.0 );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec4 envMapColor = vec4( 0.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
return PI * envMapColor.rgb * envMapIntensity;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// taken from here: http://casual-effects.blogspot.ca/2011/08/plausible-environment-lighting-in-two.html
|
|||
|
float getSpecularMIPLevel( const in float blinnShininessExponent, const in int maxMIPLevel ) {
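// Convert a Blinn shininess exponent into an env-map mip level: rougher (lower-exponent) surfaces read blurrier mips, clamped to [0, maxMIPLevel].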
|
|||
|
|
|||
|
//float envMapWidth = pow( 2.0, maxMIPLevelScalar );
|
|||
|
//float desiredMIPLevel = log2( envMapWidth * sqrt( 3.0 ) ) - 0.5 * log2( pow2( blinnShininessExponent ) + 1.0 );
|
|||
|
|
|||
|
float maxMIPLevelScalar = float( maxMIPLevel );
|
|||
|
float desiredMIPLevel = maxMIPLevelScalar + 0.79248 - 0.5 * log2( pow2( blinnShininessExponent ) + 1.0 );
|
|||
|
|
|||
|
// clamp to allowable LOD ranges.
|
|||
|
return clamp( desiredMIPLevel, 0.0, maxMIPLevelScalar );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
vec3 getLightProbeIndirectRadiance( /*const in SpecularLightProbe specularLightProbe,*/ const in GeometricContext geometry, const in float blinnShininessExponent, const in int maxMIPLevel ) {
|
|||
|
|
|||
|
#ifdef ENVMAP_MODE_REFLECTION
|
|||
|
|
|||
|
vec3 reflectVec = reflect( -geometry.viewDir, geometry.normal );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec3 reflectVec = refract( -geometry.viewDir, geometry.normal, refractionRatio );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
reflectVec = inverseTransformDirection( reflectVec, viewMatrix );
|
|||
|
|
|||
|
float specularMIPLevel = getSpecularMIPLevel( blinnShininessExponent, maxMIPLevel );
|
|||
|
|
|||
|
#ifdef ENVMAP_TYPE_CUBE
|
|||
|
|
|||
|
vec3 queryReflectVec = vec3( flipEnvMap * reflectVec.x, reflectVec.yz );
|
|||
|
|
|||
|
#ifdef TEXTURE_LOD_EXT
|
|||
|
|
|||
|
vec4 envMapColor = textureCubeLodEXT( envMap, queryReflectVec, specularMIPLevel );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec4 envMapColor = textureCube( envMap, queryReflectVec, specularMIPLevel );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
|
|||
|
|
|||
|
#elif defined( ENVMAP_TYPE_CUBE_UV )
|
|||
|
|
|||
|
vec3 queryReflectVec = vec3( flipEnvMap * reflectVec.x, reflectVec.yz );
|
|||
|
vec4 envMapColor = textureCubeUV( envMap, queryReflectVec, BlinnExponentToGGXRoughness(blinnShininessExponent ));
|
|||
|
|
|||
|
#elif defined( ENVMAP_TYPE_EQUIREC )
|
|||
|
|
|||
|
vec2 sampleUV;
|
|||
|
sampleUV.y = asin( clamp( reflectVec.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
|
|||
|
sampleUV.x = atan( reflectVec.z, reflectVec.x ) * RECIPROCAL_PI2 + 0.5;
|
|||
|
|
|||
|
#ifdef TEXTURE_LOD_EXT
|
|||
|
|
|||
|
vec4 envMapColor = texture2DLodEXT( envMap, sampleUV, specularMIPLevel );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec4 envMapColor = texture2D( envMap, sampleUV, specularMIPLevel );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
|
|||
|
|
|||
|
#elif defined( ENVMAP_TYPE_SPHERE )
|
|||
|
|
|||
|
vec3 reflectView = normalize( ( viewMatrix * vec4( reflectVec, 0.0 ) ).xyz + vec3( 0.0,0.0,1.0 ) );
|
|||
|
|
|||
|
#ifdef TEXTURE_LOD_EXT
|
|||
|
|
|||
|
vec4 envMapColor = texture2DLodEXT( envMap, reflectView.xy * 0.5 + 0.5, specularMIPLevel );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec4 envMapColor = texture2D( envMap, reflectView.xy * 0.5 + 0.5, specularMIPLevel );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
return envMapColor.rgb * envMapIntensity;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
`,tG=`
|
|||
|
BlinnPhongMaterial material;
|
|||
|
material.diffuseColor = diffuseColor.rgb;
|
|||
|
material.specularColor = specular;
|
|||
|
material.specularShininess = shininess;
|
|||
|
material.specularStrength = specularStrength;
|
|||
|
`,iG=`
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
|
|||
|
struct BlinnPhongMaterial {
|
|||
|
|
|||
|
vec3 diffuseColor;
|
|||
|
vec3 specularColor;
|
|||
|
float specularShininess;
|
|||
|
float specularStrength;
|
|||
|
|
|||
|
};
|
|||
|
|
|||
|
void RE_Direct_BlinnPhong( const in IncidentLight directLight, const in GeometricContext geometry, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {
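// Direct Blinn-Phong response: Lambert diffuse plus Blinn-Phong specular scaled by specularStrength; TOON swaps dotNL for the gradient ramp irradiance.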
|
|||
|
|
|||
|
#ifdef TOON
|
|||
|
|
|||
|
vec3 irradiance = getGradientIrradiance( geometry.normal, directLight.direction ) * directLight.color;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
float dotNL = saturate( dot( geometry.normal, directLight.direction ) );
|
|||
|
vec3 irradiance = dotNL * directLight.color;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#ifndef PHYSICALLY_CORRECT_LIGHTS
|
|||
|
|
|||
|
irradiance *= PI; // punctual light
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
reflectedLight.directDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
|
|||
|
|
|||
|
reflectedLight.directSpecular += irradiance * BRDF_Specular_BlinnPhong( directLight, geometry, material.specularColor, material.specularShininess ) * material.specularStrength;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
void RE_IndirectDiffuse_BlinnPhong( const in vec3 irradiance, const in GeometricContext geometry, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#define RE_Direct RE_Direct_BlinnPhong
|
|||
|
#define RE_IndirectDiffuse RE_IndirectDiffuse_BlinnPhong
|
|||
|
|
|||
|
#define Material_LightProbeLOD( material ) (0)
|
|||
|
`,nG=`
|
|||
|
PhysicalMaterial material;
|
|||
|
material.diffuseColor = diffuseColor.rgb * ( 1.0 - metalnessFactor );
|
|||
|
material.specularRoughness = clamp( roughnessFactor, 0.04, 1.0 );
|
|||
|
#ifdef STANDARD
|
|||
|
material.specularColor = mix( vec3( DEFAULT_SPECULAR_COEFFICIENT ), diffuseColor.rgb, metalnessFactor );
|
|||
|
#else
|
|||
|
material.specularColor = mix( vec3( MAXIMUM_SPECULAR_COEFFICIENT * pow2( reflectivity ) ), diffuseColor.rgb, metalnessFactor );
|
|||
|
material.clearCoat = saturate( clearCoat ); // Burley clearcoat model
|
|||
|
material.clearCoatRoughness = clamp( clearCoatRoughness, 0.04, 1.0 );
|
|||
|
#endif
|
|||
|
`,aG=`
|
|||
|
struct PhysicalMaterial {
|
|||
|
|
|||
|
vec3 diffuseColor;
|
|||
|
float specularRoughness;
|
|||
|
vec3 specularColor;
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
float clearCoat;
|
|||
|
float clearCoatRoughness;
|
|||
|
#endif
|
|||
|
|
|||
|
};
|
|||
|
|
|||
|
#define MAXIMUM_SPECULAR_COEFFICIENT 0.16
|
|||
|
#define DEFAULT_SPECULAR_COEFFICIENT 0.04
|
|||
|
|
|||
|
// Clear coat directional hemispherical reflectance (this approximation should be improved)
|
|||
|
float clearCoatDHRApprox( const in float roughness, const in float dotNL ) {
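// Approximate directional hemispherical reflectance of the clear coat layer; used below to attenuate the base layer's direct and indirect response.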
|
|||
|
|
|||
|
return DEFAULT_SPECULAR_COEFFICIENT + ( 1.0 - DEFAULT_SPECULAR_COEFFICIENT ) * ( pow( 1.0 - dotNL, 5.0 ) * pow( 1.0 - roughness, 2.0 ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
void RE_Direct_RectArea_Physical( const in RectAreaLight rectAreaLight, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
|
|||
|
|
|||
|
vec3 normal = geometry.normal;
|
|||
|
vec3 viewDir = geometry.viewDir;
|
|||
|
vec3 position = geometry.position;
|
|||
|
vec3 lightPos = rectAreaLight.position;
|
|||
|
vec3 halfWidth = rectAreaLight.halfWidth;
|
|||
|
vec3 halfHeight = rectAreaLight.halfHeight;
|
|||
|
vec3 lightColor = rectAreaLight.color;
|
|||
|
float roughness = material.specularRoughness;
|
|||
|
|
|||
|
vec3 rectCoords[ 4 ];
|
|||
|
rectCoords[ 0 ] = lightPos + halfWidth - halfHeight; // counterclockwise; light shines in local neg z direction
|
|||
|
rectCoords[ 1 ] = lightPos - halfWidth - halfHeight;
|
|||
|
rectCoords[ 2 ] = lightPos - halfWidth + halfHeight;
|
|||
|
rectCoords[ 3 ] = lightPos + halfWidth + halfHeight;
|
|||
|
|
|||
|
vec2 uv = LTC_Uv( normal, viewDir, roughness );
|
|||
|
|
|||
|
vec4 t1 = texture2D( ltc_1, uv );
|
|||
|
vec4 t2 = texture2D( ltc_2, uv );
|
|||
|
|
|||
|
mat3 mInv = mat3(
|
|||
|
vec3( t1.x, 0, t1.y ),
|
|||
|
vec3( 0, 1, 0 ),
|
|||
|
vec3( t1.z, 0, t1.w )
|
|||
|
);
|
|||
|
|
|||
|
// LTC Fresnel Approximation by Stephen Hill
|
|||
|
// http://blog.selfshadow.com/publications/s2016-advances/s2016_ltc_fresnel.pdf
|
|||
|
vec3 fresnel = ( material.specularColor * t2.x + ( vec3( 1.0 ) - material.specularColor ) * t2.y );
|
|||
|
|
|||
|
reflectedLight.directSpecular += lightColor * fresnel * LTC_Evaluate( normal, viewDir, position, mInv, rectCoords );
|
|||
|
|
|||
|
reflectedLight.directDiffuse += lightColor * material.diffuseColor * LTC_Evaluate( normal, viewDir, position, mat3( 1.0 ), rectCoords );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
void RE_Direct_Physical( const in IncidentLight directLight, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
|
|||
|
|
|||
|
float dotNL = saturate( dot( geometry.normal, directLight.direction ) );
|
|||
|
|
|||
|
vec3 irradiance = dotNL * directLight.color;
|
|||
|
|
|||
|
#ifndef PHYSICALLY_CORRECT_LIGHTS
|
|||
|
|
|||
|
irradiance *= PI; // punctual light
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
float clearCoatDHR = material.clearCoat * clearCoatDHRApprox( material.clearCoatRoughness, dotNL );
|
|||
|
#else
|
|||
|
float clearCoatDHR = 0.0;
|
|||
|
#endif
|
|||
|
|
|||
|
reflectedLight.directSpecular += ( 1.0 - clearCoatDHR ) * irradiance * BRDF_Specular_GGX( directLight, geometry, material.specularColor, material.specularRoughness );
|
|||
|
|
|||
|
reflectedLight.directDiffuse += ( 1.0 - clearCoatDHR ) * irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
|
|||
|
reflectedLight.directSpecular += irradiance * material.clearCoat * BRDF_Specular_GGX( directLight, geometry, vec3( DEFAULT_SPECULAR_COEFFICIENT ), material.clearCoatRoughness );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
void RE_IndirectDiffuse_Physical( const in vec3 irradiance, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
void RE_IndirectSpecular_Physical( const in vec3 radiance, const in vec3 clearCoatRadiance, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
|
|||
|
float dotNL = dotNV;
|
|||
|
float clearCoatDHR = material.clearCoat * clearCoatDHRApprox( material.clearCoatRoughness, dotNL );
|
|||
|
#else
|
|||
|
float clearCoatDHR = 0.0;
|
|||
|
#endif
|
|||
|
|
|||
|
reflectedLight.indirectSpecular += ( 1.0 - clearCoatDHR ) * radiance * BRDF_Specular_GGX_Environment( geometry, material.specularColor, material.specularRoughness );
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
|
|||
|
reflectedLight.indirectSpecular += clearCoatRadiance * material.clearCoat * BRDF_Specular_GGX_Environment( geometry, vec3( DEFAULT_SPECULAR_COEFFICIENT ), material.clearCoatRoughness );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#define RE_Direct RE_Direct_Physical
|
|||
|
#define RE_Direct_RectArea RE_Direct_RectArea_Physical
|
|||
|
#define RE_IndirectDiffuse RE_IndirectDiffuse_Physical
|
|||
|
#define RE_IndirectSpecular RE_IndirectSpecular_Physical
|
|||
|
|
|||
|
#define Material_BlinnShininessExponent( material ) GGXRoughnessToBlinnExponent( material.specularRoughness )
|
|||
|
#define Material_ClearCoat_BlinnShininessExponent( material ) GGXRoughnessToBlinnExponent( material.clearCoatRoughness )
|
|||
|
|
|||
|
// ref: https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
|
|||
|
float computeSpecularOcclusion( const in float dotNV, const in float ambientOcclusion, const in float roughness ) {
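// Derive a specular occlusion term from the ambient occlusion value, view angle and roughness (see the Frostbite course notes referenced above).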
|
|||
|
|
|||
|
return saturate( pow( dotNV + ambientOcclusion, exp2( - 16.0 * roughness - 1.0 ) ) - 1.0 + ambientOcclusion );
|
|||
|
|
|||
|
}
|
|||
|
`,sG=`
|
|||
|
/**
|
|||
|
* This is a template that can be used to light a material, it uses pluggable
|
|||
|
* RenderEquations (RE)for specific lighting scenarios.
|
|||
|
*
|
|||
|
* Instructions for use:
|
|||
|
* - Ensure that both RE_Direct, RE_IndirectDiffuse and RE_IndirectSpecular are defined
|
|||
|
* - If you have defined an RE_IndirectSpecular, you need to also provide a Material_LightProbeLOD. <---- ???
|
|||
|
* - Create a material parameter that is to be passed as the third parameter to your lighting functions.
|
|||
|
*
|
|||
|
* TODO:
|
|||
|
* - Add area light support.
|
|||
|
* - Add sphere light support.
|
|||
|
* - Add diffuse light probe (irradiance cubemap) support.
|
|||
|
*/
|
|||
|
|
|||
|
GeometricContext geometry;
|
|||
|
|
|||
|
geometry.position = - vViewPosition;
|
|||
|
geometry.normal = normal;
|
|||
|
geometry.viewDir = normalize( vViewPosition );
|
|||
|
|
|||
|
IncidentLight directLight;
|
|||
|
|
|||
|
#if ( NUM_POINT_LIGHTS > 0 ) && defined( RE_Direct )
|
|||
|
|
|||
|
PointLight pointLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
pointLight = pointLights[ i ];
|
|||
|
|
|||
|
getPointDirectLightIrradiance( pointLight, geometry, directLight );
|
|||
|
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
directLight.color *= all( bvec2( pointLight.shadow, directLight.visible ) ) ? getPointShadow( pointShadowMap[ i ], pointLight.shadowMapSize, pointLight.shadowBias, pointLight.shadowRadius, vPointShadowCoord[ i ], pointLight.shadowCameraNear, pointLight.shadowCameraFar ) : 1.0;
|
|||
|
#endif
|
|||
|
|
|||
|
RE_Direct( directLight, geometry, material, reflectedLight );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if ( NUM_SPOT_LIGHTS > 0 ) && defined( RE_Direct )
|
|||
|
|
|||
|
SpotLight spotLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
spotLight = spotLights[ i ];
|
|||
|
|
|||
|
getSpotDirectLightIrradiance( spotLight, geometry, directLight );
|
|||
|
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
directLight.color *= all( bvec2( spotLight.shadow, directLight.visible ) ) ? getShadow( spotShadowMap[ i ], spotLight.shadowMapSize, spotLight.shadowBias, spotLight.shadowRadius, vSpotShadowCoord[ i ] ) : 1.0;
|
|||
|
#endif
|
|||
|
|
|||
|
RE_Direct( directLight, geometry, material, reflectedLight );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if ( NUM_DIR_LIGHTS > 0 ) && defined( RE_Direct )
|
|||
|
|
|||
|
DirectionalLight directionalLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
directionalLight = directionalLights[ i ];
|
|||
|
|
|||
|
getDirectionalDirectLightIrradiance( directionalLight, geometry, directLight );
|
|||
|
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
directLight.color *= all( bvec2( directionalLight.shadow, directLight.visible ) ) ? getShadow( directionalShadowMap[ i ], directionalLight.shadowMapSize, directionalLight.shadowBias, directionalLight.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;
|
|||
|
#endif
|
|||
|
|
|||
|
RE_Direct( directLight, geometry, material, reflectedLight );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if ( NUM_RECT_AREA_LIGHTS > 0 ) && defined( RE_Direct_RectArea )
|
|||
|
|
|||
|
RectAreaLight rectAreaLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
rectAreaLight = rectAreaLights[ i ];
|
|||
|
RE_Direct_RectArea( rectAreaLight, geometry, material, reflectedLight );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if defined( RE_IndirectDiffuse )
|
|||
|
|
|||
|
vec3 irradiance = getAmbientLightIrradiance( ambientLightColor );
|
|||
|
|
|||
|
#if ( NUM_HEMI_LIGHTS > 0 )
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
irradiance += getHemisphereLightIrradiance( hemisphereLights[ i ], geometry );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if defined( RE_IndirectSpecular )
|
|||
|
|
|||
|
vec3 radiance = vec3( 0.0 );
|
|||
|
vec3 clearCoatRadiance = vec3( 0.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
`,rG=`
|
|||
|
#if defined( RE_IndirectDiffuse )
|
|||
|
|
|||
|
#ifdef USE_LIGHTMAP
|
|||
|
|
|||
|
vec3 lightMapIrradiance = texture2D( lightMap, vUv2 ).xyz * lightMapIntensity;
|
|||
|
|
|||
|
#ifndef PHYSICALLY_CORRECT_LIGHTS
|
|||
|
|
|||
|
lightMapIrradiance *= PI; // factor of PI should not be present; included here to prevent breakage
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
irradiance += lightMapIrradiance;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if defined( USE_ENVMAP ) && defined( PHYSICAL ) && defined( ENVMAP_TYPE_CUBE_UV )
|
|||
|
|
|||
|
irradiance += getLightProbeIndirectIrradiance( /*lightProbe,*/ geometry, maxMipLevel );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if defined( USE_ENVMAP ) && defined( RE_IndirectSpecular )
|
|||
|
|
|||
|
radiance += getLightProbeIndirectRadiance( /*specularLightProbe,*/ geometry, Material_BlinnShininessExponent( material ), maxMipLevel );
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
clearCoatRadiance += getLightProbeIndirectRadiance( /*specularLightProbe,*/ geometry, Material_ClearCoat_BlinnShininessExponent( material ), maxMipLevel );
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,oG=`
|
|||
|
#if defined( RE_IndirectDiffuse )
|
|||
|
|
|||
|
RE_IndirectDiffuse( irradiance, geometry, material, reflectedLight );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if defined( RE_IndirectSpecular )
|
|||
|
|
|||
|
RE_IndirectSpecular( radiance, clearCoatRadiance, geometry, material, reflectedLight );
|
|||
|
|
|||
|
#endif
|
|||
|
`,lG=`
#if defined( USE_LOGDEPTHBUF ) && defined( USE_LOGDEPTHBUF_EXT )
gl_FragDepthEXT = log2( vFragDepth ) * logDepthBufFC * 0.5;
#endif
`,uG=`
#if defined( USE_LOGDEPTHBUF ) && defined( USE_LOGDEPTHBUF_EXT )
uniform float logDepthBufFC;
varying float vFragDepth;
#endif
`,cG=`
#ifdef USE_LOGDEPTHBUF
#ifdef USE_LOGDEPTHBUF_EXT
varying float vFragDepth;
#else
uniform float logDepthBufFC;
#endif
#endif
`,dG=`
#ifdef USE_LOGDEPTHBUF
#ifdef USE_LOGDEPTHBUF_EXT
vFragDepth = 1.0 + gl_Position.w;
#else
gl_Position.z = log2( max( EPSILON, gl_Position.w + 1.0 ) ) * logDepthBufFC - 1.0;
gl_Position.z *= gl_Position.w;
#endif
#endif
`,fG=`
|
|||
|
#ifdef USE_MAP
|
|||
|
|
|||
|
vec4 texelColor = texture2D( map, vUv );
|
|||
|
|
|||
|
texelColor = mapTexelToLinear( texelColor );
|
|||
|
diffuseColor *= texelColor;
|
|||
|
|
|||
|
#endif
|
|||
|
`,hG=`
|
|||
|
#ifdef USE_MAP
|
|||
|
|
|||
|
uniform sampler2D map;
|
|||
|
|
|||
|
#endif
|
|||
|
`,AG=`
|
|||
|
#ifdef USE_MAP
|
|||
|
|
|||
|
vec2 uv = ( uvTransform * vec3( gl_PointCoord.x, 1.0 - gl_PointCoord.y, 1 ) ).xy;
|
|||
|
vec4 mapTexel = texture2D( map, uv );
|
|||
|
diffuseColor *= mapTexelToLinear( mapTexel );
|
|||
|
|
|||
|
#endif
|
|||
|
`,gG=`
|
|||
|
#ifdef USE_MAP
|
|||
|
|
|||
|
uniform mat3 uvTransform;
|
|||
|
uniform sampler2D map;
|
|||
|
|
|||
|
#endif
|
|||
|
`,mG=`
|
|||
|
float metalnessFactor = metalness;
|
|||
|
|
|||
|
#ifdef USE_METALNESSMAP
|
|||
|
|
|||
|
vec4 texelMetalness = texture2D( metalnessMap, vUv );
|
|||
|
|
|||
|
// reads channel B, compatible with a combined OcclusionRoughnessMetallic (RGB) texture
|
|||
|
metalnessFactor *= texelMetalness.b;
|
|||
|
|
|||
|
#endif
|
|||
|
`,pG=`
|
|||
|
#ifdef USE_METALNESSMAP
|
|||
|
|
|||
|
uniform sampler2D metalnessMap;
|
|||
|
|
|||
|
#endif
|
|||
|
`,_G=`
|
|||
|
#ifdef USE_MORPHNORMALS
|
|||
|
|
|||
|
objectNormal += ( morphNormal0 - normal ) * morphTargetInfluences[ 0 ];
|
|||
|
objectNormal += ( morphNormal1 - normal ) * morphTargetInfluences[ 1 ];
|
|||
|
objectNormal += ( morphNormal2 - normal ) * morphTargetInfluences[ 2 ];
|
|||
|
objectNormal += ( morphNormal3 - normal ) * morphTargetInfluences[ 3 ];
|
|||
|
|
|||
|
#endif
|
|||
|
`,FG=`
|
|||
|
#ifdef USE_MORPHTARGETS
|
|||
|
|
|||
|
#ifndef USE_MORPHNORMALS
|
|||
|
|
|||
|
uniform float morphTargetInfluences[ 8 ];
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
uniform float morphTargetInfluences[ 4 ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,bG=`
|
|||
|
#ifdef USE_MORPHTARGETS
|
|||
|
|
|||
|
transformed += ( morphTarget0 - position ) * morphTargetInfluences[ 0 ];
|
|||
|
transformed += ( morphTarget1 - position ) * morphTargetInfluences[ 1 ];
|
|||
|
transformed += ( morphTarget2 - position ) * morphTargetInfluences[ 2 ];
|
|||
|
transformed += ( morphTarget3 - position ) * morphTargetInfluences[ 3 ];
|
|||
|
|
|||
|
#ifndef USE_MORPHNORMALS
|
|||
|
|
|||
|
transformed += ( morphTarget4 - position ) * morphTargetInfluences[ 4 ];
|
|||
|
transformed += ( morphTarget5 - position ) * morphTargetInfluences[ 5 ];
|
|||
|
transformed += ( morphTarget6 - position ) * morphTargetInfluences[ 6 ];
|
|||
|
transformed += ( morphTarget7 - position ) * morphTargetInfluences[ 7 ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,vG=`
|
|||
|
#ifdef FLAT_SHADED
|
|||
|
|
|||
|
// Workaround for Adreno/Nexus5 not being able to do dFdx( vViewPosition ) ...
|
|||
|
|
|||
|
vec3 fdx = vec3( dFdx( vViewPosition.x ), dFdx( vViewPosition.y ), dFdx( vViewPosition.z ) );
|
|||
|
vec3 fdy = vec3( dFdy( vViewPosition.x ), dFdy( vViewPosition.y ), dFdy( vViewPosition.z ) );
|
|||
|
vec3 normal = normalize( cross( fdx, fdy ) );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec3 normal = normalize( vNormal );
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
normal = normal * ( float( gl_FrontFacing ) * 2.0 - 1.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,yG=`
|
|||
|
#ifdef USE_NORMALMAP
|
|||
|
|
|||
|
#ifdef OBJECTSPACE_NORMALMAP
|
|||
|
|
|||
|
normal = texture2D( normalMap, vUv ).xyz * 2.0 - 1.0; // overrides both flatShading and attribute normals
|
|||
|
|
|||
|
#ifdef FLIP_SIDED
|
|||
|
|
|||
|
normal = - normal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
normal = normal * ( float( gl_FrontFacing ) * 2.0 - 1.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
normal = normalize( normalMatrix * normal );
|
|||
|
|
|||
|
#else // tangent-space normal map
|
|||
|
|
|||
|
normal = perturbNormal2Arb( -vViewPosition, normal );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#elif defined( USE_BUMPMAP )
|
|||
|
|
|||
|
normal = perturbNormalArb( -vViewPosition, normal, dHdxy_fwd() );
|
|||
|
|
|||
|
#endif
|
|||
|
`,wG=`
|
|||
|
#ifdef USE_NORMALMAP
|
|||
|
|
|||
|
uniform sampler2D normalMap;
|
|||
|
uniform vec2 normalScale;
|
|||
|
|
|||
|
#ifdef OBJECTSPACE_NORMALMAP
|
|||
|
|
|||
|
uniform mat3 normalMatrix;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
// Per-Pixel Tangent Space Normal Mapping
|
|||
|
// http://hacksoflife.blogspot.ch/2009/11/per-pixel-tangent-space-normal-mapping.html
|
|||
|
|
|||
|
vec3 perturbNormal2Arb( vec3 eye_pos, vec3 surf_norm ) {
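// Tangent-space normal mapping without precomputed tangents: build a screen-space TBN basis from derivatives of eye_pos and vUv, then perturb surf_norm by the sampled map normal scaled by normalScale.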
|
|||
|
|
|||
|
// Workaround for Adreno 3XX dFd*( vec3 ) bug. See #9988
|
|||
|
|
|||
|
vec3 q0 = vec3( dFdx( eye_pos.x ), dFdx( eye_pos.y ), dFdx( eye_pos.z ) );
|
|||
|
vec3 q1 = vec3( dFdy( eye_pos.x ), dFdy( eye_pos.y ), dFdy( eye_pos.z ) );
|
|||
|
vec2 st0 = dFdx( vUv.st );
|
|||
|
vec2 st1 = dFdy( vUv.st );
|
|||
|
|
|||
|
float scale = sign( st1.t * st0.s - st0.t * st1.s ); // we do not care about the magnitude
|
|||
|
|
|||
|
vec3 S = normalize( ( q0 * st1.t - q1 * st0.t ) * scale );
|
|||
|
vec3 T = normalize( ( - q0 * st1.s + q1 * st0.s ) * scale );
|
|||
|
vec3 N = normalize( surf_norm );
|
|||
|
mat3 tsn = mat3( S, T, N );
|
|||
|
|
|||
|
vec3 mapN = texture2D( normalMap, vUv ).xyz * 2.0 - 1.0;
|
|||
|
|
|||
|
mapN.xy *= normalScale;
|
|||
|
mapN.xy *= ( float( gl_FrontFacing ) * 2.0 - 1.0 );
|
|||
|
|
|||
|
return normalize( tsn * mapN );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#endif
|
|||
|
`,CG=`
|
|||
|
vec3 packNormalToRGB( const in vec3 normal ) {
|
|||
|
return normalize( normal ) * 0.5 + 0.5;
|
|||
|
}
|
|||
|
|
|||
|
vec3 unpackRGBToNormal( const in vec3 rgb ) {
|
|||
|
return 2.0 * rgb.xyz - 1.0;
|
|||
|
}
|
|||
|
|
|||
|
const float PackUpscale = 256. / 255.; // fraction -> 0..1 (including 1)
|
|||
|
const float UnpackDownscale = 255. / 256.; // 0..1 -> fraction (excluding 1)
|
|||
|
|
|||
|
const vec3 PackFactors = vec3( 256. * 256. * 256., 256. * 256., 256. );
|
|||
|
const vec4 UnpackFactors = UnpackDownscale / vec4( PackFactors, 1. );
|
|||
|
|
|||
|
const float ShiftRight8 = 1. / 256.;
|
|||
|
|
|||
|
vec4 packDepthToRGBA( const in float v ) {
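// Pack a [0,1) depth value into the four 8-bit RGBA channels as base-256 digits; unpackRGBAToDepth below inverts this with a single dot product.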
|
|||
|
vec4 r = vec4( fract( v * PackFactors ), v );
|
|||
|
r.yzw -= r.xyz * ShiftRight8; // tidy overflow
|
|||
|
return r * PackUpscale;
|
|||
|
}
|
|||
|
|
|||
|
float unpackRGBAToDepth( const in vec4 v ) {
|
|||
|
return dot( v, UnpackFactors );
|
|||
|
}
|
|||
|
|
|||
|
// NOTE: viewZ/eyeZ is < 0 when in front of the camera per OpenGL conventions
|
|||
|
|
|||
|
float viewZToOrthographicDepth( const in float viewZ, const in float near, const in float far ) {
|
|||
|
return ( viewZ + near ) / ( near - far );
|
|||
|
}
|
|||
|
float orthographicDepthToViewZ( const in float linearClipZ, const in float near, const in float far ) {
|
|||
|
return linearClipZ * ( near - far ) - near;
|
|||
|
}
|
|||
|
|
|||
|
float viewZToPerspectiveDepth( const in float viewZ, const in float near, const in float far ) {
|
|||
|
return (( near + viewZ ) * far ) / (( far - near ) * viewZ );
|
|||
|
}
|
|||
|
float perspectiveDepthToViewZ( const in float invClipZ, const in float near, const in float far ) {
|
|||
|
return ( near * far ) / ( ( far - near ) * invClipZ - far );
|
|||
|
}
|
|||
|
`,kG=`
|
|||
|
#ifdef PREMULTIPLIED_ALPHA
|
|||
|
|
|||
|
// Get normal blending with premultiplied alpha; use with CustomBlending, OneFactor, OneMinusSrcAlphaFactor, AddEquation.
|
|||
|
gl_FragColor.rgb *= gl_FragColor.a;
|
|||
|
|
|||
|
#endif
|
|||
|
`,EG=`
|
|||
|
vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );
|
|||
|
|
|||
|
gl_Position = projectionMatrix * mvPosition;
|
|||
|
`,BG=`
|
|||
|
#if defined( DITHERING )
|
|||
|
|
|||
|
gl_FragColor.rgb = dithering( gl_FragColor.rgb );
|
|||
|
|
|||
|
#endif
|
|||
|
`,SG=`
|
|||
|
#if defined( DITHERING )
|
|||
|
|
|||
|
// based on https://www.shadertoy.com/view/MslGR8
|
|||
|
vec3 dithering( vec3 color ) {
|
|||
|
//Calculate grid position
|
|||
|
float grid_position = rand( gl_FragCoord.xy );
|
|||
|
|
|||
|
//Shift the individual colors differently, thus making it even harder to see the dithering pattern
|
|||
|
vec3 dither_shift_RGB = vec3( 0.25 / 255.0, -0.25 / 255.0, 0.25 / 255.0 );
|
|||
|
|
|||
|
// modify shift according to grid position.
|
|||
|
dither_shift_RGB = mix( 2.0 * dither_shift_RGB, -2.0 * dither_shift_RGB, grid_position );
|
|||
|
|
|||
|
//shift the color by dither_shift
|
|||
|
return color + dither_shift_RGB;
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
`,DG=`
|
|||
|
float roughnessFactor = roughness;
|
|||
|
|
|||
|
#ifdef USE_ROUGHNESSMAP
|
|||
|
|
|||
|
vec4 texelRoughness = texture2D( roughnessMap, vUv );
|
|||
|
|
|||
|
// reads channel G, compatible with a combined OcclusionRoughnessMetallic (RGB) texture
|
|||
|
roughnessFactor *= texelRoughness.g;
|
|||
|
|
|||
|
#endif
|
|||
|
`,xG=`
|
|||
|
#ifdef USE_ROUGHNESSMAP
|
|||
|
|
|||
|
uniform sampler2D roughnessMap;
|
|||
|
|
|||
|
#endif
|
|||
|
`,TG=`
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
|
|||
|
#if NUM_DIR_LIGHTS > 0
|
|||
|
|
|||
|
uniform sampler2D directionalShadowMap[ NUM_DIR_LIGHTS ];
|
|||
|
varying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHTS ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_SPOT_LIGHTS > 0
|
|||
|
|
|||
|
uniform sampler2D spotShadowMap[ NUM_SPOT_LIGHTS ];
|
|||
|
varying vec4 vSpotShadowCoord[ NUM_SPOT_LIGHTS ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_POINT_LIGHTS > 0
|
|||
|
|
|||
|
uniform sampler2D pointShadowMap[ NUM_POINT_LIGHTS ];
|
|||
|
varying vec4 vPointShadowCoord[ NUM_POINT_LIGHTS ];
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
/*
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
// TODO (abelnation): create uniforms for area light shadows
|
|||
|
|
|||
|
#endif
|
|||
|
*/
|
|||
|
|
|||
|
float texture2DCompare( sampler2D depths, vec2 uv, float compare ) {
|
|||
|
|
|||
|
return step( compare, unpackRGBAToDepth( texture2D( depths, uv ) ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
float texture2DShadowLerp( sampler2D depths, vec2 size, vec2 uv, float compare ) {
|
|||
|
|
|||
|
const vec2 offset = vec2( 0.0, 1.0 );
|
|||
|
|
|||
|
vec2 texelSize = vec2( 1.0 ) / size;
|
|||
|
vec2 centroidUV = floor( uv * size + 0.5 ) / size;
|
|||
|
|
|||
|
float lb = texture2DCompare( depths, centroidUV + texelSize * offset.xx, compare );
|
|||
|
float lt = texture2DCompare( depths, centroidUV + texelSize * offset.xy, compare );
|
|||
|
float rb = texture2DCompare( depths, centroidUV + texelSize * offset.yx, compare );
|
|||
|
float rt = texture2DCompare( depths, centroidUV + texelSize * offset.yy, compare );
|
|||
|
|
|||
|
vec2 f = fract( uv * size + 0.5 );
|
|||
|
|
|||
|
float a = mix( lb, lt, f.y );
|
|||
|
float b = mix( rb, rt, f.y );
|
|||
|
float c = mix( a, b, f.x );
|
|||
|
|
|||
|
return c;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
float getShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowBias, float shadowRadius, vec4 shadowCoord ) {
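// Shadow test: perspective-divide the shadow coord, apply shadowBias, and (for the PCF variants) average nine depth comparisons on a 3x3 kernel scaled by shadowRadius.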
|
|||
|
|
|||
|
float shadow = 1.0;
|
|||
|
|
|||
|
shadowCoord.xyz /= shadowCoord.w;
|
|||
|
shadowCoord.z += shadowBias;
|
|||
|
|
|||
|
// if ( something && something ) breaks ATI OpenGL shader compiler
|
|||
|
// if ( all( something, something ) ) using this instead
|
|||
|
|
|||
|
bvec4 inFrustumVec = bvec4 ( shadowCoord.x >= 0.0, shadowCoord.x <= 1.0, shadowCoord.y >= 0.0, shadowCoord.y <= 1.0 );
|
|||
|
bool inFrustum = all( inFrustumVec );
|
|||
|
|
|||
|
bvec2 frustumTestVec = bvec2( inFrustum, shadowCoord.z <= 1.0 );
|
|||
|
|
|||
|
bool frustumTest = all( frustumTestVec );
|
|||
|
|
|||
|
if ( frustumTest ) {
|
|||
|
|
|||
|
#if defined( SHADOWMAP_TYPE_PCF )
|
|||
|
|
|||
|
vec2 texelSize = vec2( 1.0 ) / shadowMapSize;
|
|||
|
|
|||
|
float dx0 = - texelSize.x * shadowRadius;
|
|||
|
float dy0 = - texelSize.y * shadowRadius;
|
|||
|
float dx1 = + texelSize.x * shadowRadius;
|
|||
|
float dy1 = + texelSize.y * shadowRadius;
|
|||
|
|
|||
|
shadow = (
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, 0.0 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, 0.0 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy1 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy1 ), shadowCoord.z ) +
|
|||
|
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy1 ), shadowCoord.z )
|
|||
|
) * ( 1.0 / 9.0 );
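// Together the nine texture2DCompare() taps above form a 3x3 percentage-closer
// filter: each tap is a binary depth comparison offset by up to shadowRadius texels,
// and averaging them with 1.0 / 9.0 turns the hard shadow edge into a soft gradient.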
|
|||
|
|
|||
|
#elif defined( SHADOWMAP_TYPE_PCF_SOFT )
|
|||
|
|
|||
|
vec2 texelSize = vec2( 1.0 ) / shadowMapSize;
|
|||
|
|
|||
|
float dx0 = - texelSize.x * shadowRadius;
|
|||
|
float dy0 = - texelSize.y * shadowRadius;
|
|||
|
float dx1 = + texelSize.x * shadowRadius;
|
|||
|
float dy1 = + texelSize.y * shadowRadius;
|
|||
|
|
|||
|
shadow = (
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx0, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( 0.0, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx1, dy0 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx0, 0.0 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy, shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx1, 0.0 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx0, dy1 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( 0.0, dy1 ), shadowCoord.z ) +
|
|||
|
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx1, dy1 ), shadowCoord.z )
|
|||
|
) * ( 1.0 / 9.0 );
|
|||
|
|
|||
|
#else // no percentage-closer filtering:
|
|||
|
|
|||
|
shadow = texture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
return shadow;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// cubeToUV() maps a 3D direction vector suitable for cube texture mapping to a 2D
// vector suitable for 2D texture mapping. This code uses the following layout for the
// 2D texture:
//
// xzXZ
// y Y
//
// Y - Positive y direction
// y - Negative y direction
// X - Positive x direction
// x - Negative x direction
// Z - Positive z direction
// z - Negative z direction
//
// Source and test bed:
// https://gist.github.com/tschw/da10c43c467ce8afd0c4

vec2 cubeToUV( vec3 v, float texelSizeY ) {
|
|||
|
|
|||
|
// Number of texels to avoid at the edge of each square
|
|||
|
|
|||
|
vec3 absV = abs( v );
|
|||
|
|
|||
|
// Intersect unit cube
|
|||
|
|
|||
|
float scaleToCube = 1.0 / max( absV.x, max( absV.y, absV.z ) );
|
|||
|
absV *= scaleToCube;
|
|||
|
|
|||
|
// Apply scale to avoid seams
|
|||
|
|
|||
|
// two texels less per square (one texel will do for NEAREST)
|
|||
|
v *= scaleToCube * ( 1.0 - 2.0 * texelSizeY );
|
|||
|
|
|||
|
// Unwrap
|
|||
|
|
|||
|
// space: -1 ... 1 range for each square
|
|||
|
//
|
|||
|
// #X## dim := ( 4 , 2 )
|
|||
|
// # # center := ( 1 , 1 )
|
|||
|
|
|||
|
vec2 planar = v.xy;
|
|||
|
|
|||
|
float almostATexel = 1.5 * texelSizeY;
|
|||
|
float almostOne = 1.0 - almostATexel;
|
|||
|
|
|||
|
if ( absV.z >= almostOne ) {
|
|||
|
|
|||
|
if ( v.z > 0.0 )
|
|||
|
planar.x = 4.0 - v.x;
|
|||
|
|
|||
|
} else if ( absV.x >= almostOne ) {
|
|||
|
|
|||
|
float signX = sign( v.x );
|
|||
|
planar.x = v.z * signX + 2.0 * signX;
|
|||
|
|
|||
|
} else if ( absV.y >= almostOne ) {
|
|||
|
|
|||
|
float signY = sign( v.y );
|
|||
|
planar.x = v.x + 2.0 * signY + 2.0;
|
|||
|
planar.y = v.z * signY - 2.0;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// Transform to UV space
|
|||
|
|
|||
|
// scale := 0.5 / dim
|
|||
|
// translate := ( center + 0.5 ) / dim
|
|||
|
return vec2( 0.125, 0.25 ) * planar + vec2( 0.375, 0.75 );
|
|||
|
|
|||
|
}
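// Example (ignoring the texel inset): the direction ( 0.0, 0.0, 1.0 ) takes the
// absV.z branch, giving planar = ( 4.0 - v.x, v.y ) = ( 4.0, 0.0 ), so the final UV is
// vec2( 0.125, 0.25 ) * vec2( 4.0, 0.0 ) + vec2( 0.375, 0.75 ) = ( 0.875, 0.75 ),
// i.e. the centre of the "Z" (positive z) square in the top row of the 4x2 layout.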
|
|||
|
|
|||
|
float getPointShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowBias, float shadowRadius, vec4 shadowCoord, float shadowCameraNear, float shadowCameraFar ) {

vec2 texelSize = vec2( 1.0 ) / ( shadowMapSize * vec2( 4.0, 2.0 ) );

// for point lights, the uniform @vShadowCoord is re-purposed to hold
// the vector from the light to the world-space position of the fragment.
vec3 lightToPosition = shadowCoord.xyz;

// dp = normalized distance from light to fragment position
float dp = ( length( lightToPosition ) - shadowCameraNear ) / ( shadowCameraFar - shadowCameraNear ); // need to clamp?
dp += shadowBias;

// bd3D = base direction 3D
vec3 bd3D = normalize( lightToPosition );

#if defined( SHADOWMAP_TYPE_PCF ) || defined( SHADOWMAP_TYPE_PCF_SOFT )
|
|||
|
|
|||
|
vec2 offset = vec2( - 1, 1 ) * shadowRadius * texelSize.y;
|
|||
|
|
|||
|
return (
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyy, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyy, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyx, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyx, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxy, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxy, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxx, texelSize.y ), dp ) +
|
|||
|
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxx, texelSize.y ), dp )
|
|||
|
) * ( 1.0 / 9.0 );
|
|||
|
|
|||
|
#else // no percentage-closer filtering
|
|||
|
|
|||
|
return texture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
`,IG=`
#ifdef USE_SHADOWMAP

#if NUM_DIR_LIGHTS > 0

uniform mat4 directionalShadowMatrix[ NUM_DIR_LIGHTS ];
varying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHTS ];

#endif

#if NUM_SPOT_LIGHTS > 0

uniform mat4 spotShadowMatrix[ NUM_SPOT_LIGHTS ];
varying vec4 vSpotShadowCoord[ NUM_SPOT_LIGHTS ];

#endif

#if NUM_POINT_LIGHTS > 0

uniform mat4 pointShadowMatrix[ NUM_POINT_LIGHTS ];
varying vec4 vPointShadowCoord[ NUM_POINT_LIGHTS ];

#endif

/*
#if NUM_RECT_AREA_LIGHTS > 0

// TODO (abelnation): uniforms for area light shadows

#endif
*/

#endif
`,PG=`
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
|
|||
|
#if NUM_DIR_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
vDirectionalShadowCoord[ i ] = directionalShadowMatrix[ i ] * worldPosition;
|
|||
|
|
|||
|
}
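// "#pragma unroll_loop" is not a GLSL feature; the WebGL program builder in this
// bundle expands these loops textually before compilation (see the unroll helper
// further down, which substitutes a literal index for every "[ i ]"), so the exact
// "[ i ]" spacing in the loop body is required for the substitution to match.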
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_SPOT_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
vSpotShadowCoord[ i ] = spotShadowMatrix[ i ] * worldPosition;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_POINT_LIGHTS > 0
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
vPointShadowCoord[ i ] = pointShadowMatrix[ i ] * worldPosition;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
/*
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
// TODO (abelnation): update vAreaShadowCoord with area light info
|
|||
|
|
|||
|
#endif
|
|||
|
*/
|
|||
|
|
|||
|
#endif
|
|||
|
`,MG=`
|
|||
|
float getShadowMask() {
|
|||
|
|
|||
|
float shadow = 1.0;
|
|||
|
|
|||
|
#ifdef USE_SHADOWMAP
|
|||
|
|
|||
|
#if NUM_DIR_LIGHTS > 0
|
|||
|
|
|||
|
DirectionalLight directionalLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
directionalLight = directionalLights[ i ];
|
|||
|
shadow *= bool( directionalLight.shadow ) ? getShadow( directionalShadowMap[ i ], directionalLight.shadowMapSize, directionalLight.shadowBias, directionalLight.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_SPOT_LIGHTS > 0
|
|||
|
|
|||
|
SpotLight spotLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
spotLight = spotLights[ i ];
|
|||
|
shadow *= bool( spotLight.shadow ) ? getShadow( spotShadowMap[ i ], spotLight.shadowMapSize, spotLight.shadowBias, spotLight.shadowRadius, vSpotShadowCoord[ i ] ) : 1.0;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#if NUM_POINT_LIGHTS > 0
|
|||
|
|
|||
|
PointLight pointLight;
|
|||
|
|
|||
|
#pragma unroll_loop
|
|||
|
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
|
|||
|
|
|||
|
pointLight = pointLights[ i ];
|
|||
|
shadow *= bool( pointLight.shadow ) ? getPointShadow( pointShadowMap[ i ], pointLight.shadowMapSize, pointLight.shadowBias, pointLight.shadowRadius, vPointShadowCoord[ i ], pointLight.shadowCameraNear, pointLight.shadowCameraFar ) : 1.0;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
/*
|
|||
|
#if NUM_RECT_AREA_LIGHTS > 0
|
|||
|
|
|||
|
// TODO (abelnation): update shadow for Area light
|
|||
|
|
|||
|
#endif
|
|||
|
*/
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
return shadow;
|
|||
|
|
|||
|
}
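// getShadowMask() returns the product of the per-light shadow factors computed by
// getShadow() / getPointShadow(): 1.0 when no shadow-casting light darkens the
// fragment, approaching 0.0 when it is fully shadowed. It is consumed by the
// lambert fragment shader and the unlit shadow material below.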
|
|||
|
`,LG=`
|
|||
|
#ifdef USE_SKINNING
|
|||
|
|
|||
|
mat4 boneMatX = getBoneMatrix( skinIndex.x );
|
|||
|
mat4 boneMatY = getBoneMatrix( skinIndex.y );
|
|||
|
mat4 boneMatZ = getBoneMatrix( skinIndex.z );
|
|||
|
mat4 boneMatW = getBoneMatrix( skinIndex.w );
|
|||
|
|
|||
|
#endif
|
|||
|
`,RG=`
#ifdef USE_SKINNING

uniform mat4 bindMatrix;
uniform mat4 bindMatrixInverse;

#ifdef BONE_TEXTURE

uniform sampler2D boneTexture;
uniform int boneTextureSize;

mat4 getBoneMatrix( const in float i ) {

float j = i * 4.0;
float x = mod( j, float( boneTextureSize ) );
float y = floor( j / float( boneTextureSize ) );

float dx = 1.0 / float( boneTextureSize );
float dy = 1.0 / float( boneTextureSize );

y = dy * ( y + 0.5 );

vec4 v1 = texture2D( boneTexture, vec2( dx * ( x + 0.5 ), y ) );
vec4 v2 = texture2D( boneTexture, vec2( dx * ( x + 1.5 ), y ) );
vec4 v3 = texture2D( boneTexture, vec2( dx * ( x + 2.5 ), y ) );
vec4 v4 = texture2D( boneTexture, vec2( dx * ( x + 3.5 ), y ) );

mat4 bone = mat4( v1, v2, v3, v4 );

return bone;

}

#else

uniform mat4 boneMatrices[ MAX_BONES ];

mat4 getBoneMatrix( const in float i ) {

mat4 bone = boneMatrices[ int(i) ];
return bone;

}

#endif

#endif
`,UG=`
#ifdef USE_SKINNING

vec4 skinVertex = bindMatrix * vec4( transformed, 1.0 );

vec4 skinned = vec4( 0.0 );
skinned += boneMatX * skinVertex * skinWeight.x;
skinned += boneMatY * skinVertex * skinWeight.y;
skinned += boneMatZ * skinVertex * skinWeight.z;
skinned += boneMatW * skinVertex * skinWeight.w;

transformed = ( bindMatrixInverse * skinned ).xyz;

#endif
`,OG=`
|
|||
|
#ifdef USE_SKINNING
|
|||
|
|
|||
|
mat4 skinMatrix = mat4( 0.0 );
|
|||
|
skinMatrix += skinWeight.x * boneMatX;
|
|||
|
skinMatrix += skinWeight.y * boneMatY;
|
|||
|
skinMatrix += skinWeight.z * boneMatZ;
|
|||
|
skinMatrix += skinWeight.w * boneMatW;
|
|||
|
skinMatrix = bindMatrixInverse * skinMatrix * bindMatrix;
|
|||
|
|
|||
|
objectNormal = vec4( skinMatrix * vec4( objectNormal, 0.0 ) ).xyz;
|
|||
|
|
|||
|
#endif
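// skinMatrix is the skin-weight-blended bone transform, conjugated by the bind
// matrix and its inverse; applying it with w = 0.0 rotates the object-space normal
// without picking up any translation. This assumes the bones do not apply
// non-uniform scale, since no inverse-transpose is taken here.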
|
|||
|
`,NG=`
|
|||
|
float specularStrength;
|
|||
|
|
|||
|
#ifdef USE_SPECULARMAP
|
|||
|
|
|||
|
vec4 texelSpecular = texture2D( specularMap, vUv );
|
|||
|
specularStrength = texelSpecular.r;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
specularStrength = 1.0;
|
|||
|
|
|||
|
#endif
|
|||
|
`,HG=`
|
|||
|
#ifdef USE_SPECULARMAP
|
|||
|
|
|||
|
uniform sampler2D specularMap;
|
|||
|
|
|||
|
#endif
|
|||
|
`,QG=`
|
|||
|
#if defined( TONE_MAPPING )
|
|||
|
|
|||
|
gl_FragColor.rgb = toneMapping( gl_FragColor.rgb );
|
|||
|
|
|||
|
#endif
|
|||
|
`,jG=`
|
|||
|
#ifndef saturate
|
|||
|
#define saturate(a) clamp( a, 0.0, 1.0 )
|
|||
|
#endif
|
|||
|
|
|||
|
uniform float toneMappingExposure;
|
|||
|
uniform float toneMappingWhitePoint;
|
|||
|
|
|||
|
// exposure only
|
|||
|
vec3 LinearToneMapping( vec3 color ) {
|
|||
|
|
|||
|
return toneMappingExposure * color;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// source: https://www.cs.utah.edu/~reinhard/cdrom/
|
|||
|
vec3 ReinhardToneMapping( vec3 color ) {
|
|||
|
|
|||
|
color *= toneMappingExposure;
|
|||
|
return saturate( color / ( vec3( 1.0 ) + color ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// source: http://filmicgames.com/archives/75
|
|||
|
#define Uncharted2Helper( x ) max( ( ( x * ( 0.15 * x + 0.10 * 0.50 ) + 0.20 * 0.02 ) / ( x * ( 0.15 * x + 0.50 ) + 0.20 * 0.30 ) ) - 0.02 / 0.30, vec3( 0.0 ) )
|
|||
|
vec3 Uncharted2ToneMapping( vec3 color ) {
|
|||
|
|
|||
|
// John Hable's filmic operator from Uncharted 2 video game
|
|||
|
color *= toneMappingExposure;
|
|||
|
return saturate( Uncharted2Helper( color ) / Uncharted2Helper( vec3( toneMappingWhitePoint ) ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// source: http://filmicgames.com/archives/75
|
|||
|
vec3 OptimizedCineonToneMapping( vec3 color ) {
|
|||
|
|
|||
|
// optimized filmic operator by Jim Hejl and Richard Burgess-Dawson
|
|||
|
color *= toneMappingExposure;
|
|||
|
color = max( vec3( 0.0 ), color - 0.004 );
|
|||
|
return pow( ( color * ( 6.2 * color + 0.5 ) ) / ( color * ( 6.2 * color + 1.7 ) + 0.06 ), vec3( 2.2 ) );
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
// source: https://knarkowicz.wordpress.com/2016/01/06/aces-filmic-tone-mapping-curve/
|
|||
|
vec3 ACESFilmicToneMapping( vec3 color ) {
|
|||
|
|
|||
|
color *= toneMappingExposure;
|
|||
|
return saturate( ( color * ( 2.51 * color + 0.03 ) ) / ( color * ( 2.43 * color + 0.59 ) + 0.14 ) );
|
|||
|
|
|||
|
}
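// These operators are not called directly: the program builder later in this file
// emits a small toneMapping( vec3 ) wrapper that forwards to whichever operator is
// selected on the renderer (Linear, Reinhard, Uncharted2, OptimizedCineon or
// ACESFilmic), and <tonemapping_fragment> then applies it to gl_FragColor.rgb.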
|
|||
|
`,GG=`
|
|||
|
#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP ) || defined( USE_EMISSIVEMAP ) || defined( USE_ROUGHNESSMAP ) || defined( USE_METALNESSMAP )
|
|||
|
|
|||
|
varying vec2 vUv;
|
|||
|
|
|||
|
#endif
|
|||
|
`,zG=`
|
|||
|
#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP ) || defined( USE_EMISSIVEMAP ) || defined( USE_ROUGHNESSMAP ) || defined( USE_METALNESSMAP )
|
|||
|
|
|||
|
varying vec2 vUv;
|
|||
|
uniform mat3 uvTransform;
|
|||
|
|
|||
|
#endif
|
|||
|
`,qG=`
|
|||
|
#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP ) || defined( USE_EMISSIVEMAP ) || defined( USE_ROUGHNESSMAP ) || defined( USE_METALNESSMAP )
|
|||
|
|
|||
|
vUv = ( uvTransform * vec3( uv, 1 ) ).xy;
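// uvTransform is a mat3 (typically built from the texture's offset, repeat,
// rotation and center settings), so multiplying the homogeneous vec3( uv, 1 )
// applies all of them in one step before vUv is passed to the fragment stage.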
|
|||
|
|
|||
|
#endif
|
|||
|
`,VG=`
|
|||
|
#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )
|
|||
|
|
|||
|
varying vec2 vUv2;
|
|||
|
|
|||
|
#endif
|
|||
|
`,WG=`
|
|||
|
#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )
|
|||
|
|
|||
|
attribute vec2 uv2;
|
|||
|
varying vec2 vUv2;
|
|||
|
|
|||
|
#endif
|
|||
|
`,KG=`
|
|||
|
#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )
|
|||
|
|
|||
|
vUv2 = uv2;
|
|||
|
|
|||
|
#endif
|
|||
|
`,YG=`
|
|||
|
#if defined( USE_ENVMAP ) || defined( DISTANCE ) || defined ( USE_SHADOWMAP )
|
|||
|
|
|||
|
vec4 worldPosition = modelMatrix * vec4( transformed, 1.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
`,XG=`
|
|||
|
uniform sampler2D t2D;
|
|||
|
|
|||
|
varying vec2 vUv;
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vec4 texColor = texture2D( t2D, vUv );
|
|||
|
|
|||
|
gl_FragColor = mapTexelToLinear( texColor );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,JG=`
|
|||
|
varying vec2 vUv;
|
|||
|
uniform mat3 uvTransform;
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vUv = ( uvTransform * vec3( uv, 1 ) ).xy;
|
|||
|
|
|||
|
gl_Position = vec4( position.xy, 1.0, 1.0 );
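// With z = w = 1.0 the full-screen background quad is rasterised exactly on the
// far plane (NDC depth 1.0), so any scene geometry drawn with a normal depth test
// covers it.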
|
|||
|
|
|||
|
}
|
|||
|
`,ZG=`
|
|||
|
uniform samplerCube tCube;
|
|||
|
uniform float tFlip;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
varying vec3 vWorldDirection;
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vec4 texColor = textureCube( tCube, vec3( tFlip * vWorldDirection.x, vWorldDirection.yz ) );
|
|||
|
|
|||
|
gl_FragColor = mapTexelToLinear( texColor );
|
|||
|
gl_FragColor.a *= opacity;
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,$G=`
|
|||
|
varying vec3 vWorldDirection;
|
|||
|
|
|||
|
#include <common>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vWorldDirection = transformDirection( position, modelMatrix );
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
|
|||
|
gl_Position.z = gl_Position.w; // set z to camera.far
|
|||
|
|
|||
|
}
|
|||
|
`,ez=`
|
|||
|
#if DEPTH_PACKING == 3200
|
|||
|
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( 1.0 );
|
|||
|
|
|||
|
#if DEPTH_PACKING == 3200
|
|||
|
|
|||
|
diffuseColor.a = opacity;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <map_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
|
|||
|
#if DEPTH_PACKING == 3200
|
|||
|
|
|||
|
gl_FragColor = vec4( vec3( 1.0 - gl_FragCoord.z ), opacity );
|
|||
|
|
|||
|
#elif DEPTH_PACKING == 3201
|
|||
|
|
|||
|
gl_FragColor = packDepthToRGBA( gl_FragCoord.z );
|
|||
|
|
|||
|
#endif
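// DEPTH_PACKING == 3201 spreads the depth value across the four 8-bit RGBA
// channels via packDepthToRGBA(); texture2DCompare() in the shadow map chunk above
// reverses this with unpackRGBAToDepth(), which is what makes these render targets
// usable as shadow maps. The 3200 path instead writes depth as a plain grayscale color.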
|
|||
|
|
|||
|
}
|
|||
|
`,tz=`
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
|
|||
|
#include <skinbase_vertex>
|
|||
|
|
|||
|
#ifdef USE_DISPLACEMENTMAP
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,iz=`
|
|||
|
#define DISTANCE
|
|||
|
|
|||
|
uniform vec3 referencePosition;
|
|||
|
uniform float nearDistance;
|
|||
|
uniform float farDistance;
|
|||
|
varying vec3 vWorldPosition;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main () {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( 1.0 );
|
|||
|
|
|||
|
#include <map_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
|
|||
|
float dist = length( vWorldPosition - referencePosition );
|
|||
|
dist = ( dist - nearDistance ) / ( farDistance - nearDistance );
|
|||
|
dist = saturate( dist ); // clamp to [ 0, 1 ]
|
|||
|
|
|||
|
gl_FragColor = packDepthToRGBA( dist );
|
|||
|
|
|||
|
}
|
|||
|
`,nz=`
|
|||
|
#define DISTANCE
|
|||
|
|
|||
|
varying vec3 vWorldPosition;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
|
|||
|
#include <skinbase_vertex>
|
|||
|
|
|||
|
#ifdef USE_DISPLACEMENTMAP
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
|
|||
|
vWorldPosition = worldPosition.xyz;
|
|||
|
|
|||
|
}
|
|||
|
`,az=`
|
|||
|
uniform sampler2D tEquirect;
|
|||
|
|
|||
|
varying vec3 vWorldDirection;
|
|||
|
|
|||
|
#include <common>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vec3 direction = normalize( vWorldDirection );
|
|||
|
|
|||
|
vec2 sampleUV;
|
|||
|
|
|||
|
sampleUV.y = asin( clamp( direction.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
|
|||
|
|
|||
|
sampleUV.x = atan( direction.z, direction.x ) * RECIPROCAL_PI2 + 0.5;
|
|||
|
|
|||
|
vec4 texColor = texture2D( tEquirect, sampleUV );
|
|||
|
|
|||
|
gl_FragColor = mapTexelToLinear( texColor );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,sz=`
|
|||
|
varying vec3 vWorldDirection;
|
|||
|
|
|||
|
#include <common>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
vWorldDirection = transformDirection( position, modelMatrix );
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,rz=`
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
uniform float dashSize;
|
|||
|
uniform float totalSize;
|
|||
|
|
|||
|
varying float vLineDistance;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
if ( mod( vLineDistance, totalSize ) > dashSize ) {
|
|||
|
|
|||
|
discard;
|
|||
|
|
|||
|
}
|
|||
|
|
|||
|
vec3 outgoingLight = vec3( 0.0 );
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
|
|||
|
outgoingLight = diffuseColor.rgb; // simple shader
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,oz=`
|
|||
|
uniform float scale;
|
|||
|
attribute float lineDistance;
|
|||
|
|
|||
|
varying float vLineDistance;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <color_vertex>
|
|||
|
|
|||
|
vLineDistance = scale * lineDistance;
|
|||
|
|
|||
|
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
|
|||
|
gl_Position = projectionMatrix * mvPosition;
|
|||
|
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,lz=`
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <uv2_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <aomap_pars_fragment>
|
|||
|
#include <lightmap_pars_fragment>
|
|||
|
#include <envmap_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <specularmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
#include <specularmap_fragment>
|
|||
|
|
|||
|
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
|
|||
|
|
|||
|
// accumulation (baked indirect lighting only)
|
|||
|
#ifdef USE_LIGHTMAP
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse += texture2D( lightMap, vUv2 ).xyz * lightMapIntensity;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse += vec3( 1.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
// modulation
|
|||
|
#include <aomap_fragment>
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse *= diffuseColor.rgb;
|
|||
|
|
|||
|
vec3 outgoingLight = reflectedLight.indirectDiffuse;
|
|||
|
|
|||
|
#include <envmap_fragment>
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,uz=`
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <uv2_pars_vertex>
|
|||
|
#include <envmap_pars_vertex>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
#include <uv2_vertex>
|
|||
|
#include <color_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
|
|||
|
#ifdef USE_ENVMAP
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
#include <envmap_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,cz=`
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform vec3 emissive;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
varying vec3 vLightFront;
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
varying vec3 vLightBack;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <dithering_pars_fragment>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <uv2_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <aomap_pars_fragment>
|
|||
|
#include <lightmap_pars_fragment>
|
|||
|
#include <emissivemap_pars_fragment>
|
|||
|
#include <envmap_pars_fragment>
|
|||
|
#include <bsdfs>
|
|||
|
#include <lights_pars_begin>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <shadowmap_pars_fragment>
|
|||
|
#include <shadowmask_pars_fragment>
|
|||
|
#include <specularmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
|
|||
|
vec3 totalEmissiveRadiance = emissive;
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
#include <specularmap_fragment>
|
|||
|
#include <emissivemap_fragment>
|
|||
|
|
|||
|
// accumulation
|
|||
|
reflectedLight.indirectDiffuse = getAmbientLightIrradiance( ambientLightColor );
|
|||
|
|
|||
|
#include <lightmap_fragment>
|
|||
|
|
|||
|
reflectedLight.indirectDiffuse *= BRDF_Diffuse_Lambert( diffuseColor.rgb );
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
reflectedLight.directDiffuse = ( gl_FrontFacing ) ? vLightFront : vLightBack;
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
reflectedLight.directDiffuse = vLightFront;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
reflectedLight.directDiffuse *= BRDF_Diffuse_Lambert( diffuseColor.rgb ) * getShadowMask();
|
|||
|
|
|||
|
// modulation
|
|||
|
#include <aomap_fragment>
|
|||
|
|
|||
|
vec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;
|
|||
|
|
|||
|
#include <envmap_fragment>
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <dithering_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,dz=`
|
|||
|
#define LAMBERT
|
|||
|
|
|||
|
varying vec3 vLightFront;
|
|||
|
|
|||
|
#ifdef DOUBLE_SIDED
|
|||
|
|
|||
|
varying vec3 vLightBack;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <uv2_pars_vertex>
|
|||
|
#include <envmap_pars_vertex>
|
|||
|
#include <bsdfs>
|
|||
|
#include <lights_pars_begin>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <shadowmap_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
#include <uv2_vertex>
|
|||
|
#include <color_vertex>
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <envmap_vertex>
|
|||
|
#include <lights_lambert_vertex>
|
|||
|
#include <shadowmap_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,fz=`
|
|||
|
#define MATCAP
|
|||
|
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform float opacity;
|
|||
|
uniform sampler2D matcap;
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <bumpmap_pars_fragment>
|
|||
|
#include <normalmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
#include <normal_fragment_begin>
|
|||
|
#include <normal_fragment_maps>
|
|||
|
|
|||
|
vec3 viewDir = normalize( vViewPosition );
|
|||
|
vec3 x = normalize( vec3( viewDir.z, 0.0, - viewDir.x ) );
|
|||
|
vec3 y = cross( viewDir, x );
|
|||
|
vec2 uv = vec2( dot( x, normal ), dot( y, normal ) ) * 0.495 + 0.5; // 0.495 to remove artifacts caused by undersized matcap disks
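// The matcap lookup builds an orthonormal basis ( x, y ) around the view direction
// and projects the surface normal onto it, so the normal's view-space orientation
// indexes into the spherical material-capture texture; the 0.495 scale keeps the
// lookup just inside the matcap disk.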
|
|||
|
|
|||
|
#ifdef USE_MATCAP
|
|||
|
|
|||
|
vec4 matcapColor = texture2D( matcap, uv );
|
|||
|
matcapColor = matcapTexelToLinear( matcapColor );
|
|||
|
|
|||
|
#else
|
|||
|
|
|||
|
vec4 matcapColor = vec4( 1.0 );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
vec3 outgoingLight = diffuseColor.rgb * matcapColor.rgb;
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,hz=`
|
|||
|
#define MATCAP
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
|
|||
|
|
|||
|
vNormal = normalize( transformedNormal );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
vViewPosition = - mvPosition.xyz;
|
|||
|
|
|||
|
}
|
|||
|
`,Az=`
|
|||
|
#define PHONG
|
|||
|
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform vec3 emissive;
|
|||
|
uniform vec3 specular;
|
|||
|
uniform float shininess;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <dithering_pars_fragment>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <uv2_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <aomap_pars_fragment>
|
|||
|
#include <lightmap_pars_fragment>
|
|||
|
#include <emissivemap_pars_fragment>
|
|||
|
#include <envmap_pars_fragment>
|
|||
|
#include <gradientmap_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <bsdfs>
|
|||
|
#include <lights_pars_begin>
|
|||
|
#include <lights_phong_pars_fragment>
|
|||
|
#include <shadowmap_pars_fragment>
|
|||
|
#include <bumpmap_pars_fragment>
|
|||
|
#include <normalmap_pars_fragment>
|
|||
|
#include <specularmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
|
|||
|
vec3 totalEmissiveRadiance = emissive;
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
#include <specularmap_fragment>
|
|||
|
#include <normal_fragment_begin>
|
|||
|
#include <normal_fragment_maps>
|
|||
|
#include <emissivemap_fragment>
|
|||
|
|
|||
|
// accumulation
|
|||
|
#include <lights_phong_fragment>
|
|||
|
#include <lights_fragment_begin>
|
|||
|
#include <lights_fragment_maps>
|
|||
|
#include <lights_fragment_end>
|
|||
|
|
|||
|
// modulation
|
|||
|
#include <aomap_fragment>
|
|||
|
|
|||
|
vec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;
|
|||
|
|
|||
|
#include <envmap_fragment>
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <dithering_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,gz=`
|
|||
|
#define PHONG
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <uv2_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <envmap_pars_vertex>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <shadowmap_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
#include <uv2_vertex>
|
|||
|
#include <color_vertex>
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
|
|||
|
|
|||
|
vNormal = normalize( transformedNormal );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
|
|||
|
vViewPosition = - mvPosition.xyz;
|
|||
|
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <envmap_vertex>
|
|||
|
#include <shadowmap_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,mz=`
|
|||
|
#define PHYSICAL
|
|||
|
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform vec3 emissive;
|
|||
|
uniform float roughness;
|
|||
|
uniform float metalness;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#ifndef STANDARD
|
|||
|
uniform float clearCoat;
|
|||
|
uniform float clearCoatRoughness;
|
|||
|
#endif
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <dithering_pars_fragment>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <uv2_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <alphamap_pars_fragment>
|
|||
|
#include <aomap_pars_fragment>
|
|||
|
#include <lightmap_pars_fragment>
|
|||
|
#include <emissivemap_pars_fragment>
|
|||
|
#include <bsdfs>
|
|||
|
#include <cube_uv_reflection_fragment>
|
|||
|
#include <envmap_pars_fragment>
|
|||
|
#include <envmap_physical_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <lights_pars_begin>
|
|||
|
#include <lights_physical_pars_fragment>
|
|||
|
#include <shadowmap_pars_fragment>
|
|||
|
#include <bumpmap_pars_fragment>
|
|||
|
#include <normalmap_pars_fragment>
|
|||
|
#include <roughnessmap_pars_fragment>
|
|||
|
#include <metalnessmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
|
|||
|
vec3 totalEmissiveRadiance = emissive;
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
#include <alphamap_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
#include <roughnessmap_fragment>
|
|||
|
#include <metalnessmap_fragment>
|
|||
|
#include <normal_fragment_begin>
|
|||
|
#include <normal_fragment_maps>
|
|||
|
#include <emissivemap_fragment>
|
|||
|
|
|||
|
// accumulation
|
|||
|
#include <lights_physical_fragment>
|
|||
|
#include <lights_fragment_begin>
|
|||
|
#include <lights_fragment_maps>
|
|||
|
#include <lights_fragment_end>
|
|||
|
|
|||
|
// modulation
|
|||
|
#include <aomap_fragment>
|
|||
|
|
|||
|
vec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <dithering_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,pz=`
|
|||
|
#define PHYSICAL
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <uv2_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <shadowmap_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
#include <uv2_vertex>
|
|||
|
#include <color_vertex>
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
|
|||
|
|
|||
|
vNormal = normalize( transformedNormal );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
|
|||
|
vViewPosition = - mvPosition.xyz;
|
|||
|
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <shadowmap_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,_z=`
|
|||
|
#define NORMAL
|
|||
|
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || ( defined( USE_NORMALMAP ) && ! defined( OBJECTSPACE_NORMALMAP ) )
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <packing>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <bumpmap_pars_fragment>
|
|||
|
#include <normalmap_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <normal_fragment_begin>
|
|||
|
#include <normal_fragment_maps>
|
|||
|
|
|||
|
gl_FragColor = vec4( packNormalToRGB( normal ), opacity );
|
|||
|
|
|||
|
}
|
|||
|
`,Fz=`
|
|||
|
#define NORMAL
|
|||
|
|
|||
|
#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || ( defined( USE_NORMALMAP ) && ! defined( OBJECTSPACE_NORMALMAP ) )
|
|||
|
|
|||
|
varying vec3 vViewPosition;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#ifndef FLAT_SHADED
|
|||
|
|
|||
|
varying vec3 vNormal;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <displacementmap_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <skinning_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
|
|||
|
#include <beginnormal_vertex>
|
|||
|
#include <morphnormal_vertex>
|
|||
|
#include <skinbase_vertex>
|
|||
|
#include <skinnormal_vertex>
|
|||
|
#include <defaultnormal_vertex>
|
|||
|
|
|||
|
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
|
|||
|
|
|||
|
vNormal = normalize( transformedNormal );
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <skinning_vertex>
|
|||
|
#include <displacementmap_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
|
|||
|
#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || ( defined( USE_NORMALMAP ) && ! defined( OBJECTSPACE_NORMALMAP ) )
|
|||
|
|
|||
|
vViewPosition = - mvPosition.xyz;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
}
|
|||
|
`,bz=`
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <color_pars_fragment>
|
|||
|
#include <map_particle_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec3 outgoingLight = vec3( 0.0 );
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_particle_fragment>
|
|||
|
#include <color_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
|
|||
|
outgoingLight = diffuseColor.rgb;
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <premultiplied_alpha_fragment>
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,vz=`
|
|||
|
uniform float size;
|
|||
|
uniform float scale;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <color_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <morphtarget_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <color_vertex>
|
|||
|
#include <begin_vertex>
|
|||
|
#include <morphtarget_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
|
|||
|
gl_PointSize = size;
|
|||
|
|
|||
|
#ifdef USE_SIZEATTENUATION
|
|||
|
|
|||
|
bool isPerspective = ( projectionMatrix[ 2 ][ 3 ] == - 1.0 );
|
|||
|
|
|||
|
if ( isPerspective ) gl_PointSize *= ( scale / - mvPosition.z );
|
|||
|
|
|||
|
#endif
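// Size attenuation scales gl_PointSize by scale / -mvPosition.z, i.e. inversely
// with view-space depth, so points shrink with distance under a perspective
// projection; the projectionMatrix[ 2 ][ 3 ] == -1.0 test is how the shader
// detects that a perspective (rather than orthographic) camera is bound.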
|
|||
|
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,yz=`
|
|||
|
uniform vec3 color;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <packing>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <bsdfs>
|
|||
|
#include <lights_pars_begin>
|
|||
|
#include <shadowmap_pars_fragment>
|
|||
|
#include <shadowmask_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
gl_FragColor = vec4( color, opacity * ( 1.0 - getShadowMask() ) );
|
|||
|
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,wz=`
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <shadowmap_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <begin_vertex>
|
|||
|
#include <project_vertex>
|
|||
|
#include <worldpos_vertex>
|
|||
|
#include <shadowmap_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`,Cz=`
|
|||
|
uniform vec3 diffuse;
|
|||
|
uniform float opacity;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_fragment>
|
|||
|
#include <map_pars_fragment>
|
|||
|
#include <fog_pars_fragment>
|
|||
|
#include <logdepthbuf_pars_fragment>
|
|||
|
#include <clipping_planes_pars_fragment>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <clipping_planes_fragment>
|
|||
|
|
|||
|
vec3 outgoingLight = vec3( 0.0 );
|
|||
|
vec4 diffuseColor = vec4( diffuse, opacity );
|
|||
|
|
|||
|
#include <logdepthbuf_fragment>
|
|||
|
#include <map_fragment>
|
|||
|
#include <alphatest_fragment>
|
|||
|
|
|||
|
outgoingLight = diffuseColor.rgb;
|
|||
|
|
|||
|
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
|
|||
|
|
|||
|
#include <tonemapping_fragment>
|
|||
|
#include <encodings_fragment>
|
|||
|
#include <fog_fragment>
|
|||
|
|
|||
|
}
|
|||
|
`,kz=`
|
|||
|
uniform float rotation;
|
|||
|
uniform vec2 center;
|
|||
|
|
|||
|
#include <common>
|
|||
|
#include <uv_pars_vertex>
|
|||
|
#include <fog_pars_vertex>
|
|||
|
#include <logdepthbuf_pars_vertex>
|
|||
|
#include <clipping_planes_pars_vertex>
|
|||
|
|
|||
|
void main() {
|
|||
|
|
|||
|
#include <uv_vertex>
|
|||
|
|
|||
|
vec4 mvPosition = modelViewMatrix * vec4( 0.0, 0.0, 0.0, 1.0 );
|
|||
|
|
|||
|
vec2 scale;
|
|||
|
scale.x = length( vec3( modelMatrix[ 0 ].x, modelMatrix[ 0 ].y, modelMatrix[ 0 ].z ) );
|
|||
|
scale.y = length( vec3( modelMatrix[ 1 ].x, modelMatrix[ 1 ].y, modelMatrix[ 1 ].z ) );
|
|||
|
|
|||
|
#ifndef USE_SIZEATTENUATION
|
|||
|
|
|||
|
bool isPerspective = ( projectionMatrix[ 2 ][ 3 ] == - 1.0 );
|
|||
|
|
|||
|
if ( isPerspective ) scale *= - mvPosition.z;
|
|||
|
|
|||
|
#endif
|
|||
|
|
|||
|
vec2 alignedPosition = ( position.xy - ( center - vec2( 0.5 ) ) ) * scale;
|
|||
|
|
|||
|
vec2 rotatedPosition;
|
|||
|
rotatedPosition.x = cos( rotation ) * alignedPosition.x - sin( rotation ) * alignedPosition.y;
|
|||
|
rotatedPosition.y = sin( rotation ) * alignedPosition.x + cos( rotation ) * alignedPosition.y;
|
|||
|
|
|||
|
mvPosition.xy += rotatedPosition;
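// The sprite is billboarded by transforming only the object origin into view space
// and then offsetting mvPosition.xy by the rotated, scaled corner position, so the
// quad always faces the camera regardless of the model's orientation.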
|
|||
|
|
|||
|
gl_Position = projectionMatrix * mvPosition;
|
|||
|
|
|||
|
#include <logdepthbuf_vertex>
|
|||
|
#include <clipping_planes_vertex>
|
|||
|
#include <fog_vertex>
|
|||
|
|
|||
|
}
|
|||
|
`;var jt={alphamap_fragment:gj,alphamap_pars_fragment:mj,alphatest_fragment:pj,aomap_fragment:_j,aomap_pars_fragment:Fj,begin_vertex:bj,beginnormal_vertex:vj,bsdfs:yj,bumpmap_pars_fragment:wj,clipping_planes_fragment:Cj,clipping_planes_pars_fragment:kj,clipping_planes_pars_vertex:Ej,clipping_planes_vertex:Bj,color_fragment:Sj,color_pars_fragment:Dj,color_pars_vertex:xj,color_vertex:Tj,common:Ij,cube_uv_reflection_fragment:Pj,defaultnormal_vertex:Mj,displacementmap_pars_vertex:Lj,displacementmap_vertex:Rj,emissivemap_fragment:Uj,emissivemap_pars_fragment:Oj,encodings_fragment:Nj,encodings_pars_fragment:Hj,envmap_fragment:Qj,envmap_pars_fragment:jj,envmap_pars_vertex:Gj,envmap_physical_pars_fragment:eG,envmap_vertex:zj,fog_vertex:qj,fog_pars_vertex:Vj,fog_fragment:Wj,fog_pars_fragment:Kj,gradientmap_pars_fragment:Yj,lightmap_fragment:Xj,lightmap_pars_fragment:Jj,lights_lambert_vertex:Zj,lights_pars_begin:$j,lights_phong_fragment:tG,lights_phong_pars_fragment:iG,lights_physical_fragment:nG,lights_physical_pars_fragment:aG,lights_fragment_begin:sG,lights_fragment_maps:rG,lights_fragment_end:oG,logdepthbuf_fragment:lG,logdepthbuf_pars_fragment:uG,logdepthbuf_pars_vertex:cG,logdepthbuf_vertex:dG,map_fragment:fG,map_pars_fragment:hG,map_particle_fragment:AG,map_particle_pars_fragment:gG,metalnessmap_fragment:mG,metalnessmap_pars_fragment:pG,morphnormal_vertex:_G,morphtarget_pars_vertex:FG,morphtarget_vertex:bG,normal_fragment_begin:vG,normal_fragment_maps:yG,normalmap_pars_fragment:wG,packing:CG,premultiplied_alpha_fragment:kG,project_vertex:EG,dithering_fragment:BG,dithering_pars_fragment:SG,roughnessmap_fragment:DG,roughnessmap_pars_fragment:xG,shadowmap_pars_fragment:TG,shadowmap_pars_vertex:IG,shadowmap_vertex:PG,shadowmask_pars_fragment:MG,skinbase_vertex:LG,skinning_pars_vertex:RG,skinning_vertex:UG,skinnormal_vertex:OG,specularmap_fragment:NG,specularmap_pars_fragment:HG,tonemapping_fragment:QG,tonemapping_pars_fragment:jG,uv_pars_fragment:GG,uv_pars_vertex:zG,uv_vertex:qG,uv2_pars_fragment:VG,uv2_pars_vertex:WG,uv2_vertex:KG,worldpos_vertex:YG,background_frag:XG,background_vert:JG,cube_frag:ZG,cube_vert:$G,depth_frag:ez,depth_vert:tz,distanceRGBA_frag:iz,distanceRGBA_vert:nz,equirect_frag:az,equirect_vert:sz,linedashed_frag:rz,linedashed_vert:oz,meshbasic_frag:lz,meshbasic_vert:uz,meshlambert_frag:cz,meshlambert_vert:dz,meshmatcap_frag:fz,meshmatcap_vert:hz,meshphong_frag:Az,meshphong_vert:gz,meshphysical_frag:mz,meshphysical_vert:pz,normal_frag:_z,normal_vert:Fz,points_frag:bz,points_vert:vz,shadow_frag:yz,shadow_vert:wz,sprite_frag:Cz,sprite_vert:kz};function fc(e){var t={};for(var i in e){t[i]={};for(var n in e[i]){var a=e[i][n];a&&(a.isColor||a.isMatrix3||a.isMatrix4||a.isVector2||a.isVector3||a.isVector4||a.isTexture)?t[i][n]=a.clone():Array.isArray(a)?t[i][n]=a.slice():t[i][n]=a}}return t}function jn(e){for(var t={},i=0;i<e.length;i++){var n=fc(e[i]);for(var a in n)t[a]=n[a]}return t}var 
Ez={aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush
|
|||
|
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
|
|||
|
}`,this.fragmentShader=`void main() {
|
|||
|
gl_FragColor = vec4( 1.0, 0.0, 0.0, 1.0 );
|
|||
|
}`,this.linewidth=1,this.wireframe=!1,this.wireframeLinewidth=1,this.fog=!1,this.lights=!1,this.clipping=!1,this.skinning=!1,this.morphTargets=!1,this.morphNormals=!1,this.extensions={derivatives:!1,fragDepth:!1,drawBuffers:!1,shaderTextureLOD:!1},this.defaultAttributeValues={color:[1,1,1],uv:[0,0],uv2:[0,0]},this.index0AttributeName=void 0,this.uniformsNeedUpdate=!1,e!==void 0&&(e.attributes!==void 0&&console.error("THREE.ShaderMaterial: attributes should now be defined in THREE.BufferGeometry instead."),this.setValues(e))}Yn.prototype=Object.create(Bt.prototype);Yn.prototype.constructor=Yn;Yn.prototype.isShaderMaterial=!0;Yn.prototype.copy=function(e){return Bt.prototype.copy.call(this,e),this.fragmentShader=e.fragmentShader,this.vertexShader=e.vertexShader,this.uniforms=fc(e.uniforms),this.defines=Object.assign({},e.defines),this.wireframe=e.wireframe,this.wireframeLinewidth=e.wireframeLinewidth,this.lights=e.lights,this.clipping=e.clipping,this.skinning=e.skinning,this.morphTargets=e.morphTargets,this.morphNormals=e.morphNormals,this.extensions=e.extensions,this};Yn.prototype.toJSON=function(e){var t=Bt.prototype.toJSON.call(this,e);t.uniforms={};for(var i in this.uniforms){var n=this.uniforms[i],a=n.value;a&&a.isTexture?t.uniforms[i]={type:"t",value:a.toJSON(e).uuid}:a&&a.isColor?t.uniforms[i]={type:"c",value:a.getHex()}:a&&a.isVector2?t.uniforms[i]={type:"v2",value:a.toArray()}:a&&a.isVector3?t.uniforms[i]={type:"v3",value:a.toArray()}:a&&a.isVector4?t.uniforms[i]={type:"v4",value:a.toArray()}:a&&a.isMatrix3?t.uniforms[i]={type:"m3",value:a.toArray()}:a&&a.isMatrix4?t.uniforms[i]={type:"m4",value:a.toArray()}:t.uniforms[i]={value:a}}Object.keys(this.defines).length>0&&(t.defines=this.defines),t.vertexShader=this.vertexShader,t.fragmentShader=this.fragmentShader;var s={};for(var r in this.extensions)this.extensions[r]===!0&&(s[r]=!0);return Object.keys(s).length>0&&(t.extensions=s),t};function zl(e,t){this.origin=e!==void 0?e:new ee,this.direction=t!==void 0?t:new ee}Object.assign(zl.prototype,{set:function(e,t){return this.origin.copy(e),this.direction.copy(t),this},clone:function(){return new this.constructor().copy(this)},copy:function(e){return this.origin.copy(e.origin),this.direction.copy(e.direction),this},at:function(e,t){return t===void 0&&(console.warn("THREE.Ray: .at() target is now required"),t=new ee),t.copy(this.direction).multiplyScalar(e).add(this.origin)},lookAt:function(e){return this.direction.copy(e).sub(this.origin).normalize(),this},recast:function(){var e=new ee;return function(i){return this.origin.copy(this.at(i,e)),this}}(),closestPointToPoint:function(e,t){t===void 0&&(console.warn("THREE.Ray: .closestPointToPoint() target is now required"),t=new ee),t.subVectors(e,this.origin);var i=t.dot(this.direction);return i<0?t.copy(this.origin):t.copy(this.direction).multiplyScalar(i).add(this.origin)},distanceToPoint:function(e){return Math.sqrt(this.distanceSqToPoint(e))},distanceSqToPoint:function(){var e=new ee;return function(i){var n=e.subVectors(i,this.origin).dot(this.direction);return n<0?this.origin.distanceToSquared(i):(e.copy(this.direction).multiplyScalar(n).add(this.origin),e.distanceToSquared(i))}}(),distanceSqToSegment:function(){var e=new ee,t=new ee,i=new ee;return function(a,s,r,o){e.copy(a).add(s).multiplyScalar(.5),t.copy(s).sub(a).normalize(),i.copy(this.origin).sub(e);var 
l=a.distanceTo(s)*.5,c=-this.direction.dot(t),f=i.dot(this.direction),h=-i.dot(t),A=i.lengthSq(),m=Math.abs(1-c*c),F,y,k,C;if(m>0)if(F=c*h-f,y=c*f-h,C=l*m,F>=0)if(y>=-C)if(y<=C){var w=1/m;F*=w,y*=w,k=F*(F+c*y+2*f)+y*(c*F+y+2*h)+A}else y=l,F=Math.max(0,-(c*y+f)),k=-F*F+y*(y+2*h)+A;else y=-l,F=Math.max(0,-(c*y+f)),k=-F*F+y*(y+2*h)+A;else y<=-C?(F=Math.max(0,-(-c*l+f)),y=F>0?-l:Math.min(Math.max(-l,-h),l),k=-F*F+y*(y+2*h)+A):y<=C?(F=0,y=Math.min(Math.max(-l,-h),l),k=y*(y+2*h)+A):(F=Math.max(0,-(c*l+f)),y=F>0?l:Math.min(Math.max(-l,-h),l),k=-F*F+y*(y+2*h)+A);else y=c>0?-l:l,F=Math.max(0,-(c*y+f)),k=-F*F+y*(y+2*h)+A;return r&&r.copy(this.direction).multiplyScalar(F).add(this.origin),o&&o.copy(t).multiplyScalar(y
`),i=0;i<t.length;i++)t[i]=i+1+": "+t[i];return t.join(`
`)}function Yb(e,t,i){var n=e.createShader(t);return e.shaderSource(n,i),e.compileShader(n),e.getShaderParameter(n,e.COMPILE_STATUS)===!1&&console.error("THREE.WebGLShader: Shader couldn't compile."),e.getShaderInfoLog(n)!==""&&console.warn("THREE.WebGLShader: gl.getShaderInfoLog()",t===e.VERTEX_SHADER?"vertex":"fragment",e.getShaderInfoLog(n),gq(i)),n}var mq=0;function A4(e){switch(e){case ih:return["Linear","( value )"];case rj:return["sRGB","( value )"];case oj:return["RGBE","( value )"];case lj:return["RGBM","( value, 7.0 )"];case uj:return["RGBM","( value, 16.0 )"];case cj:return["RGBD","( value, 256.0 )"];case t4:return["Gamma","( value, float( GAMMA_FACTOR ) )"];default:throw new Error("unsupported encoding: "+e)}}function Od(e,t){var i=A4(t);return"vec4 "+e+"( vec4 value ) { return "+i[0]+"ToLinear"+i[1]+"; }"}function pq(e,t){var i=A4(t);return"vec4 "+e+"( vec4 value ) { return LinearTo"+i[0]+i[1]+"; }"}function _q(e,t){var i;switch(t){case Y3:i="Linear";break;case EQ:i="Reinhard";break;case BQ:i="Uncharted2";break;case SQ:i="OptimizedCineon";break;case DQ:i="ACESFilmic";break;default:throw new Error("unsupported toneMapping: "+t)}return"vec3 "+e+"( vec3 color ) { return "+i+"ToneMapping( color ); }"}function Fq(e,t,i){e=e||{};var n=[e.derivatives||t.envMapCubeUV||t.bumpMap||t.normalMap&&!t.objectSpaceNormalMap||t.flatShading?"#extension GL_OES_standard_derivatives : enable":"",(e.fragDepth||t.logarithmicDepthBuffer)&&i.get("EXT_frag_depth")?"#extension GL_EXT_frag_depth : enable":"",e.drawBuffers&&i.get("WEBGL_draw_buffers")?"#extension GL_EXT_draw_buffers : require":"",(e.shaderTextureLOD||t.envMap)&&i.get("EXT_shader_texture_lod")?"#extension GL_EXT_shader_texture_lod : enable":""];return n.filter(bu).join(`
`)}function bq(e){var t=[];for(var i in e){var n=e[i];n!==!1&&t.push("#define "+i+" "+n)}return t.join(`
`)}function vq(e,t){for(var i={},n=e.getProgramParameter(t,e.ACTIVE_ATTRIBUTES),a=0;a<n;a++){var s=e.getActiveAttrib(t,a),r=s.name;i[r]=e.getAttribLocation(t,r)}return i}function bu(e){return e!==""}function Xb(e,t){return e.replace(/NUM_DIR_LIGHTS/g,t.numDirLights).replace(/NUM_SPOT_LIGHTS/g,t.numSpotLights).replace(/NUM_RECT_AREA_LIGHTS/g,t.numRectAreaLights).replace(/NUM_POINT_LIGHTS/g,t.numPointLights).replace(/NUM_HEMI_LIGHTS/g,t.numHemiLights)}function Jb(e,t){return e.replace(/NUM_CLIPPING_PLANES/g,t.numClippingPlanes).replace(/UNION_CLIPPING_PLANES/g,t.numClippingPlanes-t.numClipIntersection)}function Tp(e){var t=/^[ \t]*#include +<([\w\d./]+)>/gm;function i(n,a){var s=jt[a];if(s===void 0)throw new Error("Can not resolve #include <"+a+">");return Tp(s)}return e.replace(t,i)}function Zb(e){var t=/#pragma unroll_loop[\s]+?for \( int i \= (\d+)\; i < (\d+)\; i \+\+ \) \{([\s\S]+?)(?=\})\}/g;function i(n,a,s,r){for(var o="",l=parseInt(a);l<parseInt(s);l++)o+=r.replace(/\[ i \]/g,"[ "+l+" ]");return o}return e.replace(t,i)}function yq(e,t,i,n,a,s,r){var o=e.context,l=n.defines,c=a.vertexShader,f=a.fragmentShader,h="SHADOWMAP_TYPE_BASIC";s.shadowMapType===j3?h="SHADOWMAP_TYPE_PCF":s.shadowMapType===sQ&&(h="SHADOWMAP_TYPE_PCF_SOFT");var A="ENVMAP_TYPE_CUBE",m="ENVMAP_MODE_REFLECTION",F="ENVMAP_BLENDING_MULTIPLY";if(s.envMap){switch(n.envMap.mapping){case S1:case wp:A="ENVMAP_TYPE_CUBE";break;case D1:case x1:A="ENVMAP_TYPE_CUBE_UV";break;case X3:case Cp:A="ENVMAP_TYPE_EQUIREC";break;case J3:A="ENVMAP_TYPE_SPHERE";break}switch(n.envMap.mapping){case wp:case Cp:m="ENVMAP_MODE_REFRACTION";break}switch(n.combine){case IA:F="ENVMAP_BLENDING_MULTIPLY";break;case CQ:F="ENVMAP_BLENDING_MIX";break;case kQ:F="ENVMAP_BLENDING_ADD";break}}var y=e.gammaFactor>0?e.gammaFactor:1,k=r.isWebGL2?"":Fq(n.extensions,s,t),C=bq(l),w=o.createProgram(),B,S;if(n.isRawShaderMaterial?(B=[C].filter(bu).join(`
`),B.length>0&&(B+=`
`),S=[k,C].filter(bu).join(`
`),S.length>0&&(S+=`
`)):(B=["precision "+s.precision+" float;","precision "+s.precision+" int;","#define SHADER_NAME "+a.name,C,s.supportsVertexTextures?"#define VERTEX_TEXTURES":"","#define GAMMA_FACTOR "+y,"#define MAX_BONES "+s.maxBones,s.useFog&&s.fog?"#define USE_FOG":"",s.useFog&&s.fogExp?"#define FOG_EXP2":"",s.map?"#define USE_MAP":"",s.envMap?"#define USE_ENVMAP":"",s.envMap?"#define "+m:"",s.lightMap?"#define USE_LIGHTMAP":"",s.aoMap?"#define USE_AOMAP":"",s.emissiveMap?"#define USE_EMISSIVEMAP":"",s.bumpMap?"#define USE_BUMPMAP":"",s.normalMap?"#define USE_NORMALMAP":"",s.normalMap&&s.objectSpaceNormalMap?"#define OBJECTSPACE_NORMALMAP":"",s.displacementMap&&s.supportsVertexTextures?"#define USE_DISPLACEMENTMAP":"",s.specularMap?"#define USE_SPECULARMAP":"",s.roughnessMap?"#define USE_ROUGHNESSMAP":"",s.metalnessMap?"#define USE_METALNESSMAP":"",s.alphaMap?"#define USE_ALPHAMAP":"",s.vertexColors?"#define USE_COLOR":"",s.flatShading?"#define FLAT_SHADED":"",s.skinning?"#define USE_SKINNING":"",s.useVertexTexture?"#define BONE_TEXTURE":"",s.morphTargets?"#define USE_MORPHTARGETS":"",s.morphNormals&&s.flatShading===!1?"#define USE_MORPHNORMALS":"",s.doubleSided?"#define DOUBLE_SIDED":"",s.flipSided?"#define FLIP_SIDED":"",s.shadowMapEnabled?"#define USE_SHADOWMAP":"",s.shadowMapEnabled?"#define "+h:"",s.sizeAttenuation?"#define USE_SIZEATTENUATION":"",s.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"",s.logarithmicDepthBuffer&&(r.isWebGL2||t.get("EXT_frag_depth"))?"#define USE_LOGDEPTHBUF_EXT":"","uniform mat4 modelMatrix;","uniform mat4 modelViewMatrix;","uniform mat4 projectionMatrix;","uniform mat4 viewMatrix;","uniform mat3 normalMatrix;","uniform vec3 cameraPosition;","attribute vec3 position;","attribute vec3 normal;","attribute vec2 uv;","#ifdef USE_COLOR"," attribute vec3 color;","#endif","#ifdef USE_MORPHTARGETS"," attribute vec3 morphTarget0;"," attribute vec3 morphTarget1;"," attribute vec3 morphTarget2;"," attribute vec3 morphTarget3;"," #ifdef USE_MORPHNORMALS"," attribute vec3 morphNormal0;"," attribute vec3 morphNormal1;"," attribute vec3 morphNormal2;"," attribute vec3 morphNormal3;"," #else"," attribute vec3 morphTarget4;"," attribute vec3 morphTarget5;"," attribute vec3 morphTarget6;"," attribute vec3 morphTarget7;"," #endif","#endif","#ifdef USE_SKINNING"," attribute vec4 skinIndex;"," attribute vec4 skinWeight;","#endif",`
`].filter(bu).join(`
`),S=[k,"precision "+s.precision+" float;","precision "+s.precision+" int;","#define SHADER_NAME "+a.name,C,s.alphaTest?"#define ALPHATEST "+s.alphaTest+(s.alphaTest%1?"":".0"):"","#define GAMMA_FACTOR "+y,s.useFog&&s.fog?"#define USE_FOG":"",s.useFog&&s.fogExp?"#define FOG_EXP2":"",s.map?"#define USE_MAP":"",s.matcap?"#define USE_MATCAP":"",s.envMap?"#define USE_ENVMAP":"",s.envMap?"#define "+A:"",s.envMap?"#define "+m:"",s.envMap?"#define "+F:"",s.lightMap?"#define USE_LIGHTMAP":"",s.aoMap?"#define USE_AOMAP":"",s.emissiveMap?"#define USE_EMISSIVEMAP":"",s.bumpMap?"#define USE_BUMPMAP":"",s.normalMap?"#define USE_NORMALMAP":"",s.normalMap&&s.objectSpaceNormalMap?"#define OBJECTSPACE_NORMALMAP":"",s.specularMap?"#define USE_SPECULARMAP":"",s.roughnessMap?"#define USE_ROUGHNESSMAP":"",s.metalnessMap?"#define USE_METALNESSMAP":"",s.alphaMap?"#define USE_ALPHAMAP":"",s.vertexColors?"#define USE_COLOR":"",s.gradientMap?"#define USE_GRADIENTMAP":"",s.flatShading?"#define FLAT_SHADED":"",s.doubleSided?"#define DOUBLE_SIDED":"",s.flipSided?"#define FLIP_SIDED":"",s.shadowMapEnabled?"#define USE_SHADOWMAP":"",s.shadowMapEnabled?"#define "+h:"",s.premultipliedAlpha?"#define PREMULTIPLIED_ALPHA":"",s.physicallyCorrectLights?"#define PHYSICALLY_CORRECT_LIGHTS":"",s.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"",s.logarithmicDepthBuffer&&(r.isWebGL2||t.get("EXT_frag_depth"))?"#define USE_LOGDEPTHBUF_EXT":"",s.envMap&&(r.isWebGL2||t.get("EXT_shader_texture_lod"))?"#define TEXTURE_LOD_EXT":"","uniform mat4 viewMatrix;","uniform vec3 cameraPosition;",s.toneMapping!==am?"#define TONE_MAPPING":"",s.toneMapping!==am?jt.tonemapping_pars_fragment:"",s.toneMapping!==am?_q("toneMapping",s.toneMapping):"",s.dithering?"#define DITHERING":"",s.outputEncoding||s.mapEncoding||s.matcapEncoding||s.envMapEncoding||s.emissiveMapEncoding?jt.encodings_pars_fragment:"",s.mapEncoding?Od("mapTexelToLinear",s.mapEncoding):"",s.matcapEncoding?Od("matcapTexelToLinear",s.matcapEncoding):"",s.envMapEncoding?Od("envMapTexelToLinear",s.envMapEncoding):"",s.emissiveMapEncoding?Od("emissiveMapTexelToLinear",s.emissiveMapEncoding):"",s.outputEncoding?pq("linearToOutputTexel",s.outputEncoding):"",s.depthPacking?"#define DEPTH_PACKING "+n.depthPacking:"",`
`].filter(bu).join(`
`)),c=Tp(c),c=Xb(c,s),c=Jb(c,s),f=Tp(f),f=Xb(f,s),f=Jb(f,s),c=Zb(c),f=Zb(f),r.isWebGL2&&!n.isRawShaderMaterial){var U=!1,N=/^\s*#version\s+300\s+es\s*\n/;n.isShaderMaterial&&c.match(N)!==null&&f.match(N)!==null&&(U=!0,c=c.replace(N,""),f=f.replace(N,"")),B=[`#version 300 es
`,"#define attribute in","#define varying out","#define texture2D texture"].join(`
`)+`
`+B,S=[`#version 300 es
`,"#define varying in",U?"":"out highp vec4 pc_fragColor;",U?"":"#define gl_FragColor pc_fragColor","#define gl_FragDepthEXT gl_FragDepth","#define texture2D texture","#define textureCube texture","#define texture2DProj textureProj","#define texture2DLodEXT textureLod","#define texture2DProjLodEXT textureProjLod","#define textureCubeLodEXT textureLod","#define texture2DGradEXT textureGrad","#define texture2DProjGradEXT textureProjGrad","#define textureCubeGradEXT textureGrad"].join(`
`)+`
`+S}var z=B+c,Q=S+f,R=Yb(o,o.VERTEX_SHADER,z),q=Yb(o,o.FRAGMENT_SHADER,Q);o.attachShader(w,R),o.attachShader(w,q),n.index0AttributeName!==void 0?o.bindAttribLocation(w,0,n.index0AttributeName):s.morphTargets===!0&&o.bindAttribLocation(w,0,"position"),o.linkProgram(w);var J=o.getProgramInfoLog(w).trim(),ae=o.getShaderInfoLog(R).trim(),Fe=o.getShaderInfoLog(q).trim(),ge=!0,le=!0;o.getProgramParameter(w,o.LINK_STATUS)===!1?(ge=!1,console.error("THREE.WebGLProgram: shader error: ",o.getError(),"gl.VALIDATE_STATUS",o.getProgramParameter(w,o.VALIDATE_STATUS),"gl.getProgramInfoLog",J,ae,Fe)):J!==""?console.warn("THREE.WebGLProgram: gl.getProgramInfoLog()",J):(ae===""||Fe==="")&&(le=!1),le&&(this.diagnostics={runnable:ge,material:n,programLog:J,vertexShader:{log:ae,prefix:B},fragmentShader:{log:Fe,prefix:S}}),o.deleteShader(R),o.deleteShader(q);var re;this.getUniforms=function(){return re===void 0&&(re=new pr(o,w,e)),re};var pe;return this.getAttributes=function(){return pe===void 0&&(pe=vq(o,w)),pe},this.destroy=function(){o.deleteProgram(w),this.program=void 0},Object.defineProperties(this,{uniforms:{get:function(){return console.warn("THREE.WebGLProgram: .uniforms is now .getUniforms()."),this.getUniforms()}},attributes:{get:function(){return console.warn("THREE.WebGLProgram: .attributes is now .getAttributes()."),this.getAttributes()}}}),this.name=a.name,this.id=mq++,this.code=i,this.usedTimes=1,this.program=w,this.vertexShader=R,this.fragmentShader=q,this}function wq(e,t,i){var n=[],a={MeshDepthMaterial:"depth",MeshDistanceMaterial:"distanceRGBA",MeshNormalMaterial:"normal",MeshBasicMaterial:"basic",MeshLambertMaterial:"lambert",MeshPhongMaterial:"phong",MeshToonMaterial:"phong",MeshStandardMaterial:"physical",MeshPhysicalMaterial:"physical",MeshMatcapMaterial:"matcap",LineBasicMaterial:"basic",LineDashedMaterial:"dashed",PointsMaterial:"points",ShadowMaterial:"shadow",SpriteMaterial:"sprite"},s=["precision","supportsVertexTextures","map","mapEncoding","matcap","matcapEncoding","envMap","envMapMode","envMapEncoding","lightMap","aoMap","emissiveMap","emissiveMapEncoding","bumpMap","normalMap","objectSpaceNormalMap","displacementMap","specularMap","roughnessMap","metalnessMap","gradientMap","alphaMap","combine","vertexColors","fog","useFog","fogExp","flatShading","sizeAttenuation","logarithmicDepthBuffer","skinning","maxBones","useVertexTexture","morphTargets","morphNormals","maxMorphTargets","maxMorphNormals","premultipliedAlpha","numDirLights","numPointLights","numSpotLights","numHemiLights","numRectAreaLights","shadowMapEnabled","shadowMapType","toneMapping","physicallyCorrectLights","alphaTest","doubleSided","flipSided","numClippingPlanes","numClipIntersection","depthPacking","dithering"];function r(l){var c=l.skeleton,f=c.bones;if(i.floatVertexTextures)return 1024;var h=i.maxVertexUniforms,A=Math.floor((h-20)/4),m=Math.min(A,f.length);return m<f.length?(console.warn("THREE.WebGLRenderer: Skeleton has "+f.length+" bones. This GPU supports "+m+"."),0):m}function o(l,c){var f;return l?l.isTexture?f=l.encoding:l.isWebGLRenderTarget&&(console.warn("THREE.WebGLPrograms.getTextureEncodingFromMap: don't use render targets as textures. 
Use their .texture property instead."),f=l.texture.encoding):f=ih,f===ih&&c&&(f=t4),f}this.getParameters=function(l,c,f,h,A,m,F){var y=a[l.type],k=F.isSkinnedMesh?r(F):0,C=i.precision;l.precision!==null&&(C=i.getMaxPrecision(l.precision),C!==l.precision&&console.warn("THREE.WebGLProgram.getParameters:",l.precision,"not supported, using",C,"instead."));var w=e.getRenderTarget(),B={shaderID:y,precision:C,supportsVertexTextures:i.vertexTextures,outputEncoding:o(w?w.texture:null,e.gammaOutput),map:!!l.map,mapEncoding:o(l.map,e.gammaInput),matcap:!!l.matcap,matcapEncoding:o(l.matcap,e.gammaInput),envMap:!!l.envMap,envMapMode:l.envMap&&l.envMap.mapping,envMapEncoding:o(l.envMap,e.gammaInput),envMapCubeUV:!!l.envMap&&(l.envMap.mapping===D1||l.envMap.mapping===x1),lightMap:!!l.lightMap,aoMap:!!l.aoMap,emissiveMap:!!l.emissiveMap,emissiveMapEncoding:o(l.emissiveMap,e.gammaInput),bumpMap:!!l.bumpMap,normalM
Object.assign(Aa.prototype,{beforeStart_:Aa.prototype.copySampleValue_,afterEnd_:Aa.prototype.copySampleValue_});function Op(e,t,i,n){Aa.call(this,e,t,i,n),this._weightPrev=-0,this._offsetPrev=-0,this._weightNext=-0,this._offsetNext=-0}Op.prototype=Object.assign(Object.create(Aa.prototype),{constructor:Op,DefaultSettings_:{endingStart:pl,endingEnd:pl},intervalChanged_:function(e,t,i){var n=this.parameterPositions,a=e-2,s=e+1,r=n[a],o=n[s];if(r===void 0)switch(this.getSettings_().endingStart){case $o:a=e,r=2*t-i;break;case th:a=n.length-2,r=t+n[a]-n[a+1];break;default:a=e,r=i}if(o===void 0)switch(this.getSettings_().endingEnd){case $o:s=e,o=2*i-t;break;case th:s=1,o=i+n[1]-n[0];break;default:s=e-1,o=t}var l=(i-t)*.5,c=this.valueSize;this._weightPrev=l/(t-r),this._weightNext=l/(o-i),this._offsetPrev=a*c,this._offsetNext=s*c},interpolate_:function(e,t,i,n){for(var a=this.resultBuffer,s=this.sampleValues,r=this.valueSize,o=e*r,l=o-r,c=this._offsetPrev,f=this._offsetNext,h=this._weightPrev,A=this._weightNext,m=(i-t)/(n-t),F=m*m,y=F*m,k=-h*y+2*h*F-h*m,C=(1+h)*y+(-1.5-2*h)*F+(-.5+h)*m+1,w=(-1-A)*y+(1.5+A)*F+.5*m,B=A*y-A*F,S=0;S!==r;++S)a[S]=k*s[c+S]+C*s[l+S]+w*s[o+S]+B*s[f+S];return a}});function Ih(e,t,i,n){Aa.call(this,e,t,i,n)}Ih.prototype=Object.assign(Object.create(Aa.prototype),{constructor:Ih,interpolate_:function(e,t,i,n){for(var a=this.resultBuffer,s=this.sampleValues,r=this.valueSize,o=e*r,l=o-r,c=(i-t)/(n-t),f=1-c,h=0;h!==r;++h)a[h]=s[l+h]*f+s[o+h]*c;return a}});function Np(e,t,i,n){Aa.call(this,e,t,i,n)}Np.prototype=Object.assign(Object.create(Aa.prototype),{constructor:Np,interpolate_:function(e){return this.copySampleValue_(e-1)}});function vn(e,t,i,n){if(e===void 0)throw new Error("THREE.KeyframeTrack: track name is undefined");if(t===void 0||t.length===0)throw new Error("THREE.KeyframeTrack: no keyframes in track named "+e);this.name=e,this.times=xn.convertArray(t,this.TimeBufferType),this.values=xn.convertArray(i,this.ValueBufferType),this.setInterpolation(n||this.DefaultInterpolation)}Object.assign(vn,{toJSON:function(e){var t=e.constructor,i;if(t.toJSON!==void 0)i=t.toJSON(e);else{i={name:e.name,times:xn.convertArray(e.times,Array),values:xn.convertArray(e.values,Array)};var n=e.getInterpolation();n!==e.DefaultInterpolation&&(i.interpolation=n)}return i.type=e.ValueTypeName,i}});Object.assign(vn.prototype,{constructor:vn,TimeBufferType:Float32Array,ValueBufferType:Float32Array,DefaultInterpolation:Sf,InterpolantFactoryMethodDiscrete:function(e){return new Np(this.times,this.values,this.getValueSize(),e)},InterpolantFactoryMethodLinear:function(e){return new Ih(this.times,this.values,this.getValueSize(),e)},InterpolantFactoryMethodSmooth:function(e){return new Op(this.times,this.values,this.getValueSize(),e)},setInterpolation:function(e){var t;switch(e){case eh:t=this.InterpolantFactoryMethodDiscrete;break;case Sf:t=this.InterpolantFactoryMethodLinear;break;case sm:t=this.InterpolantFactoryMethodSmooth;break}if(t===void 0){var i="unsupported interpolation for "+this.ValueTypeName+" keyframe track named "+this.name;if(this.createInterpolant===void 0)if(e!==this.DefaultInterpolation)this.setInterpolation(this.DefaultInterpolation);else throw new Error(i);return console.warn("THREE.KeyframeTrack:",i),this}return this.createInterpolant=t,this},getInterpolation:function(){switch(this.createInterpolant){case this.InterpolantFactoryMethodDiscrete:return eh;case this.InterpolantFactoryMethodLinear:return Sf;case this.InterpolantFactoryMethodSmooth:return 
sm}},getValueSize:function(){return this.values.length/this.times.length},shift:function(e){if(e!==0)for(var t=this.times,i=0,n=t.length;i!==n;++i)t[i]+=e;return this},scale:function(e){if(e!==1)for(var t=this.times,i=0,n=t.length;i!==n;++i)t[i]*=e;return this},trim:function(e,t){for(var i=this.times,n=i.length,a=0,s=n-1;a!==n&&i[a]<e;)++a;for(;s!==-1&&i[s]>t;)--s;if(++s,a!==0||s!==n){a>=s&&(s=Math.max(s,1),a=s-1);var r=this.getValueSize();this.times=xn.arraySlice(i,a,s),this.values=xn.arraySlice(this.values,a*r,s*r)}return this},validate:function(){var e=!0,t=this.g
`)o=0,l-=s;else{var h=AV(f,a,o,l,i);o+=h.offsetX,r.push(h.path)}}return r}function AV(e,t,i,n,a){var s=a.glyphs[e]||a.glyphs["?"];if(s){var r=new D4,o,l,c,f,h,A,m,F;if(s.o)for(var y=s._cachedOutline||(s._cachedOutline=s.o.split(" ")),k=0,C=y.length;k<C;){var w=y[k++];switch(w){case"m":o=y[k++]*t+i,l=y[k++]*t+n,r.moveTo(o,l);break;case"l":o=y[k++]*t+i,l=y[k++]*t+n,r.lineTo(o,l);break;case"q":c=y[k++]*t+i,f=y[k++]*t+n,h=y[k++]*t+i,A=y[k++]*t+n,r.quadraticCurveTo(h,A,c,f);break;case"b":c=y[k++]*t+i,f=y[k++]*t+n,h=y[k++]*t+i,A=y[k++]*t+n,m=y[k++]*t+i,F=y[k++]*t+n,r.bezierCurveTo(h,A,m,F,c,f);break}}return{offsetX:s.ha*t,path:r}}}function gV(e){this.manager=e!==void 0?e:$n}Object.assign(gV.prototype,{load:function(e,t,i,n){var a=this,s=new Gs(this.manager);s.setPath(this.path),s.load(e,function(r){var o;try{o=JSON.parse(r)}catch{console.warn("THREE.FontLoader: typeface.js support is being deprecated. Use typeface.json instead."),o=JSON.parse(r.substring(65,r.length-2))}var l=a.parse(o);t&&t(l)},i,n)},parse:function(e){return new x4(e)},setPath:function(e){return this.path=e,this}});function Mh(){}Mh.Handlers={handlers:[],add:function(e,t){this.handlers.push(e,t)},get:function(e){for(var t=this.handlers,i=0,n=t.length;i<n;i+=2){var a=t[i],s=t[i+1];if(a.test(e))return s}return null}};Object.assign(Mh.prototype,{crossOrigin:"anonymous",onLoadStart:function(){},onLoadProgress:function(){},onLoadComplete:function(){},initMaterials:function(e,t,i){for(var n=[],a=0;a<e.length;++a)n[a]=this.createMaterial(e[a],t,i);return n},createMaterial:function(){var e={NoBlending:Nu,NormalBlending:ul,AdditiveBlending:Fp,SubtractiveBlending:bp,MultiplyBlending:vp,CustomBlending:V3},t=new _t,i=new U1,n=new N1;return function(s,r,o){var l={};function c(m,F,y,k,C){var w=r+m,B=Mh.Handlers.get(w),S;B!==null?S=B.load(w):(i.setCrossOrigin(o),S=i.load(w)),F!==void 0&&(S.repeat.fromArray(F),F[0]!==1&&(S.wrapS=ur),F[1]!==1&&(S.wrapT=ur)),y!==void 0&&S.offset.fromArray(y),k!==void 0&&(k[0]==="repeat"&&(S.wrapS=ur),k[0]==="mirror"&&(S.wrapS=ml),k[1]==="repeat"&&(S.wrapT=ur),k[1]==="mirror"&&(S.wrapT=ml)),C!==void 0&&(S.anisotropy=C);var U=Tt.generateUUID();return l[U]=S,U}var f={uuid:Tt.generateUUID(),type:"MeshLambertMaterial"};for(var h in s){var A=s[h];switch(h){case"DbgColor":case"DbgIndex":case"opticalDensity":case"illumination":break;case"DbgName":f.name=A;break;case"blending":f.blending=e[A];break;case"colorAmbient":case"mapAmbient":console.warn("THREE.Loader.createMaterial:",h,"is no longer 
supported.");break;case"colorDiffuse":f.color=t.fromArray(A).getHex();break;case"colorSpecular":f.specular=t.fromArray(A).getHex();break;case"colorEmissive":f.emissive=t.fromArray(A).getHex();break;case"specularCoef":f.shininess=A;break;case"shading":A.toLowerCase()==="basic"&&(f.type="MeshBasicMaterial"),A.toLowerCase()==="phong"&&(f.type="MeshPhongMaterial"),A.toLowerCase()==="standard"&&(f.type="MeshStandardMaterial");break;case"mapDiffuse":f.map=c(A,s.mapDiffuseRepeat,s.mapDiffuseOffset,s.mapDiffuseWrap,s.mapDiffuseAnisotropy);break;case"mapDiffuseRepeat":case"mapDiffuseOffset":case"mapDiffuseWrap":case"mapDiffuseAnisotropy":break;case"mapEmissive":f.emissiveMap=c(A,s.mapEmissiveRepeat,s.mapEmissiveOffset,s.mapEmissiveWrap,s.mapEmissiveAnisotropy);break;case"mapEmissiveRepeat":case"mapEmissiveOffset":case"mapEmissiveWrap":case"mapEmissiveAnisotropy":break;case"mapLight":f.lightMap=c(A,s.mapLightRepeat,s.mapLightOffset,s.mapLightWrap,s.mapLightAnisotropy);break;case"mapLightRepeat":case"mapLightOffset":case"mapLightWrap":case"mapLightAnisotropy":break;case"mapAO":f.aoMap=c(A,s.mapAORepeat,s.mapAOOffset,s.mapAOWrap,s.mapAOAnisotropy);break;case"mapAORepeat":case"mapAOOffset":case"mapAOWrap":case"mapAOAnisotropy":break;case"mapBump":f.bumpMap=c(A,s.mapBumpRepeat,s.mapBumpOffset,s.mapBumpWrap,s.mapBumpAnisotropy);break;case"mapBumpScale":f.bumpScale=A;break;case"mapBumpRepeat":case"mapBumpOffset":case"mapBumpWrap":case"mapBumpAnisotropy":break;case"mapNormal":f.normalMap=c(A,s.mapNormalRepeat,s.mapNormalOffset,s.mapNormalWrap,s.mapNormalAnisotropy);break;case"map
Object.assign(Tn.prototype,{_getValue_unbound:Tn.prototype.getValue,_setValue_unbound:Tn.prototype.setValue});function pV(){this.uuid=Tt.generateUUID(),this._objects=Array.prototype.slice.call(arguments),this.nCachedObjects_=0;var e={};this._indicesByUUID=e;for(var t=0,i=arguments.length;t!==i;++t)e[arguments[t].uuid]=t;this._paths=[],this._parsedPaths=[],this._bindings=[],this._bindingsIndicesByPath={};var n=this;this.stats={objects:{get total(){return n._objects.length},get inUse(){return this.total-n.nCachedObjects_}},get bindingsPerObject(){return n._bindings.length}}}Object.assign(pV.prototype,{isAnimationObjectGroup:!0,add:function(){for(var e=this._objects,t=e.length,i=this.nCachedObjects_,n=this._indicesByUUID,a=this._paths,s=this._parsedPaths,r=this._bindings,o=r.length,l=void 0,c=0,f=arguments.length;c!==f;++c){var h=arguments[c],A=h.uuid,m=n[A];if(m===void 0){m=t++,n[A]=m,e.push(h);for(var F=0,y=o;F!==y;++F)r[F].push(new Tn(h,a[F],s[F]))}else if(m<i){l=e[m];var k=--i,C=e[k];n[C.uuid]=m,e[m]=C,n[A]=k,e[k]=h;for(var F=0,y=o;F!==y;++F){var w=r[F],B=w[k],S=w[m];w[m]=B,S===void 0&&(S=new Tn(h,a[F],s[F])),w[k]=S}}else e[m]!==l&&console.error("THREE.AnimationObjectGroup: Different objects with the same UUID detected. Clean the caches or recreate your infrastructure when reloading scenes.")}this.nCachedObjects_=i},remove:function(){for(var e=this._objects,t=this.nCachedObjects_,i=this._indicesByUUID,n=this._bindings,a=n.length,s=0,r=arguments.length;s!==r;++s){var o=arguments[s],l=o.uuid,c=i[l];if(c!==void 0&&c>=t){var f=t++,h=e[f];i[h.uuid]=c,e[c]=h,i[l]=f,e[f]=o;for(var A=0,m=a;A!==m;++A){var F=n[A],y=F[f],k=F[c];F[c]=y,F[f]=k}}}this.nCachedObjects_=t},uncache:function(){for(var e=this._objects,t=e.length,i=this.nCachedObjects_,n=this._indicesByUUID,a=this._bindings,s=a.length,r=0,o=arguments.length;r!==o;++r){var l=arguments[r],c=l.uuid,f=n[c];if(f!==void 0)if(delete n[c],f<i){var h=--i,A=e[h],m=--t,F=e[m];n[A.uuid]=f,e[f]=A,n[F.uuid]=h,e[h]=F,e.pop();for(var y=0,k=s;y!==k;++y){var C=a[y],w=C[h],B=C[m];C[f]=w,C[h]=B,C.pop()}}else{var m=--t,F=e[m];n[F.uuid]=f,e[f]=F,e.pop();for(var y=0,k=s;y!==k;++y){var C=a[y];C[f]=C[m],C.pop()}}}this.nCachedObjects_=i},subscribe_:function(e,t){var i=this._bindingsIndicesByPath,n=i[e],a=this._bindings;if(n!==void 0)return a[n];var s=this._paths,r=this._parsedPaths,o=this._objects,l=o.length,c=this.nCachedObjects_,f=new Array(l);n=a.length,i[e]=n,s.push(e),r.push(t),a.push(f);for(var h=c,A=o.length;h!==A;++h){var m=o[h];f[h]=new Tn(m,e,t)}return f},unsubscribe_:function(e){var t=this._bindingsIndicesByPath,i=t[e];if(i!==void 0){var n=this._paths,a=this._parsedPaths,s=this._bindings,r=s.length-1,o=s[r],l=e[r];t[l]=i,s[i]=o,s.pop(),a[i]=a[r],a.pop(),n[i]=n[r],n.pop()}}});function U4(e,t,i){this._mixer=e,this._clip=t,this._localRoot=i||null;for(var n=t.tracks,a=n.length,s=new Array(a),r={endingStart:pl,endingEnd:pl},o=0;o!==a;++o){var l=n[o].createInterpolant(null);s[o]=l,l.settings=r}this._interpolantSettings=r,this._interpolants=s,this._propertyBindings=new Array(a),this._cacheIndex=null,this._byClipCacheIndex=null,this._timeScaleInterpolant=null,this._weightInterpolant=null,this.loop=ij,this._loopCount=-1,this._startTime=null,this.time=0,this.timeScale=1,this._effectiveTimeScale=1,this.weight=1,this._effectiveWeight=1,this.repetitions=1/0,this.paused=!1,this.enabled=!0,this.clampWhenFinished=!1,this.zeroSlopeAtStart=!0,this.zeroSlopeAtEnd=!0}Object.assign(U4.prototype,{play:function(){return 
this._mixer._activateAction(this),this},stop:function(){return this._mixer._deactivateAction(this),this.reset()},reset:function(){return this.paused=!1,this.enabled=!0,this.time=0,this._loopCount=-1,this._startTime=null,this.stopFading().stopWarping()},isRunning:function(){return this.enabled&&!this.paused&&this.timeScale!==0&&this._startTime===null&&this._mixer._isActiveAction(this)},isScheduled:function(){return this._mixer._isActiveAction(this)},startAt:function(e){return this._startTime=e,this},setLoop:function(e,t){return this.loop=e,this.repetitions=t,this},setEffectiveWeight:function(e){
const vV="",yV="";let jd=!1,bv,wV=0;function CV(e){const[t,i]=e.split(","),n=t.match(/:(.*?);/)[1],a=atob(i),s=new ArrayBuffer(a.length),r=new Uint8Array(s);for(let o=0;o<a.length;o++)r[o]=a.charCodeAt(o);return new Blob([s],{type:n})}class kV{constructor(){window.addEventListener("resize",this.resize.bind(this)),this.renderer=new R1,this.renderer.autoClear=!1,this.renderer.setSize(window.innerWidth,window.innerHeight),this.rtTexture=new Ja(window.innerWidth,window.innerHeight,{minFilter:Fn,magFilter:ln,format:Is,type:MA}),this.gameTexture=new vl,this.gameTexture.needsUpdate=!0,this.material=this.createShaderMaterial(),this.sceneRTT=this.createScene(),this.cameraRTT=this.createCamera(),this.appendRendererToDOM(),this.animate=this.animate.bind(this),requestAnimationFrame(this.animate)}createCamera(){const t=new Nc(window.innerWidth/-2,window.innerWidth/2,window.innerHeight/2,window.innerHeight/-2,-1e4,1e4);return t.setViewOffset(window.innerWidth,window.innerHeight,0,0,window.innerWidth,window.innerHeight),t}createScene(){const t=new oh,i=new go(window.innerWidth,window.innerHeight),n=new In(i,this.material);return n.position.z=-100,t.add(n),t}createShaderMaterial(){return new Yn({uniforms:{tDiffuse:{value:this.gameTexture}},vertexShader:`
varying vec2 vUv;
void main() {
vUv = vec2(uv.x, 1.0 - uv.y);
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}`,fragmentShader:`
varying vec2 vUv;
uniform sampler2D tDiffuse;
void main() {
gl_FragColor = texture2D(tDiffuse, vUv);
}`})}appendRendererToDOM(){const t=document.createElement("div");t.id="three-game-render",t.style.display="none",t.appendChild(this.renderer.domElement),document.body.appendChild(t)}resize(){this.cameraRTT=this.createCamera(),this.sceneRTT=this.createScene(),this.rtTexture.setSize(window.innerWidth,window.innerHeight),this.renderer.setSize(window.innerWidth,window.innerHeight)}animate(){if(requestAnimationFrame(this.animate),jd){this.renderer.clear(),this.renderer.render(this.sceneRTT,this.cameraRTT,this.rtTexture,!0);const t=new Uint8Array(window.innerWidth*window.innerHeight*4);this.renderer.readRenderTargetPixels(this.rtTexture,0,0,window.innerWidth,window.innerHeight,t),this.updateCanvas(t)}}updateCanvas(t){this.canvas||this.createTempCanvas(),this.canvas.style.display="inline",this.canvas.width=window.innerWidth,this.canvas.height=window.innerHeight;const i=this.canvas.getContext("2d"),n=new ImageData(new Uint8ClampedArray(t.buffer),window.innerWidth,window.innerHeight);i.putImageData(n,0,0)}createTempCanvas(){this.canvas=document.createElement("canvas"),this.canvas.style.display="none",document.body.appendChild(this.canvas)}renderToTarget(t){this.canvas=t,jd=!0}async requestScreenshot(t=vV,i=yV,n={}){if(!t||!i)return console.warn("URL or field is not defined."),null;this.canvas||this.createTempCanvas(),jd=!0,await new Promise(r=>setTimeout(r,10));const a=this.canvas.toDataURL("image/png"),s=new FormData;s.append(i,CV(a),"screenshot.png");try{const o=await(await fetch(t,{method:"POST",mode:"cors",headers:n,body:s})).json();return wV++,this.canvas.style.display="none",o}catch(r){return console.error("Screenshot-Upload error:",r),null}finally{}}stop(){jd=!1,this.canvas&&(this.canvas.style.display="none")}}setTimeout(()=>{bv=new kV,window.MainRender=bv},1e3);var fm={};/*!
* howler.js v2.2.4
* howlerjs.com
*
* (c) 2013-2020, James Simpson of GoldFire Studios
* goldfirestudios.com
*
* MIT License
*/var vv;function EV(){return vv||(vv=1,function(e){(function(){var t=function(){this.init()};t.prototype={init:function(){var h=this||i;return h._counter=1e3,h._html5AudioPool=[],h.html5PoolSize=10,h._codecs={},h._howls=[],h._muted=!1,h._volume=1,h._canPlayEvent="canplaythrough",h._navigator=typeof window<"u"&&window.navigator?window.navigator:null,h.masterGain=null,h.noAudio=!1,h.usingWebAudio=!0,h.autoSuspend=!0,h.ctx=null,h.autoUnlock=!0,h._setup(),h},volume:function(h){var A=this||i;if(h=parseFloat(h),A.ctx||f(),typeof h<"u"&&h>=0&&h<=1){if(A._volume=h,A._muted)return A;A.usingWebAudio&&A.masterGain.gain.setValueAtTime(h,i.ctx.currentTime);for(var m=0;m<A._howls.length;m++)if(!A._howls[m]._webAudio)for(var F=A._howls[m]._getSoundIds(),y=0;y<F.length;y++){var k=A._howls[m]._soundById(F[y]);k&&k._node&&(k._node.volume=k._volume*h)}return A}return A._volume},mute:function(h){var A=this||i;A.ctx||f(),A._muted=h,A.usingWebAudio&&A.masterGain.gain.setValueAtTime(h?0:A._volume,i.ctx.currentTime);for(var m=0;m<A._howls.length;m++)if(!A._howls[m]._webAudio)for(var F=A._howls[m]._getSoundIds(),y=0;y<F.length;y++){var k=A._howls[m]._soundById(F[y]);k&&k._node&&(k._node.muted=h?!0:k._muted)}return A},stop:function(){for(var h=this||i,A=0;A<h._howls.length;A++)h._howls[A].stop();return h},unload:function(){for(var h=this||i,A=h._howls.length-1;A>=0;A--)h._howls[A].unload();return h.usingWebAudio&&h.ctx&&typeof h.ctx.close<"u"&&(h.ctx.close(),h.ctx=null,f()),h},codecs:function(h){return(this||i)._codecs[h.replace(/^x-/,"")]},_setup:function(){var h=this||i;if(h.state=h.ctx&&h.ctx.state||"suspended",h._autoSuspend(),!h.usingWebAudio)if(typeof Audio<"u")try{var A=new Audio;typeof A.oncanplaythrough>"u"&&(h._canPlayEvent="canplay")}catch{h.noAudio=!0}else h.noAudio=!0;try{var A=new Audio;A.muted&&(h.noAudio=!0)}catch{}return h.noAudio||h._setupCodecs(),h},_setupCodecs:function(){var h=this||i,A=null;try{A=typeof Audio<"u"?new Audio:null}catch{return h}if(!A||typeof A.canPlayType!="function")return h;var m=A.canPlayType("audio/mpeg;").replace(/^no$/,""),F=h._navigator?h._navigator.userAgent:"",y=F.match(/OPR\/(\d+)/g),k=y&&parseInt(y[0].split("/")[1],10)<33,C=F.indexOf("Safari")!==-1&&F.indexOf("Chrome")===-1,w=F.match(/Version\/(.*?) 
/),B=C&&w&&parseInt(w[1],10)<15;return h._codecs={mp3:!!(!k&&(m||A.canPlayType("audio/mp3;").replace(/^no$/,""))),mpeg:!!m,opus:!!A.canPlayType('audio/ogg; codecs="opus"').replace(/^no$/,""),ogg:!!A.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,""),oga:!!A.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,""),wav:!!(A.canPlayType('audio/wav; codecs="1"')||A.canPlayType("audio/wav")).replace(/^no$/,""),aac:!!A.canPlayType("audio/aac;").replace(/^no$/,""),caf:!!A.canPlayType("audio/x-caf;").replace(/^no$/,""),m4a:!!(A.canPlayType("audio/x-m4a;")||A.canPlayType("audio/m4a;")||A.canPlayType("audio/aac;")).replace(/^no$/,""),m4b:!!(A.canPlayType("audio/x-m4b;")||A.canPlayType("audio/m4b;")||A.canPlayType("audio/aac;")).replace(/^no$/,""),mp4:!!(A.canPlayType("audio/x-mp4;")||A.canPlayType("audio/mp4;")||A.canPlayType("audio/aac;")).replace(/^no$/,""),weba:!!(!B&&A.canPlayType('audio/webm; codecs="vorbis"').replace(/^no$/,"")),webm:!!(!B&&A.canPlayType('audio/webm; codecs="vorbis"').replace(/^no$/,"")),dolby:!!A.canPlayType('audio/mp4; codecs="ec-3"').replace(/^no$/,""),flac:!!(A.canPlayType("audio/x-flac;")||A.canPlayType("audio/flac;")).replace(/^no$/,"")},h},_unlockAudio:function(){var h=this||i;if(!(h._audioUnlocked||!h.ctx)){h._audioUnlocked=!1,h.autoUnlock=!1,!h._mobileUnloaded&&h.ctx.sampleRate!==44100&&(h._mobileUnloaded=!0,h.unload()),h._scratchBuffer=h.ctx.createBuffer(1,1,22050);var A=function(m){for(;h._html5AudioPool.length<h.html5PoolSize;)try{var F=new Audio;F._unlocked=!0,h._releaseHtml5Audio(F)}catch{h.noAudio=!0;break}for(var y=0;y<h._howls.length;y++)if(!h._howls[y]._webAudio)for(var k=h._howls[y]._getSoundIds(),C=0;C<k.length;C++){var w=h._howls[y]._soundById(k[C]);w&&w._node&&!w._node._unlocked&&(w._node._unlocked=!0,w._node.load())}h._autoResume();var B=h.ctx.createBu
* Spatial Plugin - Adds support for stereo and 3D audio where Web Audio is supported.
*
* howler.js v2.2.4
* howlerjs.com
*
* (c) 2013-2020, James Simpson of GoldFire Studios
* goldfirestudios.com
*
* MIT License
*/(function(){HowlerGlobal.prototype._pos=[0,0,0],HowlerGlobal.prototype._orientation=[0,0,-1,0,1,0],HowlerGlobal.prototype.stereo=function(i){var n=this;if(!n.ctx||!n.ctx.listener)return n;for(var a=n._howls.length-1;a>=0;a--)n._howls[a].stereo(i);return n},HowlerGlobal.prototype.pos=function(i,n,a){var s=this;if(!s.ctx||!s.ctx.listener)return s;if(n=typeof n!="number"?s._pos[1]:n,a=typeof a!="number"?s._pos[2]:a,typeof i=="number")s._pos=[i,n,a],typeof s.ctx.listener.positionX<"u"?(s.ctx.listener.positionX.setTargetAtTime(s._pos[0],Howler.ctx.currentTime,.1),s.ctx.listener.positionY.setTargetAtTime(s._pos[1],Howler.ctx.currentTime,.1),s.ctx.listener.positionZ.setTargetAtTime(s._pos[2],Howler.ctx.currentTime,.1)):s.ctx.listener.setPosition(s._pos[0],s._pos[1],s._pos[2]);else return s._pos;return s},HowlerGlobal.prototype.orientation=function(i,n,a,s,r,o){var l=this;if(!l.ctx||!l.ctx.listener)return l;var c=l._orientation;if(n=typeof n!="number"?c[1]:n,a=typeof a!="number"?c[2]:a,s=typeof s!="number"?c[3]:s,r=typeof r!="number"?c[4]:r,o=typeof o!="number"?c[5]:o,typeof i=="number")l._orientation=[i,n,a,s,r,o],typeof l.ctx.listener.forwardX<"u"?(l.ctx.listener.forwardX.setTargetAtTime(i,Howler.ctx.currentTime,.1),l.ctx.listener.forwardY.setTargetAtTime(n,Howler.ctx.currentTime,.1),l.ctx.listener.forwardZ.setTargetAtTime(a,Howler.ctx.currentTime,.1),l.ctx.listener.upX.setTargetAtTime(s,Howler.ctx.currentTime,.1),l.ctx.listener.upY.setTargetAtTime(r,Howler.ctx.currentTime,.1),l.ctx.listener.upZ.setTargetAtTime(o,Howler.ctx.currentTime,.1)):l.ctx.listener.setOrientation(i,n,a,s,r,o);else return c;return l},Howl.prototype.init=function(i){return function(n){var a=this;return a._orientation=n.orientation||[1,0,0],a._stereo=n.stereo||null,a._pos=n.pos||null,a._pannerAttr={coneInnerAngle:typeof n.coneInnerAngle<"u"?n.coneInnerAngle:360,coneOuterAngle:typeof n.coneOuterAngle<"u"?n.coneOuterAngle:360,coneOuterGain:typeof n.coneOuterGain<"u"?n.coneOuterGain:0,distanceModel:typeof n.distanceModel<"u"?n.distanceModel:"inverse",maxDistance:typeof n.maxDistance<"u"?n.maxDistance:1e4,panningModel:typeof n.panningModel<"u"?n.panningModel:"HRTF",refDistance:typeof n.refDistance<"u"?n.refDistance:1,rolloffFactor:typeof n.rolloffFactor<"u"?n.rolloffFactor:1},a._onstereo=n.onstereo?[{fn:n.onstereo}]:[],a._onpos=n.onpos?[{fn:n.onpos}]:[],a._onorientation=n.onorientation?[{fn:n.onorientation}]:[],i.call(this,n)}}(Howl.prototype.init),Howl.prototype.stereo=function(i,n){var a=this;if(!a._webAudio)return a;if(a._state!=="loaded")return a._queue.push({event:"stereo",action:function(){a.stereo(i,n)}}),a;var s=typeof Howler.ctx.createStereoPanner>"u"?"spatial":"stereo";if(typeof n>"u")if(typeof i=="number")a._stereo=i,a._pos=[i,0,0];else return a._stereo;for(var r=a._getSoundIds(n),o=0;o<r.length;o++){var l=a._soundById(r[o]);if(l)if(typeof i=="number")l._stereo=i,l._pos=[i,0,0],l._node&&(l._pannerAttr.panningModel="equalpower",(!l._panner||!l._panner.pan)&&t(l,s),s==="spatial"?typeof l._panner.positionX<"u"?(l._panner.positionX.setValueAtTime(i,Howler.ctx.currentTime),l._panner.positionY.setValueAtTime(0,Howler.ctx.currentTime),l._panner.positionZ.setValueAtTime(0,Howler.ctx.currentTime)):l._panner.setPosition(i,0,0):l._panner.pan.setValueAtTime(i,Howler.ctx.currentTime)),a._emit("stereo",l._id);else return l._stereo}return a},Howl.prototype.pos=function(i,n,a,s){var r=this;if(!r._webAudio)return r;if(r._state!=="loaded")return r._queue.push({event:"pos",action:function(){r.pos(i,n,a,s)}}),r;if(n=typeof 
n!="number"?0:n,a=typeof a!="number"?-.5:a,typeof s>"u")if(typeof i=="number")r._pos=[i,n,a];else return r._pos;for(var o=r._getSoundIds(s),l=0;l<o.length;l++){var c=r._soundById(o[l]);if(c)if(typeof i=="number")c._pos=[i,n,a],c._node&&((!c._panner||c._panner.pan)&&t(c,"spatial"),typeof c._panner.positionX<"u"?(c._panner.positionX.setValueAtTime(i,Howler.ctx.currentTime),c._panner.positionY.setValueAtTime(n,Howler.ctx.currentTime),c._panner.positionZ.setValueAtTime(a,Howler.ctx.currentTime)):c._panner.setPosition(i,n,a)),r._emit("pos",c._id);else
* {@link https://github.com/muaz-khan/RecordRTC|RecordRTC} is a WebRTC JavaScript library for audio/video as well as screen activity recording. It supports Chrome, Firefox, Opera, Android, and Microsoft Edge. Platforms: Linux, Mac and Windows.
* @summary Record audio, video or screen inside the browser.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef RecordRTC
* @class
* @example
* var recorder = RecordRTC(mediaStream or [arrayOfMediaStream], {
* type: 'video', // audio or video or gif or canvas
* recorderType: MediaStreamRecorder || CanvasRecorder || StereoAudioRecorder || Etc
* });
* recorder.startRecording();
* @see For further information:
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc.
* @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, desiredSampRate: 16000, video: HTMLVideoElement, etc.}
*/function t(I,x){if(!I)throw"First parameter is required.";x=x||{type:"video"},x=new i(I,x);var O=this;function he(de){return x.disableLogs||console.log("RecordRTC version: ",O.version),de&&(x=new i(I,de)),x.disableLogs||console.log("started recording "+x.type+" stream."),oe?(oe.clearRecordedData(),oe.record(),Ee("recording"),O.recordingDuration&&ce(),O):(te(function(){O.recordingDuration&&ce()}),O)}function te(de){de&&(x.initCallback=function(){de(),de=x.initCallback=null});var De=new n(I,x);oe=new De(I,x),oe.record(),Ee("recording"),x.disableLogs||console.log("Initialized recorderType:",oe.constructor.name,"for output-type:",x.type)}function se(de){if(de=de||function(){},!oe){K();return}if(O.state==="paused"){O.resumeRecording(),setTimeout(function(){se(de)},1);return}O.state!=="recording"&&!x.disableLogs&&console.warn('Recording state should be: "recording", however current state is: ',O.state),x.disableLogs||console.log("Stopped recording "+x.type+" stream."),x.type!=="gif"?oe.stop(De):(oe.stop(),De()),Ee("stopped");function De(Ie){if(!oe){typeof de.call=="function"?de.call(O,""):de("");return}Object.keys(oe).forEach(function(Ue){typeof oe[Ue]!="function"&&(O[Ue]=oe[Ue])});var fe=oe.blob;if(!fe)if(Ie)oe.blob=fe=Ie;else throw"Recording failed.";if(fe&&!x.disableLogs&&console.log(fe.type,"->",C(fe.size)),de){var be;try{be=f.createObjectURL(fe)}catch{}typeof de.call=="function"?de.call(O,be):de(be)}x.autoWriteToDisk&&we(function(Ue){var Oe={};Oe[x.type+"Blob"]=Ue,ge.Store(Oe)})}}function Ae(){if(!oe){K();return}if(O.state!=="recording"){x.disableLogs||console.warn("Unable to pause the recording. Recording state: ",O.state);return}Ee("paused"),oe.pause(),x.disableLogs||console.log("Paused recording.")}function X(){if(!oe){K();return}if(O.state!=="paused"){x.disableLogs||console.warn("Unable to resume the recording. 
Recording state: ",O.state);return}Ee("recording"),oe.resume(),x.disableLogs||console.log("Resumed recording.")}function ve(de){postMessage(new FileReaderSync().readAsDataURL(de))}function we(de,De){if(!de)throw"Pass a callback function over getDataURL.";var Ie=De?De.blob:(oe||{}).blob;if(!Ie){x.disableLogs||console.warn("Blob encoder did not finish its job yet."),setTimeout(function(){we(de,De)},1e3);return}if(typeof Worker<"u"&&!navigator.mozGetUserMedia){var fe=Ue(ve);fe.onmessage=function(Oe){de(Oe.data)},fe.postMessage(Ie)}else{var be=new FileReader;be.readAsDataURL(Ie),be.onload=function(Oe){de(Oe.target.result)}}function Ue(Oe){try{var He=f.createObjectURL(new Blob([Oe.toString(),"this.onmessage = function (eee) {"+Oe.name+"(eee.data);}"],{type:"application/javascript"})),ze=new Worker(He);return f.revokeObjectURL(He),ze}catch{}}}function ce(de){if(de=de||0,O.state==="paused"){setTimeout(function(){ce(de)},1e3);return}if(O.state!=="stopped"){if(de>=O.recordingDuration){se(O.onRecordingStopped);return}de+=1e3,setTimeout(function(){ce(de)},1e3)}}function Ee(de){O&&(O.state=de,typeof O.onStateChanged.call=="function"?O.onStateChanged.call(O,de):O.onStateChanged(de))}var G='It seems that recorder is destroyed or "startRecording" is not invoked for '+x.type+" recorder.";function K(){x.disableLogs!==!0&&console.warn(G)}var oe,Ce={startRecording:he,stopRecording:se,pauseRecording:Ae,resumeRecording:X,initRecorder:te,setRecordingDuration:function(de,De){if(typeof de>"u")throw"recordingDuration is required.";if(typeof de!="number")throw"recordingDuration must be a number.";return O.recordingDuration=de,O.onRecordingStopped=De||function(){},{onRecordingStopped:function(Ie){O.onRecordingStopped=Ie}}},clearRecordedData:function(){if(!oe){K();return}oe.clearRecordedData(),x.disableLogs||console.log("Cleared old recorded data.")},getBlob:function(){if(!oe){K();return}return oe.blob},getDataURL:we,toURL:function(){if(!oe){K();return}return f.createObjectURL(oe.blob)},getInternalRecorder:function(){return oe},save:function(de){if(!oe){K();return}w(oe.blob,de)},getFromDisk:function(de){if(!oe){K();return}t.getFromDisk(x.type,de)},setAdvertisementArray:function(de){x.advertisement=[];for(var De=de.length,Ie=0;Ie<De;I
* {@link RecordRTCConfiguration} is an inner/private helper for {@link RecordRTC}.
* @summary It configures the 2nd parameter passed over {@link RecordRTC} and returns a valid "config" object.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef RecordRTCConfiguration
* @class
* @example
* var options = RecordRTCConfiguration(mediaStream, options);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, getNativeBlob:true, etc.}
*/function i(I,x){return!x.recorderType&&!x.type&&(x.audio&&x.video?x.type="video":x.audio&&!x.video&&(x.type="audio")),x.recorderType&&!x.type&&(x.recorderType===ae||x.recorderType===J||typeof V<"u"&&x.recorderType===V?x.type="video":x.recorderType===le?x.type="gif":x.recorderType===q?x.type="audio":x.recorderType===R&&(S(I,"audio").length&&S(I,"video").length||!S(I,"audio").length&&S(I,"video").length?x.type="video":S(I,"audio").length&&!S(I,"video").length&&(x.type="audio"))),typeof R<"u"&&typeof MediaRecorder<"u"&&"requestData"in MediaRecorder.prototype&&(x.mimeType||(x.mimeType="video/webm"),x.type||(x.type=x.mimeType.split("/")[0]),x.bitsPerSecond),x.type||(x.mimeType&&(x.type=x.mimeType.split("/")[0]),x.type||(x.type="audio")),x}/**
* {@link GetRecorderType} is an inner/private helper for {@link RecordRTC}.
* @summary It returns the best recorder-type available for your browser.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef GetRecorderType
* @class
* @example
* var RecorderType = GetRecorderType(options);
* var recorder = new RecorderType(options);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}
*/function n(I,x){var O;return(F||h||A)&&(O=q),typeof MediaRecorder<"u"&&"requestData"in MediaRecorder.prototype&&!F&&(O=R),x.type==="video"&&(F||A)&&(O=ae,typeof V<"u"&&typeof ReadableStream<"u"&&(O=V)),x.type==="gif"&&(O=le),x.type==="canvas"&&(O=J),Q()&&O!==J&&O!==le&&typeof MediaRecorder<"u"&&"requestData"in MediaRecorder.prototype&&(S(I,"video").length||S(I,"audio").length)&&(x.type==="audio"?typeof MediaRecorder.isTypeSupported=="function"&&MediaRecorder.isTypeSupported("audio/webm")&&(O=R):typeof MediaRecorder.isTypeSupported=="function"&&MediaRecorder.isTypeSupported("video/webm")&&(O=R)),I instanceof Array&&I.length&&(O=pe),x.recorderType&&(O=x.recorderType),!x.disableLogs&&O&&O.name&&console.log("Using recorderType:",O.name||O.constructor.name),!O&&y&&(O=R),O}/**
* MRecordRTC runs on top of {@link RecordRTC} to bring multiple recordings in a single place by providing a simple API.
* @summary MRecordRTC stands for "Multiple-RecordRTC".
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef MRecordRTC
* @class
* @example
* var recorder = new MRecordRTC();
* recorder.addStream(MediaStream);
* recorder.mediaType = {
* audio: true, // or StereoAudioRecorder or MediaStreamRecorder
* video: true, // or WhammyRecorder or MediaStreamRecorder or WebAssemblyRecorder or CanvasRecorder
* gif: true // or GifRecorder
* };
* // mimeType is optional and should be set only in advanced cases.
* recorder.mimeType = {
* audio: 'audio/wav',
* video: 'video/webm',
* gif: 'image/gif'
* };
* recorder.startRecording();
* @see For further information:
* @see {@link https://github.com/muaz-khan/RecordRTC/tree/master/MRecordRTC|MRecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @requires {@link RecordRTC}
*/function a(I){this.addStream=function(x){x&&(I=x)},this.mediaType={audio:!0,video:!0},this.startRecording=function(){var x=this.mediaType,O,he=this.mimeType||{audio:null,video:null,gif:null};if(typeof x.audio!="function"&&Q()&&!S(I,"audio").length&&(x.audio=!1),typeof x.video!="function"&&Q()&&!S(I,"video").length&&(x.video=!1),typeof x.gif!="function"&&Q()&&!S(I,"video").length&&(x.gif=!1),!x.audio&&!x.video&&!x.gif)throw"MediaStream must have either audio or video tracks.";if(x.audio&&(O=null,typeof x.audio=="function"&&(O=x.audio),this.audioRecorder=new t(I,{type:"audio",bufferSize:this.bufferSize,sampleRate:this.sampleRate,numberOfAudioChannels:this.numberOfAudioChannels||2,disableLogs:this.disableLogs,recorderType:O,mimeType:he.audio,timeSlice:this.timeSlice,onTimeStamp:this.onTimeStamp}),x.video||this.audioRecorder.startRecording()),x.video){O=null,typeof x.video=="function"&&(O=x.video);var te=I;if(Q()&&x.audio&&typeof x.audio=="function"){var se=S(I,"video")[0];m?(te=new k,te.addTrack(se),O&&O===ae&&(O=R)):(te=new k,te.addTrack(se))}this.videoRecorder=new t(te,{type:"video",video:this.video,canvas:this.canvas,frameInterval:this.frameInterval||10,disableLogs:this.disableLogs,recorderType:O,mimeType:he.video,timeSlice:this.timeSlice,onTimeStamp:this.onTimeStamp,workerPath:this.workerPath,webAssemblyPath:this.webAssemblyPath,frameRate:this.frameRate,bitrate:this.bitrate}),x.audio||this.videoRecorder.startRecording()}if(x.audio&&x.video){var Ae=this,X=Q()===!0;(x.audio instanceof q&&x.video||x.audio!==!0&&x.video!==!0&&x.audio!==x.video)&&(X=!1),X===!0?(Ae.audioRecorder=null,Ae.videoRecorder.startRecording()):Ae.videoRecorder.initRecorder(function(){Ae.audioRecorder.initRecorder(function(){Ae.videoRecorder.startRecording(),Ae.audioRecorder.startRecording()})})}x.gif&&(O=null,typeof x.gif=="function"&&(O=x.gif),this.gifRecorder=new t(I,{type:"gif",frameRate:this.frameRate||200,quality:this.quality||10,disableLogs:this.disableLogs,recorderType:O,mimeType:he.gif}),this.gifRecorder.startRecording())},this.stopRecording=function(x){x=x||function(){},this.audioRecorder&&this.audioRecorder.stopRecording(function(O){x(O,"audio")}),this.videoRecorder&&this.videoRecorder.stopRecording(function(O){x(O,"video")}),this.gifRecorder&&this.gifRecorder.stopRecording(function(O){x(O,"gif")})},this.pauseRecording=function(){this.audioRecorder&&this.audioRecorder.pauseRecording(),this.videoRecorder&&this.videoRecorder.pauseRecording(),this.gifRecorder&&this.gifRecorder.pauseRecording()},this.resumeRecording=function(){this.audioRecorder&&this.audioRecorder.resumeRecording(),this.videoRecorder&&this.videoRecorder.resumeRecording(),this.gifRecorder&&this.gifRecorder.resumeRecording()},this.getBlob=function(x){var O={};return this.audioRecorder&&(O.audio=this.audioRecorder.getBlob()),this.videoRecorder&&(O.video=this.videoRecorder.getBlob()),this.gifRecorder&&(O.gif=this.gifRecorder.getBlob()),x&&x(O),O},this.destroy=function(){this.audioRecorder&&(this.audioRecorder.destroy(),this.audioRecorder=null),this.videoRecorder&&(this.videoRecorder.destroy(),this.videoRecorder=null),this.gifRecorder&&(this.gifRecorder.destroy(),this.gifRecorder=null)},this.getDataURL=function(x){this.getBlob(function(te){te.audio&&te.video?O(te.audio,function(se){O(te.video,function(Ae){x({audio:se,video:Ae})})}):te.audio?O(te.audio,function(se){x({audio:se})}):te.video&&O(te.video,function(se){x({video:se})})});function O(te,se){if(typeof Worker<"u"){var Ae=he(function(we){postMessage(new 
FileReaderSync().readAsDataURL(we))});Ae.onmessage=function(ve){se(ve.data)},Ae.postMessage(te)}else{var X=new FileReader;X.readAsDataURL(te),X.onload=function(ve){se(ve.target.result)}}}function he(te){var se=f.createObjectURL(new Blob([te.toString(),"this.onmessage = function (eee) {"+te.name+"(eee.data);}"],{type:"application/javascript"})),Ae=new Worker(se),X;if(typeof f<"u")X=f;else if(typeof webkitURL<"u")X=webkitURL;else throw"Neither URL nor webkitURL detected.";return X.revokeObjectURL(se),Ae}},this.writeToDisk=function(){t.writeToDisk({audio:this.audioRecorder,vid
* Storage is a standalone object used by {@link RecordRTC} to store reusable objects e.g. "new AudioContext".
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @example
* Storage.AudioContext === webkitAudioContext
* @property {webkitAudioContext} AudioContext - Keeps a reference to AudioContext object.
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
*/var z={};typeof c<"u"?z.AudioContext=c:typeof webkitAudioContext<"u"&&(z.AudioContext=webkitAudioContext),typeof t<"u"&&(t.Storage=z);function Q(){if(m||y||h)return!0;var I=navigator.userAgent,x=""+parseFloat(navigator.appVersion),O=parseInt(navigator.appVersion,10),he,te;return(F||A)&&(he=I.indexOf("Chrome"),x=I.substring(he+7)),(te=x.indexOf(";"))!==-1&&(x=x.substring(0,te)),(te=x.indexOf(" "))!==-1&&(x=x.substring(0,te)),O=parseInt(""+x,10),isNaN(O)&&(x=""+parseFloat(navigator.appVersion),O=parseInt(navigator.appVersion,10)),O>=49}/**
* MediaStreamRecorder is an abstraction layer for {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}. It is used by {@link RecordRTC} to record MediaStream(s) in both Chrome and Firefox.
* @summary Runs on top of {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://github.com/muaz-khan|Muaz Khan}
* @typedef MediaStreamRecorder
* @class
* @example
* var config = {
* mimeType: 'video/webm', // vp8, vp9, h264, mkv, opus/vorbis
* audioBitsPerSecond : 256 * 8 * 1024,
* videoBitsPerSecond : 256 * 8 * 1024,
* bitsPerSecond: 256 * 8 * 1024, // if this is provided, skip above two
* checkForInactiveTracks: true,
* timeSlice: 1000, // concatenate intervals based blobs
* ondataavailable: function() {} // get intervals based blobs
* }
* var recorder = new MediaStreamRecorder(mediaStream, config);
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
*
* // or
* var blob = recorder.blob;
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {disableLogs:true, initCallback: function, mimeType: "video/webm", timeSlice: 1000}
* @throws Will throw an error if the first argument "MediaStream" is missing. Also throws an error if the "MediaRecorder API" is not supported by the browser.
*/function R(I,x){var ce=this;if(typeof I>"u")throw'First argument "MediaStream" is required.';if(typeof MediaRecorder>"u")throw"Your browser does not support the Media Recorder API. Please try other modules e.g. WhammyRecorder or StereoAudioRecorder.";if(x=x||{mimeType:"video/webm"},x.type==="audio"){if(S(I,"video").length&&S(I,"audio").length){var O;navigator.mozGetUserMedia?(O=new k,O.addTrack(S(I,"audio")[0])):O=new k(S(I,"audio")),I=O}(!x.mimeType||x.mimeType.toString().toLowerCase().indexOf("audio")===-1)&&(x.mimeType=F?"audio/webm":"audio/ogg"),x.mimeType&&x.mimeType.toString().toLowerCase()!=="audio/ogg"&&navigator.mozGetUserMedia&&(x.mimeType="audio/ogg")}var he=[];this.getArrayOfBlobs=function(){return he},this.record=function(){ce.blob=null,ce.clearRecordedData(),ce.timestamps=[],we=[],he=[];var Ee=x;x.disableLogs||console.log("Passing following config over MediaRecorder API.",Ee),X&&(X=null),F&&!Q()&&(Ee="video/vp8"),typeof MediaRecorder.isTypeSupported=="function"&&Ee.mimeType&&(MediaRecorder.isTypeSupported(Ee.mimeType)||(x.disableLogs||console.warn("MediaRecorder API seems unable to record mimeType:",Ee.mimeType),Ee.mimeType=x.type==="audio"?"audio/webm":"video/webm"));try{X=new MediaRecorder(I,Ee),x.mimeType=Ee.mimeType}catch{X=new MediaRecorder(I)}Ee.mimeType&&!MediaRecorder.isTypeSupported&&"canRecordMimeType"in X&&X.canRecordMimeType(Ee.mimeType)===!1&&(x.disableLogs||console.warn("MediaRecorder API seems unable to record mimeType:",Ee.mimeType)),X.ondataavailable=function(G){if(G.data&&we.push("ondataavailable: "+C(G.data.size)),typeof x.timeSlice=="number"){if(G.data&&G.data.size&&(he.push(G.data),te(),typeof x.ondataavailable=="function")){var K=x.getNativeBlob?G.data:new Blob([G.data],{type:se(Ee)});x.ondataavailable(K)}return}if(!G.data||!G.data.size||G.data.size<100||ce.blob){ce.recordingCallback&&(ce.recordingCallback(new Blob([],{type:se(Ee)})),ce.recordingCallback=null);return}ce.blob=x.getNativeBlob?G.data:new Blob([G.data],{type:se(Ee)}),ce.recordingCallback&&(ce.recordingCallback(ce.blob),ce.recordingCallback=null)},X.onstart=function(){we.push("started")},X.onpause=function(){we.push("paused")},X.onresume=function(){we.push("resumed")},X.onstop=function(){we.push("stopped")},X.onerror=function(G){G&&(G.name||(G.name="UnknownError"),we.push("error: "+G),x.disableLogs||(G.name.toString().toLowerCase().indexOf("invalidstate")!==-1?console.error("The MediaRecorder is not in a state in which the proposed operation is allowed to be executed.",G):G.name.toString().toLowerCase().indexOf("notsupported")!==-1?console.error("MIME type (",Ee.mimeType,") is not supported.",G):G.name.toString().toLowerCase().indexOf("security")!==-1?console.error("MediaRecorder security error",G):G.name==="OutOfMemory"?console.error("The UA has exhaused the available memory. User agents SHOULD provide as much additional information as possible in the message attribute.",G):G.name==="IllegalStreamModification"?console.error("A modification to the stream has occurred that makes it impossible to continue recording. An example would be the addition of a Track while recording is occurring. User agents SHOULD provide as much additional information as possible in the message attribute.",G):G.name==="OtherRecordingError"?console.error("Used for an fatal error other than those listed above. 
User agents SHOULD provide as much additional information as possible in the message attribute.",G):G.name==="GenericError"?console.error("The UA cannot provide the codec or recording option that has been requested.",G):console.error("MediaRecorder Error",G)),function(K){if(!ce.manuallyStopped&&X&&X.state==="inactive"){delete x.timeslice,X.start(600*1e3);return}setTimeout(K,1e3)}(),X.state!=="inactive"&&X.state!=="stopped"&&X.stop())},typeof x.timeSlice=="number"?(te(),X.start(x.timeSlice)):X.start(36e5),x.initCallback&&x.initCallback()},this.timestamps=[];function te(){ce.timestamps.push(new Date().getTime()),typeof x.onTimeStamp=="function"&&x.onTimeStamp(ce.timestamps[ce.timestamps.length-1],ce.timestamps)}function se(Ee){return X&&X.mim
* StereoAudioRecorder is a standalone class used by {@link RecordRTC} to bring "stereo" audio-recording in Chrome.
* @summary JavaScript standalone object for stereo audio recording.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef StereoAudioRecorder
* @class
* @example
* var recorder = new StereoAudioRecorder(MediaStream, {
* sampleRate: 44100,
* bufferSize: 4096
* });
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {sampleRate: 44100, bufferSize: 4096, numberOfAudioChannels: 1, etc.}
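*
* Editor's sketch (not part of the upstream comment): mono WAV recording with down-sampling,
* using the numberOfAudioChannels and desiredSampRate options read by this recorder.
* `stream` and `audio` are assumed to exist in the surrounding scope.
*
* var recorder = new StereoAudioRecorder(stream, {
*     numberOfAudioChannels: 1,  // keep only the left channel
*     desiredSampRate: 16000,    // resample before the WAV file is built
*     bufferSize: 4096
* });
* recorder.record();
* recorder.stop(function(blob) {
*     audio.src = URL.createObjectURL(blob);  // blob has type "audio/wav"
* });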
*/function q(I,x){if(!S(I,"audio").length)throw"Your stream has no audio tracks.";x=x||{};var O=this,he=[],te=[],se=!1,Ae=0,X,ve=2,we=x.desiredSampRate;x.leftChannel===!0&&(ve=1),x.numberOfAudioChannels===1&&(ve=1),(!ve||ve<1)&&(ve=2),x.disableLogs||console.log("StereoAudioRecorder is set to record number of channels: "+ve),typeof x.checkForInactiveTracks>"u"&&(x.checkForInactiveTracks=!0);function ce(){if(x.checkForInactiveTracks===!1)return!0;if("active"in I){if(!I.active)return!1}else if("ended"in I&&I.ended)return!1;return!0}this.record=function(){if(ce()===!1)throw"Please make sure MediaStream is active.";Ie(),be=De=!1,se=!0,typeof x.timeSlice<"u"&&He()};function Ee(ze,Je){function tt(qe,Ze){var ht=qe.numberOfAudioChannels,$e=qe.leftBuffers.slice(0),Ft=qe.rightBuffers.slice(0),kt=qe.sampleRate,pt=qe.internalInterleavedLength,Lt=qe.desiredSampRate;ht===2&&($e=gi($e,pt),Ft=gi(Ft,pt),Lt&&($e=St($e,Lt,kt),Ft=St(Ft,Lt,kt))),ht===1&&($e=gi($e,pt),Lt&&($e=St($e,Lt,kt))),Lt&&(kt=Lt);function St(Ti,Mn,fn){var mi=Math.round(Ti.length*(Mn/fn)),nt=[],W=Number((Ti.length-1)/(mi-1));nt[0]=Ti[0];for(var ie=1;ie<mi-1;ie++){var _e=ie*W,Ve=Number(Math.floor(_e)).toFixed(),st=Number(Math.ceil(_e)).toFixed(),Rt=_e-Ve;nt[ie]=nn(Ti[Ve],Ti[st],Rt)}return nt[mi-1]=Ti[Ti.length-1],nt}function nn(Ti,Mn,fn){return Ti+(Mn-Ti)*fn}function gi(Ti,Mn){for(var fn=new Float64Array(Mn),mi=0,nt=Ti.length,W=0;W<nt;W++){var ie=Ti[W];fn.set(ie,mi),mi+=ie.length}return fn}function os(Ti,Mn){for(var fn=Ti.length+Mn.length,mi=new Float64Array(fn),nt=0,W=0;W<fn;)mi[W++]=Ti[nt],mi[W++]=Mn[nt],nt++;return mi}function ta(Ti,Mn,fn){for(var mi=fn.length,nt=0;nt<mi;nt++)Ti.setUint8(Mn+nt,fn.charCodeAt(nt))}var ba;ht===2&&(ba=os($e,Ft)),ht===1&&(ba=$e);var Ma=ba.length,qs=44+Ma*2,La=new ArrayBuffer(qs),di=new DataView(La);ta(di,0,"RIFF"),di.setUint32(4,36+Ma*2,!0),ta(di,8,"WAVE"),ta(di,12,"fmt "),di.setUint32(16,16,!0),di.setUint16(20,1,!0),di.setUint16(22,ht,!0),di.setUint32(24,kt,!0),di.setUint32(28,kt*ht*2,!0),di.setUint16(32,ht*2,!0),di.setUint16(34,16,!0),ta(di,36,"data"),di.setUint32(40,Ma*2,!0);for(var Yl=Ma,Pr=44,Mr=1,Lr=0;Lr<Yl;Lr++)di.setInt16(Pr,ba[Lr]*(32767*Mr),!0),Pr+=2;if(Ze)return Ze({buffer:La,view:di});postMessage({buffer:La,view:di})}if(ze.noWorker){tt(ze,function(qe){Je(qe.buffer,qe.view)});return}var ye=G(tt);ye.onmessage=function(qe){Je(qe.data.buffer,qe.data.view),f.revokeObjectURL(ye.workerURL),ye.terminate()},ye.postMessage(ze)}function G(ze){var Je=f.createObjectURL(new Blob([ze.toString(),";this.onmessage = function (eee) {"+ze.name+"(eee.data);}"],{type:"application/javascript"})),tt=new Worker(Je);return tt.workerURL=Je,tt}this.stop=function(ze){ze=ze||function(){},se=!1,Ee({desiredSampRate:we,sampleRate:de,numberOfAudioChannels:ve,internalInterleavedLength:Ae,leftBuffers:he,rightBuffers:ve===1?[]:te,noWorker:x.noWorker},function(Je,tt){O.blob=new Blob([tt],{type:"audio/wav"}),O.buffer=new ArrayBuffer(tt.buffer.byteLength),O.view=tt,O.sampleRate=we||de,O.bufferSize=Be,O.length=Ae,be=!1,ze&&ze(O.blob)})},typeof t.Storage>"u"&&(t.Storage={AudioContextConstructor:null,AudioContext:window.AudioContext||window.webkitAudioContext}),(!t.Storage.AudioContextConstructor||t.Storage.AudioContextConstructor.state==="closed")&&(t.Storage.AudioContextConstructor=new t.Storage.AudioContext);var K=t.Storage.AudioContextConstructor,oe=K.createMediaStreamSource(I),Ce=[0,256,512,1024,2048,4096,8192,16384],Be=typeof x.bufferSize>"u"?4096:x.bufferSize;if(Ce.indexOf(Be)===-1&&(x.disableLogs||console.log("Legal values for 
buffer-size are "+JSON.stringify(Ce,null," "))),K.createJavaScriptNode)X=K.createJavaScriptNode(Be,ve,ve);else if(K.createScriptProcessor)X=K.createScriptProcessor(Be,ve,ve);else throw"WebAudio API has no support on this browser.";oe.connect(X),x.bufferSize||(Be=X.bufferSize);var de=typeof x.sampleRate<"u"?x.sampleRate:K.sampleRate||44100;(de<22050||de>96e3)&&(x.disableLogs||console.log("sample-rate must be under range 22050 and 96000.")),x.disableLogs||x.desiredSampRate&&console.log("Desired sample-rate: "+x.desiredSampRate);var De=!1;th
* CanvasRecorder is a standalone class used by {@link RecordRTC} to bring HTML5-Canvas recording into video WebM. It uses the HTML2Canvas library and runs on top of {@link Whammy}.
* @summary HTML2Canvas recording into video WebM.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef CanvasRecorder
* @class
* @example
* var recorder = new CanvasRecorder(htmlElement, { disableLogs: true, useWhammyRecorder: true });
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {HTMLElement} htmlElement - querySelector/getElementById/getElementsByTagName[0]/etc.
* @param {object} config - {disableLogs:true, initCallback: function}
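*
* Editor's sketch (not part of the upstream comment): recording an existing <canvas>. Where the
* browser exposes canvas.captureStream() this recorder hands the stream to the MediaRecorder-based
* recorder; otherwise it falls back to html2canvas + Whammy. `canvas` and `video` are assumed to exist.
*
* var recorder = new CanvasRecorder(canvas, {
*     mimeType: 'video/webm',
*     frameInterval: 10,  // only used by the html2canvas/Whammy fallback
*     onEncodingCallback: function(remaining, total) {
*         // progress hook for the fallback encoder
*     }
* });
* recorder.record();
* recorder.stop(function(blob) {
*     video.src = URL.createObjectURL(blob);
* });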
*/function J(I,x){if(typeof html2canvas>"u")throw"Please link: https://www.webrtc-experiment.com/screenshot.js";x=x||{},x.frameInterval||(x.frameInterval=10);var O=!1;["captureStream","mozCaptureStream","webkitCaptureStream"].forEach(function(Ce){Ce in document.createElement("canvas")&&(O=!0)});var he=(!!window.webkitRTCPeerConnection||!!window.webkitGetUserMedia)&&!!window.chrome,te=50,se=navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./);he&&se&&se[2]&&(te=parseInt(se[2],10)),he&&te<52&&(O=!1),x.useWhammyRecorder&&(O=!1);var Ae,X;if(O)if(x.disableLogs||console.log("Your browser supports both MediRecorder API and canvas.captureStream!"),I instanceof HTMLCanvasElement)Ae=I;else if(I instanceof CanvasRenderingContext2D)Ae=I.canvas;else throw"Please pass either HTMLCanvasElement or CanvasRenderingContext2D.";else navigator.mozGetUserMedia&&(x.disableLogs||console.error("Canvas recording is NOT supported in Firefox."));var ve;this.record=function(){if(ve=!0,O&&!x.useWhammyRecorder){var Ce;"captureStream"in Ae?Ce=Ae.captureStream(25):"mozCaptureStream"in Ae?Ce=Ae.mozCaptureStream(25):"webkitCaptureStream"in Ae&&(Ce=Ae.webkitCaptureStream(25));try{var Be=new k;Be.addTrack(S(Ce,"video")[0]),Ce=Be}catch{}if(!Ce)throw"captureStream API are NOT available.";X=new R(Ce,{mimeType:x.mimeType||"video/webm"}),X.record()}else oe.frames=[],K=new Date().getTime(),G();x.initCallback&&x.initCallback()},this.getWebPImages=function(Ce){if(I.nodeName.toLowerCase()!=="canvas"){Ce();return}var Be=oe.frames.length;oe.frames.forEach(function(de,De){var Ie=Be-De;x.disableLogs||console.log(Ie+"/"+Be+" frames remaining"),x.onEncodingCallback&&x.onEncodingCallback(Ie,Be);var fe=de.image.toDataURL("image/webp",1);oe.frames[De].image=fe}),x.disableLogs||console.log("Generating WebM"),Ce()},this.stop=function(Ce){ve=!1;var Be=this;if(O&&X){X.stop(Ce);return}this.getWebPImages(function(){oe.compile(function(de){x.disableLogs||console.log("Recording finished!"),Be.blob=de,Be.blob.forEach&&(Be.blob=new Blob([],{type:"video/webm"})),Ce&&Ce(Be.blob),oe.frames=[]})})};var we=!1;this.pause=function(){if(we=!0,X instanceof R){X.pause();return}},this.resume=function(){if(we=!1,X instanceof R){X.resume();return}ve||this.record()},this.clearRecordedData=function(){ve&&this.stop(ce),ce()};function ce(){oe.frames=[],ve=!1,we=!1}this.name="CanvasRecorder",this.toString=function(){return this.name};function Ee(){var Ce=document.createElement("canvas"),Be=Ce.getContext("2d");return Ce.width=I.width,Ce.height=I.height,Be.drawImage(I,0,0),Ce}function G(){if(we)return K=new Date().getTime(),setTimeout(G,500);if(I.nodeName.toLowerCase()==="canvas"){var Ce=new Date().getTime()-K;K=new Date().getTime(),oe.frames.push({image:Ee(),duration:Ce}),ve&&setTimeout(G,x.frameInterval);return}html2canvas(I,{grabMouse:typeof x.showMousePointer>"u"||x.showMousePointer,onrendered:function(Be){var de=new Date().getTime()-K;if(!de)return setTimeout(G,x.frameInterval);K=new Date().getTime(),oe.frames.push({image:Be.toDataURL("image/webp",1),duration:de}),ve&&setTimeout(G,x.frameInterval)}})}var K=new Date().getTime(),oe=new Fe.Video(100)}typeof t<"u"&&(t.CanvasRecorder=J);/**
* WhammyRecorder is a standalone class used by {@link RecordRTC} to bring video recording to Chrome. It runs on top of {@link Whammy}.
* @summary Video recording feature in Chrome.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef WhammyRecorder
* @class
* @example
* var recorder = new WhammyRecorder(mediaStream);
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {disableLogs: true, initCallback: function, video: HTMLVideoElement, etc.}
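*
* Editor's sketch (not part of the upstream comment): tuning the canvas size and capture interval
* used by this recorder. `stream` and `video` are assumed to exist in the surrounding scope.
*
* var recorder = new WhammyRecorder(stream, {
*     width: 320,
*     height: 240,
*     frameInterval: 20,  // milliseconds between canvas grabs (default is 10)
*     disableLogs: true
* });
* recorder.record();
* recorder.stop(function(blob) {
*     video.src = URL.createObjectURL(blob);
* });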
*/function ae(I,x){x=x||{},x.frameInterval||(x.frameInterval=10),x.disableLogs||console.log("Using frames-interval:",x.frameInterval),this.record=function(){x.width||(x.width=320),x.height||(x.height=240),x.video||(x.video={width:x.width,height:x.height}),x.canvas||(x.canvas={width:x.width,height:x.height}),ve.width=x.canvas.width||320,ve.height=x.canvas.height||240,we=ve.getContext("2d"),x.video&&x.video instanceof HTMLVideoElement?(ce=x.video.cloneNode(),x.initCallback&&x.initCallback()):(ce=document.createElement("video"),U(I,ce),ce.onloadedmetadata=function(){x.initCallback&&x.initCallback()},ce.width=x.video.width,ce.height=x.video.height),ce.muted=!0,ce.play(),Ee=new Date().getTime(),G=new Fe.Video,x.disableLogs||(console.log("canvas resolutions",ve.width,"*",ve.height),console.log("video width/height",ce.width||ve.width,"*",ce.height||ve.height)),O(x.frameInterval)};function O(K){K=typeof K<"u"?K:10;var oe=new Date().getTime()-Ee;if(!oe)return setTimeout(O,K,K);if(Ae)return Ee=new Date().getTime(),setTimeout(O,100);Ee=new Date().getTime(),ce.paused&&ce.play(),we.drawImage(ce,0,0,ve.width,ve.height),G.frames.push({duration:oe,image:ve.toDataURL("image/webp")}),se||setTimeout(O,K,K)}function he(K){var oe=-1,Ce=K.length;(function Be(){if(oe++,oe===Ce){K.callback();return}setTimeout(function(){K.functionToLoop(Be,oe)},1)})()}function te(K,oe,Ce,Be,de){var De=document.createElement("canvas");De.width=ve.width,De.height=ve.height;var Ie=De.getContext("2d"),fe=[],be=K.length,Ue={r:0,g:0,b:0},Oe=Math.sqrt(Math.pow(255,2)+Math.pow(255,2)+Math.pow(255,2)),He=0,ze=0,Je=!1;he({length:be,functionToLoop:function(tt,ye){var qe,Ze,ht,$e=function(){!Je&&ht-qe<=ht*ze||(Je=!0,fe.push(K[ye])),tt()};if(Je)$e();else{var Ft=new Image;Ft.onload=function(){Ie.drawImage(Ft,0,0,ve.width,ve.height);var kt=Ie.getImageData(0,0,ve.width,ve.height);qe=0,Ze=kt.data.length,ht=kt.data.length/4;for(var pt=0;pt<Ze;pt+=4){var Lt={r:kt.data[pt],g:kt.data[pt+1],b:kt.data[pt+2]},St=Math.sqrt(Math.pow(Lt.r-Ue.r,2)+Math.pow(Lt.g-Ue.g,2)+Math.pow(Lt.b-Ue.b,2));St<=Oe*He&&qe++}$e()},Ft.src=K[ye].image}},callback:function(){fe=fe.concat(K.slice(be)),fe.length<=0&&fe.push(K[K.length-1]),de(fe)}})}var se=!1;this.stop=function(K){K=K||function(){},se=!0;var oe=this;setTimeout(function(){te(G.frames,-1,null,null,function(Ce){G.frames=Ce,x.advertisement&&x.advertisement.length&&(G.frames=x.advertisement.concat(G.frames)),G.compile(function(Be){oe.blob=Be,oe.blob.forEach&&(oe.blob=new Blob([],{type:"video/webm"})),K&&K(oe.blob)})})},10)};var Ae=!1;this.pause=function(){Ae=!0},this.resume=function(){Ae=!1,se&&this.record()},this.clearRecordedData=function(){se||this.stop(X),X()};function X(){G.frames=[],se=!0,Ae=!1}this.name="WhammyRecorder",this.toString=function(){return this.name};var ve=document.createElement("canvas"),we=ve.getContext("2d"),ce,Ee,G}typeof t<"u"&&(t.WhammyRecorder=ae);/**
* Whammy is a standalone class used by {@link RecordRTC} to bring video recording to Chrome. It was written by {@link https://github.com/antimatter15|antimatter15}
* @summary A real-time JavaScript WebM encoder based on a canvas hack.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef Whammy
* @class
* @example
* var recorder = new Whammy().Video(15);
* recorder.add(context || canvas || dataURL);
* var output = recorder.compile();
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
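*
* Editor's note (not part of the upstream comment): in this bundled build, Whammy is exposed as an
* object with a Video constructor, and compile() is asynchronous, taking a callback instead of
* returning the blob. A sketch that matches the implementation below:
*
* var video = new Whammy.Video(100);        // default duration per frame, in milliseconds
* video.add(contextOrCanvasOrWebPDataURL);  // may be called repeatedly, once per frame
* video.compile(function(webmBlob) {
*     // webmBlob is a Blob of type "video/webm"
* });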
*/var Fe=function(){function I(he){this.frames=[],this.duration=he||1,this.quality=.8}I.prototype.add=function(he,te){if("canvas"in he&&(he=he.canvas),"toDataURL"in he&&(he=he.toDataURL("image/webp",this.quality)),!/^data:image\/webp;base64,/ig.test(he))throw"Input must be formatted properly as a base64 encoded DataURI of type image/webp";this.frames.push({image:he,duration:te||this.duration})};function x(he){var te=f.createObjectURL(new Blob([he.toString(),"this.onmessage = function (eee) {"+he.name+"(eee.data);}"],{type:"application/javascript"})),se=new Worker(te);return f.revokeObjectURL(te),se}function O(he){function te(de){var De=Ae(de);if(!De)return[];for(var Ie=3e4,fe=[{id:440786851,data:[{data:1,id:17030},{data:1,id:17143},{data:4,id:17138},{data:8,id:17139},{data:"webm",id:17026},{data:2,id:17031},{data:2,id:17029}]},{id:408125543,data:[{id:357149030,data:[{data:1e6,id:2807729},{data:"whammy",id:19840},{data:"whammy",id:22337},{data:Ce(De.duration),id:17545}]},{id:374648427,data:[{id:174,data:[{data:1,id:215},{data:1,id:29637},{data:0,id:156},{data:"und",id:2274716},{data:"V_VP8",id:134},{data:"VP8",id:2459272},{data:1,id:131},{id:224,data:[{data:De.width,id:176},{data:De.height,id:186}]}]}]}]}],be=0,Ue=0;be<de.length;){var Oe=[],He=0;do Oe.push(de[be]),He+=de[be].duration,be++;while(be<de.length&&He<Ie);var ze=0,Je={id:524531317,data:se(Ue,ze,Oe)};fe[1].data.push(Je),Ue+=He}return ce(fe)}function se(de,De,Ie){return[{data:de,id:231}].concat(Ie.map(function(fe){var be=Ee({frame:fe.data.slice(4),trackNum:1,timecode:Math.round(De)});return De+=fe.duration,{data:be,id:163}}))}function Ae(de){if(!de[0]){postMessage({error:"Something went wrong. Maybe WebP format is not supported in the current browser."});return}for(var De=de[0].width,Ie=de[0].height,fe=de[0].duration,be=1;be<de.length;be++)fe+=de[be].duration;return{duration:fe,width:De,height:Ie}}function X(de){for(var De=[];de>0;)De.push(de&255),de=de>>8;return new Uint8Array(De.reverse())}function ve(de){return new Uint8Array(de.split("").map(function(De){return De.charCodeAt(0)}))}function we(de){var De=[],Ie=de.length%8?new Array(9-de.length%8).join("0"):"";de=Ie+de;for(var fe=0;fe<de.length;fe+=8)De.push(parseInt(de.substr(fe,8),2));return new Uint8Array(De)}function ce(de){for(var De=[],Ie=0;Ie<de.length;Ie++){var fe=de[Ie].data;typeof fe=="object"&&(fe=ce(fe)),typeof fe=="number"&&(fe=we(fe.toString(2))),typeof fe=="string"&&(fe=ve(fe));var be=fe.size||fe.byteLength||fe.length,Ue=Math.ceil(Math.ceil(Math.log(be)/Math.log(2))/8),Oe=be.toString(2),He=new Array(Ue*7+7+1-Oe.length).join("0")+Oe,ze=new Array(Ue).join("0")+"1"+He;De.push(X(de[Ie].id)),De.push(we(ze)),De.push(fe)}return new Blob(De,{type:"video/webm"})}function Ee(de){var De=0;De|=128;var Ie=[de.trackNum|128,de.timecode>>8,de.timecode&255,De].map(function(fe){return String.fromCharCode(fe)}).join("")+de.frame;return Ie}function G(de){for(var De=de.RIFF[0].WEBP[0],Ie=De.indexOf("*"),fe=0,be=[];fe<4;fe++)be[fe]=De.charCodeAt(Ie+3+fe);var Ue,Oe,He;return He=be[1]<<8|be[0],Ue=He&16383,He=be[3]<<8|be[2],Oe=He&16383,{width:Ue,height:Oe,data:De,riff:de}}function K(de,De){return parseInt(de.substr(De+4,4).split("").map(function(Ie){var fe=Ie.charCodeAt(0).toString(2);return new Array(8-fe.length+1).join("0")+fe}).join(""),2)}function oe(de){for(var De=0,Ie={};De<de.length;){var fe=de.substr(De,4),be=K(de,De),Ue=de.substr(De+4+4,be);De+=8+be,Ie[fe]=Ie[fe]||[],fe==="RIFF"||fe==="LIST"?Ie[fe].push(oe(Ue)):Ie[fe].push(Ue)}return Ie}function Ce(de){return[].slice.call(new 
Uint8Array(new Float64Array([de]).buffer),0).map(function(De){return String.fromCharCode(De)}).reverse().join("")}var Be=new te(he.map(function(de){var De=G(oe(atob(de.image.slice(23))));return De.duration=de.duration,De}));postMessage(Be)}return I.prototype.compile=function(he){var te=x(O);te.onmessage=function(se){if(se.data.error){console.error(se.data.error);return}he(se.data)},te.postMessage(this.frames)},{Video:I}}();typeof t<"u"&&(t.Whammy=Fe);/**
* DiskStorage is a standalone object used by {@link RecordRTC} to store recorded blobs in IndexedDB storage.
* @summary Writing blobs into IndexedDB.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @example
* DiskStorage.Store({
* audioBlob: yourAudioBlob,
* videoBlob: yourVideoBlob,
* gifBlob : yourGifBlob
* });
* DiskStorage.Fetch(function(dataURL, type) {
* if(type === 'audioBlob') { }
* if(type === 'videoBlob') { }
* if(type === 'gifBlob') { }
* });
* // DiskStorage.dataStoreName = 'recordRTC';
* // DiskStorage.onError = function(error) { };
* @property {function} init - This method must be called once to initialize IndexedDB ObjectStore. Though, it is auto-used internally.
* @property {function} Fetch - This method fetches stored blobs from IndexedDB.
* @property {function} Store - This method stores blobs in IndexedDB.
* @property {function} onError - This function is invoked for any known/unknown error.
* @property {string} dataStoreName - Name of the ObjectStore created in IndexedDB storage.
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
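*
* Editor's sketch (not part of the upstream comment): overriding the database name, object-store
* name and error hook before writing. `recordedBlob` is assumed to come from one of the recorders above.
*
* DiskStorage.dbName = 'my-recordings';  // defaults to a name derived from location.href
* DiskStorage.dataStoreName = 'recordRTC';
* DiskStorage.onError = function(error) { console.error('IndexedDB error', error); };
* DiskStorage.Store({ videoBlob: recordedBlob });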
*/var ge={init:function(){var I=this;if(typeof indexedDB>"u"||typeof indexedDB.open>"u"){console.error("IndexedDB API are not available in this browser.");return}var x=1,O=this.dbName||location.href.replace(/\/|:|#|%|\.|\[|\]/g,""),he,te=indexedDB.open(O,x);function se(X){X.createObjectStore(I.dataStoreName)}function Ae(){var X=he.transaction([I.dataStoreName],"readwrite");I.videoBlob&&X.objectStore(I.dataStoreName).put(I.videoBlob,"videoBlob"),I.gifBlob&&X.objectStore(I.dataStoreName).put(I.gifBlob,"gifBlob"),I.audioBlob&&X.objectStore(I.dataStoreName).put(I.audioBlob,"audioBlob");function ve(we){X.objectStore(I.dataStoreName).get(we).onsuccess=function(ce){I.callback&&I.callback(ce.target.result,we)}}ve("audioBlob"),ve("videoBlob"),ve("gifBlob")}te.onerror=I.onError,te.onsuccess=function(){if(he=te.result,he.onerror=I.onError,he.setVersion)if(he.version!==x){var X=he.setVersion(x);X.onsuccess=function(){se(he),Ae()}}else Ae();else Ae()},te.onupgradeneeded=function(X){se(X.target.result)}},Fetch:function(I){return this.callback=I,this.init(),this},Store:function(I){return this.audioBlob=I.audioBlob,this.videoBlob=I.videoBlob,this.gifBlob=I.gifBlob,this.init(),this},onError:function(I){console.error(JSON.stringify(I,null," "))},dataStoreName:"recordRTC",dbName:null};typeof t<"u"&&(t.DiskStorage=ge);/**
* GifRecorder is a standalone class used by {@link RecordRTC} to record video or canvas into an animated GIF.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef GifRecorder
* @class
* @example
* var recorder = new GifRecorder(mediaStream || canvas || context, { onGifPreview: function, onGifRecordingStarted: function, width: 1280, height: 720, frameRate: 200, quality: 10 });
* recorder.record();
* recorder.stop(function(blob) {
* img.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object or HTMLCanvasElement or CanvasRenderingContext2D.
* @param {object} config - {disableLogs:true, initCallback: function, width: 320, height: 240, frameRate: 200, quality: 10}
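*
* Editor's sketch (not part of the upstream comment): recording an existing <canvas> instead of a
* MediaStream. Note that frameRate is forwarded to GIFEncoder.setDelay(), so it is a per-frame
* delay in milliseconds rather than frames per second. `canvas` and `img` are assumed to exist.
*
* var recorder = new GifRecorder(canvas, { frameRate: 200, quality: 10 });
* recorder.record();
* recorder.stop(function(blob) {
*     img.src = URL.createObjectURL(blob);
* });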
*/function le(I,x){if(typeof GIFEncoder>"u"){var O=document.createElement("script");O.src="https://www.webrtc-experiment.com/gif-recorder.js",(document.body||document.documentElement).appendChild(O)}x=x||{};var he=I instanceof CanvasRenderingContext2D||I instanceof HTMLCanvasElement;this.record=function(){if(typeof GIFEncoder>"u"){setTimeout(K.record,1e3);return}if(!ve){setTimeout(K.record,1e3);return}he||(x.width||(x.width=we.offsetWidth||320),x.height||(x.height=we.offsetHeight||240),x.video||(x.video={width:x.width,height:x.height}),x.canvas||(x.canvas={width:x.width,height:x.height}),Ae.width=x.canvas.width||320,Ae.height=x.canvas.height||240,we.width=x.video.width||320,we.height=x.video.height||240),G=new GIFEncoder,G.setRepeat(0),G.setDelay(x.frameRate||200),G.setQuality(x.quality||10),G.start(),typeof x.onGifRecordingStarted=="function"&&x.onGifRecordingStarted();function oe(Ce){if(K.clearedRecordedData!==!0){if(te)return setTimeout(function(){oe(Ce)},100);ce=r(oe),typeof Ee===void 0&&(Ee=Ce),!(Ce-Ee<90)&&(!he&&we.paused&&we.play(),he||X.drawImage(we,0,0,Ae.width,Ae.height),x.onGifPreview&&x.onGifPreview(Ae.toDataURL("image/png")),G.addFrame(X),Ee=Ce)}}ce=r(oe),x.initCallback&&x.initCallback()},this.stop=function(oe){oe=oe||function(){},ce&&l(ce),this.blob=new Blob([new Uint8Array(G.stream().bin)],{type:"image/gif"}),oe(this.blob),G.stream().bin=[]};var te=!1;this.pause=function(){te=!0},this.resume=function(){te=!1},this.clearRecordedData=function(){K.clearedRecordedData=!0,se()};function se(){G&&(G.stream().bin=[])}this.name="GifRecorder",this.toString=function(){return this.name};var Ae=document.createElement("canvas"),X=Ae.getContext("2d");he&&(I instanceof CanvasRenderingContext2D?(X=I,Ae=X.canvas):I instanceof HTMLCanvasElement&&(X=I.getContext("2d"),Ae=I));var ve=!0;if(!he){var we=document.createElement("video");we.muted=!0,we.autoplay=!0,we.playsInline=!0,ve=!1,we.onloadedmetadata=function(){ve=!0},U(I,we),we.play()}var ce=null,Ee,G,K=this}typeof t<"u"&&(t.GifRecorder=le);function re(I,x){var O="Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45";(function(fe){typeof t<"u"||fe&&(typeof window<"u"||typeof ni>"u"||(ni.navigator={userAgent:O,getUserMedia:function(){}},ni.console||(ni.console={}),(typeof ni.console.log>"u"||typeof ni.console.error>"u")&&(ni.console.error=ni.console.log=ni.console.log||function(){console.log(arguments)}),typeof document>"u"&&(fe.document={documentElement:{appendChild:function(){return""}}},document.createElement=document.captureStream=document.mozCaptureStream=function(){var be={getContext:function(){return be},play:function(){},pause:function(){},drawImage:function(){},toDataURL:function(){return""},style:{}};return be},fe.HTMLVideoElement=function(){}),typeof location>"u"&&(fe.location={protocol:"file:",href:"",hash:""}),typeof screen>"u"&&(fe.screen={width:0,height:0}),typeof we>"u"&&(fe.URL={createObjectURL:function(){return""},revokeObjectURL:function(){return""}}),fe.window=ni))})(typeof ni<"u"?ni:null),x=x||"multi-streams-mixer";var he=[],te=!1,se=document.createElement("canvas"),Ae=se.getContext("2d");se.style.opacity=0,se.style.position="absolute",se.style.zIndex=-1,se.style.top="-1000em",se.style.left="-1000em",se.className=x,(document.body||document.documentElement).appendChild(se),this.disableLogs=!1,this.frameInterval=10,this.width=360,this.height=240,this.useGainNode=!0;var X=this,ve=window.AudioContext;typeof ve>"u"&&(typeof webkitAudioContext<"u"&&(ve=webkitAudioContext),typeof 
mozAudioContext<"u"&&(ve=mozAudioContext));var we=window.URL;typeof we>"u"&&typeof webkitURL<"u"&&(we=webkitURL),typeof navigator<"u"&&typeof navigator.getUserMedia>"u"&&(typeof navigator.webkitGetUserMedia<"u"&&(navigator.getUserMedia=navigator.webkitGetUserMedia),typeof navigator.mozGetUserMedia<"u"&&(navigator.getUserMedia=navigator.mozGetUserMedia));var ce=window.MediaStream;typeof ce>"u"&&typeof webkitMediaStream<"u"&&(ce=webkitMediaStream),typeof ce<"u"&&typeof ce.prototype.stop>"u"&&(ce.prototype.stop=function(){this.getTracks().forEach(function(f
* MultiStreamRecorder can record multiple videos in a single container.
* @summary Multi-videos recorder.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef MultiStreamRecorder
* @class
* @example
* var options = {
* mimeType: 'video/webm'
* }
* var recorder = new MultiStreamRecorder(ArrayOfMediaStreams, options);
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
*
* // or
* var blob = recorder.blob;
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStreams} mediaStreams - Array of MediaStreams.
* @param {object} config - {disableLogs:true, frameInterval: 1, mimeType: "video/webm"}
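*
* Editor's sketch (not part of the upstream comment): mixing two streams and appending a third one
* mid-recording via addStreams(); previewStream receives the mixed MediaStream. `cameraStream`,
* `screenStream`, `extraStream` and `video` are assumed to exist.
*
* var recorder = new MultiStreamRecorder([cameraStream, screenStream], {
*     mimeType: 'video/webm',
*     video: { width: 360, height: 240 },
*     previewStream: function(mixedStream) { video.srcObject = mixedStream; }
* });
* recorder.record();
* recorder.addStreams(extraStream);  // accepts a single MediaStream or an array
* recorder.stop(function(blob) {
*     video.src = URL.createObjectURL(blob);
* });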
*/function pe(I,x){I=I||[];var O=this,he,te;x=x||{elementClass:"multi-streams-mixer",mimeType:"video/webm",video:{width:360,height:240}},x.frameInterval||(x.frameInterval=10),x.video||(x.video={}),x.video.width||(x.video.width=360),x.video.height||(x.video.height=240),this.record=function(){he=new re(I,x.elementClass||"multi-streams-mixer"),se().length&&(he.frameInterval=x.frameInterval||10,he.width=x.video.width||360,he.height=x.video.height||240,he.startDrawingFrames()),x.previewStream&&typeof x.previewStream=="function"&&x.previewStream(he.getMixedStream()),te=new R(he.getMixedStream(),x),te.record()};function se(){var Ae=[];return I.forEach(function(X){S(X,"video").forEach(function(ve){Ae.push(ve)})}),Ae}this.stop=function(Ae){te&&te.stop(function(X){O.blob=X,Ae(X),O.clearRecordedData()})},this.pause=function(){te&&te.pause()},this.resume=function(){te&&te.resume()},this.clearRecordedData=function(){te&&(te.clearRecordedData(),te=null),he&&(he.releaseStreams(),he=null)},this.addStreams=function(Ae){if(!Ae)throw"First parameter is required.";Ae instanceof Array||(Ae=[Ae]),I.concat(Ae),!(!te||!he)&&(he.appendStreams(Ae),x.previewStream&&typeof x.previewStream=="function"&&x.previewStream(he.getMixedStream()))},this.resetVideoStreams=function(Ae){he&&(Ae&&!(Ae instanceof Array)&&(Ae=[Ae]),he.resetVideoStreams(Ae))},this.getMixer=function(){return he},this.name="MultiStreamRecorder",this.toString=function(){return this.name}}typeof t<"u"&&(t.MultiStreamRecorder=pe);/**
* RecordRTCPromisesHandler adds Promise support to {@link RecordRTC}. Try a {@link https://github.com/muaz-khan/RecordRTC/blob/master/simple-demos/RecordRTCPromisesHandler.html|demo here}
* @summary Promises for {@link RecordRTC}
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef RecordRTCPromisesHandler
* @class
* @example
* var recorder = new RecordRTCPromisesHandler(mediaStream, options);
* recorder.startRecording()
* .then(successCB)
* .catch(errorCB);
* // Note: You can access all RecordRTC API using "recorder.recordRTC" e.g.
* recorder.recordRTC.onStateChanged = function(state) {};
* recorder.recordRTC.setRecordingDuration(5000);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc.
* @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}
* @throws Will throw an error if "new" keyword is not used to initiate "RecordRTCPromisesHandler". Also throws an error if first argument "MediaStream" is missing.
* @requires {@link RecordRTC}
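*
* Editor's sketch (not part of the upstream comment): the same flow with async/await, since every
* wrapper method here returns a Promise. `stream` is assumed to be an existing MediaStream.
*
* async function recordFiveSeconds() {
*     var recorder = new RecordRTCPromisesHandler(stream, { type: 'video' });
*     await recorder.startRecording();
*     await new Promise(function(resolve) { setTimeout(resolve, 5000); });
*     await recorder.stopRecording();
*     return recorder.getBlob();  // resolves with the recorded Blob
* }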
*/function $(I,x){if(!this)throw'Use "new RecordRTCPromisesHandler()"';if(typeof I>"u")throw'First argument "MediaStream" is required.';var O=this;O.recordRTC=new t(I,x),this.startRecording=function(){return new Promise(function(he,te){try{O.recordRTC.startRecording(),he()}catch(se){te(se)}})},this.stopRecording=function(){return new Promise(function(he,te){try{O.recordRTC.stopRecording(function(se){if(O.blob=O.recordRTC.getBlob(),!O.blob||!O.blob.size){te("Empty blob.",O.blob);return}he(se)})}catch(se){te(se)}})},this.pauseRecording=function(){return new Promise(function(he,te){try{O.recordRTC.pauseRecording(),he()}catch(se){te(se)}})},this.resumeRecording=function(){return new Promise(function(he,te){try{O.recordRTC.resumeRecording(),he()}catch(se){te(se)}})},this.getDataURL=function(he){return new Promise(function(te,se){try{O.recordRTC.getDataURL(function(Ae){te(Ae)})}catch(Ae){se(Ae)}})},this.getBlob=function(){return new Promise(function(he,te){try{he(O.recordRTC.getBlob())}catch(se){te(se)}})},this.getInternalRecorder=function(){return new Promise(function(he,te){try{he(O.recordRTC.getInternalRecorder())}catch(se){te(se)}})},this.reset=function(){return new Promise(function(he,te){try{he(O.recordRTC.reset())}catch(se){te(se)}})},this.destroy=function(){return new Promise(function(he,te){try{he(O.recordRTC.destroy())}catch(se){te(se)}})},this.getState=function(){return new Promise(function(he,te){try{he(O.recordRTC.getState())}catch(se){te(se)}})},this.blob=null,this.version="5.6.2"}typeof t<"u"&&(t.RecordRTCPromisesHandler=$);/**
* WebAssemblyRecorder lets you create WebM videos in JavaScript via WebAssembly. The library consumes raw RGBA32 buffers (4 bytes per pixel) and turns them into a WebM video with the given frame rate and quality. This makes it compatible out-of-the-box with ImageData from a <canvas>. With realtime mode you can also use webm-wasm for streaming WebM videos.
* @summary Video recording feature in Chrome, Firefox and maybe Edge.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef WebAssemblyRecorder
* @class
* @example
* var recorder = new WebAssemblyRecorder(mediaStream);
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {webAssemblyPath:'webm-wasm.wasm',workerPath: 'webm-worker.js', frameRate: 30, width: 1920, height: 1080, bitrate: 1024, realtime: true}
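*
* Editor's sketch (not part of the upstream comment): self-hosting the webm-wasm assets instead of
* letting the recorder fetch them from unpkg at runtime; the asset paths are placeholders.
* `stream` and `video` are assumed to exist in the surrounding scope.
*
* var recorder = new WebAssemblyRecorder(stream, {
*     workerPath: '/assets/webm-worker.js',      // hypothetical local copy of webm-worker.js
*     webAssemblyPath: '/assets/webm-wasm.wasm', // hypothetical local copy of webm-wasm.wasm
*     width: 640,
*     height: 480,
*     frameRate: 30,
*     bitrate: 1200
* });
* recorder.record();
* recorder.stop(function(blob) {
*     video.src = URL.createObjectURL(blob);
* });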
*/function V(I,x){(typeof ReadableStream>"u"||typeof WritableStream>"u")&&console.error("Following polyfill is strongly recommended: https://unpkg.com/@mattiasbuelens/web-streams-polyfill/dist/polyfill.min.js"),x=x||{},x.width=x.width||640,x.height=x.height||480,x.frameRate=x.frameRate||30,x.bitrate=x.bitrate||1200,x.realtime=x.realtime||!0;var O;function he(){return new ReadableStream({start:function(we){var ce=document.createElement("canvas"),Ee=document.createElement("video"),G=!0;Ee.srcObject=I,Ee.muted=!0,Ee.height=x.height,Ee.width=x.width,Ee.volume=0,Ee.onplaying=function(){ce.width=x.width,ce.height=x.height;var K=ce.getContext("2d"),oe=1e3/x.frameRate,Ce=setInterval(function(){if(O&&(clearInterval(Ce),we.close()),G&&(G=!1,x.onVideoProcessStarted&&x.onVideoProcessStarted()),K.drawImage(Ee,0,0),we._controlledReadableStream.state!=="closed")try{we.enqueue(K.getImageData(0,0,x.width,x.height))}catch{}},oe)},Ee.play()}})}var te;function se(we,ce){if(!x.workerPath&&!ce){O=!1,fetch("https://unpkg.com/webm-wasm@latest/dist/webm-worker.js").then(function(G){G.arrayBuffer().then(function(K){se(we,K)})});return}if(!x.workerPath&&ce instanceof ArrayBuffer){var Ee=new Blob([ce],{type:"text/javascript"});x.workerPath=f.createObjectURL(Ee)}x.workerPath||console.error("workerPath parameter is missing."),te=new Worker(x.workerPath),te.postMessage(x.webAssemblyPath||"https://unpkg.com/webm-wasm@latest/dist/webm-wasm.wasm"),te.addEventListener("message",function(G){G.data==="READY"?(te.postMessage({width:x.width,height:x.height,bitrate:x.bitrate||1200,timebaseDen:x.frameRate||30,realtime:x.realtime}),he().pipeTo(new WritableStream({write:function(K){if(O){console.error("Got image, but recorder is finished!");return}te.postMessage(K.data.buffer,[K.data.buffer])}}))):G.data&&(Ae||ve.push(G.data))})}this.record=function(){ve=[],Ae=!1,this.blob=null,se(I),typeof x.initCallback=="function"&&x.initCallback()};var Ae;this.pause=function(){Ae=!0},this.resume=function(){Ae=!1};function X(we){if(!te){we&&we();return}te.addEventListener("message",function(ce){ce.data===null&&(te.terminate(),te=null,we&&we())}),te.postMessage(null)}var ve=[];this.stop=function(we){O=!0;var ce=this;X(function(){ce.blob=new Blob(ve,{type:"video/webm"}),we(ce.blob)})},this.name="WebAssemblyRecorder",this.toString=function(){return this.name},this.clearRecordedData=function(){ve=[],Ae=!1,this.blob=null},this.blob=null}typeof t<"u"&&(t.WebAssemblyRecorder=V)}(hm)),hm.exports}var GK=jK();const 
H1=w1(GK),zK={name:"MessageContactInfo",components:{ContactsAddContact:xA,Icon:Qe},computed:{...Z(Xt,["contacts"]),...Z(ue,["config","IntlString","blockedNumbers"])},data(){return{phoneNumber:null,contact:null}},methods:{...xe(ki,["setBrowsePicture"]),blockContact(){if(this.blockCooldown){M.onshow_notification({data:{img:"/public/img/Apps/light_mode/message.webp",apptitle:"APP_MESSAGES_NAME",title:"CONTACT_APP_CONTACT_VIEW_BLOCK_COOLDOWN",message:""}});return}if(this.blockCooldown=!0,setTimeout(()=>{this.blockCooldown=!1},5e3),this.blockedNumbers.includes(this.phoneNumber)){M.post("removeBlock",{number:this.phoneNumber});return}M.post("blockContact",{number:this.phoneNumber})},startVideoCall(){M.startVideoCall(this.phoneNumber)},closeContactInfo(){document.getElementById("contact-addcontact-box").classList.add("addcontact-out-class"),setTimeout(()=>{this.phoneNumber=null},180),Ge().emit("message_brightness",{brightness:!1})},addContact(){Ge().emit("addContact",{number:this.phoneNumber})},sendGPSLocation(){M.sendMessage(this.phoneNumber,"%pos%"),this.closeContactInfo()},startCall(){M.startCall(this.phoneNumber)},openMail(e){Te.push("/mail/list/"+e)}},mounted(){this.emitter.on("openMessageContactInfo",e=>{if(!e.number){Te.push("/messages/list");return}this.phoneNumber=e.number;let t=this.contacts.filter(i=>i.number==e.number);t.length>0&&(this.contact=t[0]),Ge().emit("message_brightness",{brightness:!0})})},beforeUnmount(){this.emitter.off("openMessageContactInfo")}},qK={key:0,class:"contact-addcontact-box",id:"contact-addcontact-box"},VK=["src"],WK={class:"message-group-edit
* Leaflet 1.9.4, a JS library for interactive maps. https://leafletjs.com
* (c) 2010-2023 Vladimir Agafonkin, (c) 2010-2011 CloudMade
*/var E_e=vu.exports,kv;function B_e(){return kv||(kv=1,function(e,t){(function(i,n){n(t)})(E_e,function(i){var n="1.9.4";function a(u){var g,v,E,P;for(v=1,E=arguments.length;v<E;v++){P=arguments[v];for(g in P)u[g]=P[g]}return u}var s=Object.create||function(){function u(){}return function(g){return u.prototype=g,new u}}();function r(u,g){var v=Array.prototype.slice;if(u.bind)return u.bind.apply(u,v.call(arguments,1));var E=v.call(arguments,2);return function(){return u.apply(g,E.length?E.concat(v.call(arguments)):arguments)}}var o=0;function l(u){return"_leaflet_id"in u||(u._leaflet_id=++o),u._leaflet_id}function c(u,g,v){var E,P,j,me;return me=function(){E=!1,P&&(j.apply(v,P),P=!1)},j=function(){E?P=arguments:(u.apply(v,arguments),setTimeout(me,g),E=!0)},j}function f(u,g,v){var E=g[1],P=g[0],j=E-P;return u===E&&v?u:((u-P)%j+j)%j+P}function h(){return!1}function A(u,g){if(g===!1)return u;var v=Math.pow(10,g===void 0?6:g);return Math.round(u*v)/v}function m(u){return u.trim?u.trim():u.replace(/^\s+|\s+$/g,"")}function F(u){return m(u).split(/\s+/)}function y(u,g){Object.prototype.hasOwnProperty.call(u,"options")||(u.options=u.options?s(u.options):{});for(var v in g)u.options[v]=g[v];return u.options}function k(u,g,v){var E=[];for(var P in u)E.push(encodeURIComponent(v?P.toUpperCase():P)+"="+encodeURIComponent(u[P]));return(!g||g.indexOf("?")===-1?"?":"&")+E.join("&")}var C=/\{ *([\w_ -]+) *\}/g;function w(u,g){return u.replace(C,function(v,E){var P=g[E];if(P===void 0)throw new Error("No value provided for variable "+v);return typeof P=="function"&&(P=P(g)),P})}var B=Array.isArray||function(u){return Object.prototype.toString.call(u)==="[object Array]"};function S(u,g){for(var v=0;v<u.length;v++)if(u[v]===g)return v;return-1}var U="data:image/gif;base64,R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs=";function N(u){return window["webkit"+u]||window["moz"+u]||window["ms"+u]}var z=0;function Q(u){var g=+new Date,v=Math.max(0,16-(g-z));return z=g+v,window.setTimeout(u,v)}var R=window.requestAnimationFrame||N("RequestAnimationFrame")||Q,q=window.cancelAnimationFrame||N("CancelAnimationFrame")||N("CancelRequestAnimationFrame")||function(u){window.clearTimeout(u)};function J(u,g,v){if(v&&R===Q)u.call(g);else return R.call(window,r(u,g))}function ae(u){u&&q.call(window,u)}var Fe={__proto__:null,extend:a,create:s,bind:r,get lastId(){return o},stamp:l,throttle:c,wrapNum:f,falseFn:h,formatNum:A,trim:m,splitWords:F,setOptions:y,getParamString:k,template:w,isArray:B,indexOf:S,emptyImageUrl:U,requestFn:R,cancelFn:q,requestAnimFrame:J,cancelAnimFrame:ae};function ge(){}ge.extend=function(u){var g=function(){y(this),this.initialize&&this.initialize.apply(this,arguments),this.callInitHooks()},v=g.__super__=this.prototype,E=s(v);E.constructor=g,g.prototype=E;for(var P in this)Object.prototype.hasOwnProperty.call(this,P)&&P!=="prototype"&&P!=="__super__"&&(g[P]=this[P]);return u.statics&&a(g,u.statics),u.includes&&(le(u.includes),a.apply(null,[E].concat(u.includes))),a(E,u),delete E.statics,delete E.includes,E.options&&(E.options=v.options?s(v.options):{},a(E.options,u.options)),E._initHooks=[],E.callInitHooks=function(){if(!this._initHooksCalled){v.callInitHooks&&v.callInitHooks.call(this),this._initHooksCalled=!0;for(var j=0,me=E._initHooks.length;j<me;j++)E._initHooks[j].call(this)}},g},ge.include=function(u){var g=this.prototype.options;return a(this.prototype,u),u.options&&(this.prototype.options=g,this.mergeOptions(u.options)),this},ge.mergeOptions=function(u){return 
a(this.prototype.options,u),this},ge.addInitHook=function(u){var g=Array.prototype.slice.call(arguments,1),v=typeof u=="function"?u:function(){this[u].apply(this,g)};return this.prototype._initHooks=this.prototype._initHooks||[],this.prototype._initHooks.push(v),this};function le(u){if(!(typeof L>"u"||!L||!L.Mixin)){u=B(u)?u:[u];for(var g=0;g<u.length;g++)u[g]===L.Mixin.Events&&console.warn("Deprecated include of L.Mixin.Events: this property will be removed in future releases, please inherit from L.Evented instead.",new Error().stack)}}var re={on:function(u,g,v){if(typeof u=="ob
`,isFavourite:1,isDeleted:0,time:new Date},{id:4,identifier:"hdewhew",picture:"https://wallpapers.com/images/hd/fivem-9z6vdna3pkg05t7w.jpg",isFavourite:0,isDeleted:1,time:new Date}]}testYellowPage(){const t=Xr();t.posts=[{id:1,title:"Tes434343rr43r43rrrrrrrrrr7843874r3867r436784r3687543876t",text:"Te 43243 43r43r434r3r43r43 4r3434354535345433545435433454r3r43r43434343rst",number:122121},{id:2,title:"Auto zu verkaufen 223e32 32 32r323 r332",text:"Hey ich biete hier meinen Lamborghini an zu verkaufen Preis: 500 euro schnapper gönnt euch.",image:"https://www.lamborghini.com/sites/it-en/files/DAM/lamborghini/facelift_2019/models_gw/2023/03_29_revuelto/gate_models_s_02_m.jpg",number:211}]}testNewsPosts(){const t=Jr();t.posts=[{id:1,title:"Eröffnungs Cafe",text:"Hey wir eröffnen heute unser cafe in der blumenstraße!",number:122121,image:"https://cdn.discordapp.com/attachments/880552660447658007/1228016373020753930/screenshot.png?ex=662a825e&is=66180d5e&hm=bece92667d5374dbd805499697a43a0abfeb8617089cd9a3d73698ccb1f15438&"},{id:2,title:"Eröffnungs Cafe",text:"Hey wir eröffnen heute unser cafe in der blumenstraße!",number:122121,image:"https://www.swr.de/wissen/1000-antworten/1676037642957%2Cblumenwiese-118~_v-16x9@2dM_-ad6791ade5eb8b5c935dd377130b903c4b5781d8.jpg"}]}camera_open(){return this.post("camera_open")}onTakePhoto(){Ge().emit("takePhoto")}twitter_login(t,i){return this.post("twitter_login",{username:t,password:i})}twitter_postComment(t,i,n,a){return this.post("twitter_postComment",{username:t,password:i,post:n,comment:a})}ontwitter_addComment(t){Ge().emit("addTwitterComment",t.comment)}twitter_changePassword(t,i,n){return this.post("twitter_changePassword",{username:t,password:i,newPassword:n})}twitter_createAccount(t,i,n){return this.post("twitter_createAccount",{username:t,password:i,avatarUrl:n})}twitter_postTweet(t,i,n,a){this.post("twitter_postTweet",{username:t,password:i,message:n,image:a}).then(()=>{Te.push("/twitter/home")})}twitter_postTweetImg(t,i,n){return this.post("twitter_postTweetImg",{username:t,password:i,message:n})}twitter_toggleLikeTweet(t,i,n){return this.post("twitter_toggleLikeTweet",{username:t,password:i,tweetId:n})}twitter_setAvatar(t,i,n){return this.post("twitter_setAvatarUrl",{username:t,password:i,avatarUrl:n})}twitter_getTweets(t,i){return this.post("twitter_getTweets",{username:t,password:i})}twitter_getUserTweets(t,i){return this.post("twitter_getUserTweets",{username:t,password:i})}twitter_deleteTweet(t,i,n){return this.post("twitter_userssDeleteTweet",{username:t,password:i,tweetId:n})}twitter_logout(){return wi().twitterLogout(),this.post("twitter_logout")}async ontwitter_tweets(t){let i=[],n=[];t.tweets.forEach(s=>{n.push(new Promise((r,o)=>{let l=new Image;l.onerror=function(){s.authorIcon="/public/img/user.png",i.push(s),r()},l.onload=function(){i.push(s),r()},l.src=s.authorIcon}))}),await Promise.all(n);const a=wi();a.tweets=i}ontwitter_newTweet(t){const i=wi(),n=i.tweets;i.tweets=[t.tweet,...n]}ontwitter_newpost(t){const i=wi(),n=ue();i.twitterNotification===1&&i.twitterUsername!==t.post.author&&i.twitterUsername!=null&&localStorage.roadphone_app_twitter_app==="1"&&(this.onshow_notification({data:{apptitle:n.IntlString("APP_TWITTER_NOTIF_NEW_POST_TITLE"),message:"",title:t.post.author+" "+n.IntlString("APP_TWITTER_NOTIF_NEW_POST"),img:"/public/img/Apps/light_mode/tweetwave.webp"}}),this.onsetLockscreenNotify({apptitle:n.IntlString("APP_TWITTER_NOTIF_NEW_POST_TITLE"),title:t.post.author+" 
"+n.IntlString("APP_TWITTER_NOTIF_NEW_POST"),message:"",img:"/public/img/Apps/light_mode/tweetwave.webp",app:"twitter"}))}ontwitter_setAccount(t){wi().setAccount(t)}ontwitter_updateTweetLikes(t){const i=wi(),n=i.tweets.findIndex(s=>s.id===t.tweetId);n!==-1&&(i.tweets[n].likes=t.likes);const a=i.userTweets.findIndex(s=>s.id===t.tweetId);a!==-1&&(i.userTweets[a].likes=t.likes)}ontwitter_setTweetLikes(t){const i=wi(),n=i.tweets.findIndex(s=>s.id===t.tweetId);n!==-1&&(i.tweets[n].isLiked=t.isLiked);const a=i.userTweets.findIndex(s=>s.id===t.tweetId);a!==-1&&(i.userTweets[a].isLiked=t.isLike
* html2canvas 1.4.1 <https://html2canvas.hertzen.com>
* Copyright (c) 2022 Niklas von Hertzen <https://hertzen.com>
* Released under MIT License
*//*! *****************************************************************************
Copyright (c) Microsoft Corporation.

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */var u_=function(e,t){return u_=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(i,n){i.__proto__=n}||function(i,n){for(var a in n)Object.prototype.hasOwnProperty.call(n,a)&&(i[a]=n[a])},u_(e,t)};function Pa(e,t){if(typeof t!="function"&&t!==null)throw new TypeError("Class extends value "+String(t)+" is not a constructor or null");u_(e,t);function i(){this.constructor=e}e.prototype=t===null?Object.create(t):(i.prototype=t.prototype,new i)}var c_=function(){return c_=Object.assign||function(t){for(var i,n=1,a=arguments.length;n<a;n++){i=arguments[n];for(var s in i)Object.prototype.hasOwnProperty.call(i,s)&&(t[s]=i[s])}return t},c_.apply(this,arguments)};function kn(e,t,i,n){function a(s){return s instanceof i?s:new i(function(r){r(s)})}return new(i||(i=Promise))(function(s,r){function o(f){try{c(n.next(f))}catch(h){r(h)}}function l(f){try{c(n.throw(f))}catch(h){r(h)}}function c(f){f.done?s(f.value):a(f.value).then(o,l)}c((n=n.apply(e,[])).next())})}function hn(e,t){var i={label:0,sent:function(){if(s[0]&1)throw s[1];return s[1]},trys:[],ops:[]},n,a,s,r;return r={next:o(0),throw:o(1),return:o(2)},typeof Symbol=="function"&&(r[Symbol.iterator]=function(){return this}),r;function o(c){return function(f){return l([c,f])}}function l(c){if(n)throw new TypeError("Generator is already executing.");for(;i;)try{if(n=1,a&&(s=c[0]&2?a.return:c[0]?a.throw||((s=a.return)&&s.call(a),0):a.next)&&!(s=s.call(a,c[1])).done)return s;switch(a=0,s&&(c=[c[0]&2,s.value]),c[0]){case 0:case 1:s=c;break;case 4:return i.label++,{value:c[1],done:!1};case 5:i.label++,a=c[1],c=[0];continue;case 7:c=i.ops.pop(),i.trys.pop();continue;default:if(s=i.trys,!(s=s.length>0&&s[s.length-1])&&(c[0]===6||c[0]===2)){i=0;continue}if(c[0]===3&&(!s||c[1]>s[0]&&c[1]<s[3])){i.label=c[1];break}if(c[0]===6&&i.label<s[1]){i.label=s[1],s=c;break}if(s&&i.label<s[2]){i.label=s[2],i.ops.push(c);break}s[2]&&i.ops.pop(),i.trys.pop();continue}c=t.call(e,i)}catch(f){c=[6,f],a=0}finally{n=s=0}if(c[0]&5)throw c[1];return{value:c[0]?c[1]:void 0,done:!0}}}function Gd(e,t,i){if(arguments.length===2)for(var n=0,a=t.length,s;n<a;n++)(s||!(n in t))&&(s||(s=Array.prototype.slice.call(t,0,n)),s[n]=t[n]);return e.concat(s||t)}var Os=function(){function e(t,i,n,a){this.left=t,this.top=i,this.width=n,this.height=a}return e.prototype.add=function(t,i,n,a){return new e(this.left+t,this.top+i,this.width+n,this.height+a)},e.fromClientRect=function(t,i){return new e(i.left+t.windowBounds.left,i.top+t.windowBounds.top,i.width,i.height)},e.fromDOMRectList=function(t,i){var n=Array.from(i).find(function(a){return a.width!==0});return n?new e(n.left+t.windowBounds.left,n.top+t.windowBounds.top,n.width,n.height):e.EMPTY},e.EMPTY=new e(0,0,0,0),e}(),HA=function(e,t){return Os.fromClientRect(e,t.getBoundingClientRect())},l3e=function(e){var t=e.body,i=e.documentElement;if(!t||!i)throw new Error("Unable to get document size");var n=Math.max(Math.max(t.scrollWidth,i.scrollWidth),Math.max(t.offsetWidth,i.offsetWidth),Math.max(t.clientWidth,i.clientWidth)),a=Math.max(Math.max(t.scrollHeight,i.scrollHeight),Math.max(t.offsetHeight,i.offsetHeight),Math.max(t.clientHeight,i.clientHeight));return new Os(0,0,n,a)},QA=function(e){for(var t=[],i=0,n=e.length;i<n;){var a=e.charCodeAt(i++);if(a>=55296&&a<=56319&&i<n){var s=e.charCodeAt(i++);(s&64512)===56320?t.push(((a&1023)<<10)+(s&1023)+65536):(t.push(a),i--)}else t.push(a)}return t},Ni=function(){for(var 
e=[],t=0;t<arguments.length;t++)e[t]=arguments[t];if(String.fromCodePoint)return String.fromCodePoint.apply(String,e);var i=e.length;if(!i)return"";for(var n=[],a=-1,s="";++a<i;){var r=e[a];r<=65535?n.push(r):(r-=65536,n.push((r>>10)+55296,r%1024+56320)),(a+1===i||n.length>16384)&&(s+=String.fromCharCode.apply(String,n),n.length=0)}return s},Ev="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",u3e=typeof Uint8Array>"u"?[]:new Uint8Array(256);for(var zd=0;zd<Ev.length;zd++)u3e[Ev.charCodeAt(zd)]=zd;var Bv="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvw
content: "" !important;
display: none !important;
}`,WEe=function(e){KEe(e,"."+D_+qEe+vy+`
.`+x_+VEe+vy)},KEe=function(e,t){var i=e.ownerDocument;if(i){var n=i.createElement("style");n.textContent=t,e.appendChild(n)}},ik=function(){function e(){}return e.getOrigin=function(t){var i=e._link;return i?(i.href=t,i.href=i.href,i.protocol+i.hostname+i.port):"about:blank"},e.isSameOrigin=function(t){return e.getOrigin(t)===e._origin},e.setContext=function(t){e._link=t.document.createElement("a"),e._origin=e.getOrigin(t.location.href)},e._origin="about:blank",e}(),YEe=function(){function e(t,i){this.context=t,this._options=i,this._cache={}}return e.prototype.addImage=function(t){var i=Promise.resolve();return this.has(t)||(Lm(t)||$Ee(t))&&(this._cache[t]=this.loadImage(t)).catch(function(){}),i},e.prototype.match=function(t){return this._cache[t]},e.prototype.loadImage=function(t){return kn(this,void 0,void 0,function(){var i,n,a,s,r=this;return hn(this,function(o){switch(o.label){case 0:return i=ik.isSameOrigin(t),n=!Mm(t)&&this._options.useCORS===!0&&rn.SUPPORT_CORS_IMAGES&&!i,a=!Mm(t)&&!i&&!Lm(t)&&typeof this._options.proxy=="string"&&rn.SUPPORT_CORS_XHR&&!n,!i&&this._options.allowTaint===!1&&!Mm(t)&&!Lm(t)&&!a&&!n?[2]:(s=t,a?[4,this.proxy(s)]:[3,2]);case 1:s=o.sent(),o.label=2;case 2:return this.context.logger.debug("Added image "+t.substring(0,256)),[4,new Promise(function(l,c){var f=new Image;f.onload=function(){return l(f)},f.onerror=c,(eBe(s)||n)&&(f.crossOrigin="anonymous"),f.src=s,f.complete===!0&&setTimeout(function(){return l(f)},500),r._options.imageTimeout>0&&setTimeout(function(){return c("Timed out ("+r._options.imageTimeout+"ms) loading image")},r._options.imageTimeout)})];case 3:return[2,o.sent()]}})})},e.prototype.has=function(t){return typeof this._cache[t]<"u"},e.prototype.keys=function(){return Promise.resolve(Object.keys(this._cache))},e.prototype.proxy=function(t){var i=this,n=this._options.proxy;if(!n)throw new Error("No proxy defined");var a=t.substring(0,256);return new Promise(function(s,r){var o=rn.SUPPORT_RESPONSE_TYPE?"blob":"text",l=new XMLHttpRequest;l.onload=function(){if(l.status===200)if(o==="text")s(l.response);else{var h=new FileReader;h.addEventListener("load",function(){return s(h.result)},!1),h.addEventListener("error",function(A){return r(A)},!1),h.readAsDataURL(l.response)}else r("Failed to proxy resource "+a+" with status code "+l.status)},l.onerror=r;var c=n.indexOf("?")>-1?"&":"?";if(l.open("GET",""+n+c+"url="+encodeURIComponent(t)+"&responseType="+o),o!=="text"&&l instanceof XMLHttpRequest&&(l.responseType=o),i._options.imageTimeout){var f=i._options.imageTimeout;l.timeout=f,l.ontimeout=function(){return r("Timed out ("+f+"ms) proxying "+a)}}l.send()})},e}(),XEe=/^data:image\/svg\+xml/i,JEe=/^data:image\/.*;base64,/i,ZEe=/^data:image\/.*/i,$Ee=function(e){return rn.SUPPORT_SVG_DRAWING||!tBe(e)},Mm=function(e){return ZEe.test(e)},eBe=function(e){return JEe.test(e)},Lm=function(e){return e.substr(0,4)==="blob"},tBe=function(e){return e.substr(-3).toLowerCase()==="svg"||XEe.test(e)},at=function(){function e(t,i){this.type=0,this.x=t,this.y=i}return e.prototype.add=function(t,i){return new e(this.x+t,this.y+i)},e}(),qo=function(e,t,i){return new at(e.x+(t.x-e.x)*i,e.y+(t.y-e.y)*i)},uf=function(){function e(t,i,n,a){this.type=1,this.start=t,this.startControl=i,this.endControl=n,this.end=a}return e.prototype.subdivide=function(t,i){var n=qo(this.start,this.startControl,t),a=qo(this.startControl,this.endControl,t),s=qo(this.endControl,this.end,t),r=qo(n,a,t),o=qo(a,s,t),l=qo(r,o,t);return i?new e(this.start,n,r,l):new 
e(l,o,s,this.end)},e.prototype.add=function(t,i){return new e(this.start.add(t,i),this.startControl.add(t,i),this.endControl.add(t,i),this.end.add(t,i))},e.prototype.reverse=function(){return new e(this.end,this.endControl,this.startControl,this.start)},e}(),ua=function(e){return e.type===1},iBe=function(){function e(t){var i=t.styles,n=t.bounds,a=Eu(i.borderTopLeftRadius,n.width,n.height),s=a[0],r=a[1],o=Eu(i.borderTopRightRadius,n.width,n.height),l=o[0],c=o[1],f=Eu(i.borderBottomRightRadius,n.width,n.height),h=f[0],A=f[1],m=Eu(i.borderBottomLeftRadius,n
* Font Awesome Free 6.7.2 by @fontawesome - https://fontawesome.com
* License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
* Copyright 2024 Fonticons, Inc.
*/function pSe(e,t,i){return(t=FSe(t))in e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}function ky(e,t){var i=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter(function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable})),i.push.apply(i,n)}return i}function Ye(e){for(var t=1;t<arguments.length;t++){var i=arguments[t]!=null?arguments[t]:{};t%2?ky(Object(i),!0).forEach(function(n){pSe(e,n,i[n])}):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(i)):ky(Object(i)).forEach(function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(i,n))})}return e}function _Se(e,t){if(typeof e!="object"||!e)return e;var i=e[Symbol.toPrimitive];if(i!==void 0){var n=i.call(e,t);if(typeof n!="object")return n;throw new TypeError("@@toPrimitive must return a primitive value.")}return(t==="string"?String:Number)(e)}function FSe(e){var t=_Se(e,"string");return typeof t=="symbol"?t:t+""}const Ey=()=>{};let e0={},uk={},ck=null,dk={mark:Ey,measure:Ey};try{typeof window<"u"&&(e0=window),typeof document<"u"&&(uk=document),typeof MutationObserver<"u"&&(ck=MutationObserver),typeof performance<"u"&&(dk=performance)}catch{}const{userAgent:By=""}=e0.navigator||{},Sr=e0,Fi=uk,Sy=ck,hf=dk;Sr.document;const zs=!!Fi.documentElement&&!!Fi.head&&typeof Fi.addEventListener=="function"&&typeof Fi.createElement=="function",fk=~By.indexOf("MSIE")||~By.indexOf("Trident/");var bSe=/fa(s|r|l|t|d|dr|dl|dt|b|k|kd|ss|sr|sl|st|sds|sdr|sdl|sdt)?[\-\ ]/,vSe=/Font ?Awesome ?([56 ]*)(Solid|Regular|Light|Thin|Duotone|Brands|Free|Pro|Sharp Duotone|Sharp|Kit)?.*/i,hk={classic:{fa:"solid",fas:"solid","fa-solid":"solid",far:"regular","fa-regular":"regular",fal:"light","fa-light":"light",fat:"thin","fa-thin":"thin",fab:"brands","fa-brands":"brands"},duotone:{fa:"solid",fad:"solid","fa-solid":"solid","fa-duotone":"solid",fadr:"regular","fa-regular":"regular",fadl:"light","fa-light":"light",fadt:"thin","fa-thin":"thin"},sharp:{fa:"solid",fass:"solid","fa-solid":"solid",fasr:"regular","fa-regular":"regular",fasl:"light","fa-light":"light",fast:"thin","fa-thin":"thin"},"sharp-duotone":{fa:"solid",fasds:"solid","fa-solid":"solid",fasdr:"regular","fa-regular":"regular",fasdl:"light","fa-light":"light",fasdt:"thin","fa-thin":"thin"}},ySe={GROUP:"duotone-group",PRIMARY:"primary",SECONDARY:"secondary"},Ak=["fa-classic","fa-duotone","fa-sharp","fa-sharp-duotone"],yn="classic",XA="duotone",wSe="sharp",CSe="sharp-duotone",gk=[yn,XA,wSe,CSe],kSe={classic:{900:"fas",400:"far",normal:"far",300:"fal",100:"fat"},duotone:{900:"fad",400:"fadr",300:"fadl",100:"fadt"},sharp:{900:"fass",400:"fasr",300:"fasl",100:"fast"},"sharp-duotone":{900:"fasds",400:"fasdr",300:"fasdl",100:"fasdt"}},ESe={"Font Awesome 6 Free":{900:"fas",400:"far"},"Font Awesome 6 Pro":{900:"fas",400:"far",normal:"far",300:"fal",100:"fat"},"Font Awesome 6 Brands":{400:"fab",normal:"fab"},"Font Awesome 6 Duotone":{900:"fad",400:"fadr",normal:"fadr",300:"fadl",100:"fadt"},"Font Awesome 6 Sharp":{900:"fass",400:"fasr",normal:"fasr",300:"fasl",100:"fast"},"Font Awesome 6 Sharp Duotone":{900:"fasds",400:"fasdr",normal:"fasdr",300:"fasdl",100:"fasdt"}},BSe=new 
Map([["classic",{defaultShortPrefixId:"fas",defaultStyleId:"solid",styleIds:["solid","regular","light","thin","brands"],futureStyleIds:[],defaultFontWeight:900}],["sharp",{defaultShortPrefixId:"fass",defaultStyleId:"solid",styleIds:["solid","regular","light","thin"],futureStyleIds:[],defaultFontWeight:900}],["duotone",{defaultShortPrefixId:"fad",defaultStyleId:"solid",styleIds:["solid","regular","light","thin"],futureStyleIds:[],defaultFontWeight:900}],["sharp-duotone",{defaultShortPrefixId:"fasds",defaultStyleId:"solid",styleIds:["solid","regular","light","thin"],futureStyleIds:[],defaultFontWeight:900}]]),SSe={classic:{solid:"fas",regular:"far",light:"fal",thin:"fat",brands:"fab"},duotone:{solid:"fad",regular:"fadr",light:"fadl",thin:"fadt"},sharp:{solid:"fass",regular:"fasr",light:"fasl",thin:"fast"},"sharp-duotone":{solid:"fasd
  --fa-font-solid: normal 900 1em/1 "Font Awesome 6 Free";
  --fa-font-regular: normal 400 1em/1 "Font Awesome 6 Free";
  --fa-font-light: normal 300 1em/1 "Font Awesome 6 Pro";
  --fa-font-thin: normal 100 1em/1 "Font Awesome 6 Pro";
  --fa-font-duotone: normal 900 1em/1 "Font Awesome 6 Duotone";
  --fa-font-duotone-regular: normal 400 1em/1 "Font Awesome 6 Duotone";
  --fa-font-duotone-light: normal 300 1em/1 "Font Awesome 6 Duotone";
  --fa-font-duotone-thin: normal 100 1em/1 "Font Awesome 6 Duotone";
  --fa-font-brands: normal 400 1em/1 "Font Awesome 6 Brands";
  --fa-font-sharp-solid: normal 900 1em/1 "Font Awesome 6 Sharp";
  --fa-font-sharp-regular: normal 400 1em/1 "Font Awesome 6 Sharp";
  --fa-font-sharp-light: normal 300 1em/1 "Font Awesome 6 Sharp";
  --fa-font-sharp-thin: normal 100 1em/1 "Font Awesome 6 Sharp";
  --fa-font-sharp-duotone-solid: normal 900 1em/1 "Font Awesome 6 Sharp Duotone";
  --fa-font-sharp-duotone-regular: normal 400 1em/1 "Font Awesome 6 Sharp Duotone";
  --fa-font-sharp-duotone-light: normal 300 1em/1 "Font Awesome 6 Sharp Duotone";
  --fa-font-sharp-duotone-thin: normal 100 1em/1 "Font Awesome 6 Sharp Duotone";
}
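/* Base sizing and alignment for icons rendered as inline SVG (.svg-inline--fa), plus pull, list-item and fixed-width variants */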
svg:not(:root).svg-inline--fa, svg:not(:host).svg-inline--fa {
  overflow: visible;
  box-sizing: content-box;
}

.svg-inline--fa {
  display: var(--fa-display, inline-block);
  height: 1em;
  overflow: visible;
  vertical-align: -0.125em;
}
.svg-inline--fa.fa-2xs {
  vertical-align: 0.1em;
}
.svg-inline--fa.fa-xs {
  vertical-align: 0em;
}
.svg-inline--fa.fa-sm {
  vertical-align: -0.0714285705em;
}
.svg-inline--fa.fa-lg {
  vertical-align: -0.2em;
}
.svg-inline--fa.fa-xl {
  vertical-align: -0.25em;
}
.svg-inline--fa.fa-2xl {
  vertical-align: -0.3125em;
}
.svg-inline--fa.fa-pull-left {
  margin-right: var(--fa-pull-margin, 0.3em);
  width: auto;
}
.svg-inline--fa.fa-pull-right {
  margin-left: var(--fa-pull-margin, 0.3em);
  width: auto;
}
.svg-inline--fa.fa-li {
  width: var(--fa-li-width, 2em);
  top: 0.25em;
}
.svg-inline--fa.fa-fw {
  width: var(--fa-fw-width, 1.25em);
}
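/* Layered icons: .fa-layers stacks absolutely positioned icons, text and counter badges inside a 1em box */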
.fa-layers svg.svg-inline--fa {
  bottom: 0;
  left: 0;
  margin: auto;
  position: absolute;
  right: 0;
  top: 0;
}

.fa-layers-counter, .fa-layers-text {
  display: inline-block;
  position: absolute;
  text-align: center;
}

.fa-layers {
  display: inline-block;
  height: 1em;
  position: relative;
  text-align: center;
  vertical-align: -0.125em;
  width: 1em;
}
.fa-layers svg.svg-inline--fa {
  transform-origin: center center;
}

.fa-layers-text {
  left: 50%;
  top: 50%;
  transform: translate(-50%, -50%);
  transform-origin: center center;
}

.fa-layers-counter {
  background-color: var(--fa-counter-background-color, #ff253a);
  border-radius: var(--fa-counter-border-radius, 1em);
  box-sizing: border-box;
  color: var(--fa-inverse, #fff);
  line-height: var(--fa-counter-line-height, 1);
  max-width: var(--fa-counter-max-width, 5em);
  min-width: var(--fa-counter-min-width, 1.5em);
  overflow: hidden;
  padding: var(--fa-counter-padding, 0.25em 0.5em);
  right: var(--fa-right, 0);
  text-overflow: ellipsis;
  top: var(--fa-top, 0);
  transform: scale(var(--fa-counter-scale, 0.25));
  transform-origin: top right;
}

.fa-layers-bottom-right {
  bottom: var(--fa-bottom, 0);
  right: var(--fa-right, 0);
  top: auto;
  transform: scale(var(--fa-layers-scale, 0.25));
  transform-origin: bottom right;
}

.fa-layers-bottom-left {
  bottom: var(--fa-bottom, 0);
  left: var(--fa-left, 0);
  right: auto;
  top: auto;
  transform: scale(var(--fa-layers-scale, 0.25));
  transform-origin: bottom left;
}

.fa-layers-top-right {
  top: var(--fa-top, 0);
  right: var(--fa-right, 0);
  transform: scale(var(--fa-layers-scale, 0.25));
  transform-origin: top right;
}

.fa-layers-top-left {
  left: var(--fa-left, 0);
  right: auto;
  top: var(--fa-top, 0);
  transform: scale(var(--fa-layers-scale, 0.25));
  transform-origin: top left;
}
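/* Sizing utilities: fixed multiples (.fa-1x to .fa-10x) and relative sizes (.fa-2xs to .fa-2xl) */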
.fa-1x {
  font-size: 1em;
}

.fa-2x {
  font-size: 2em;
}

.fa-3x {
  font-size: 3em;
}

.fa-4x {
  font-size: 4em;
}

.fa-5x {
  font-size: 5em;
}

.fa-6x {
  font-size: 6em;
}

.fa-7x {
  font-size: 7em;
}

.fa-8x {
  font-size: 8em;
}

.fa-9x {
  font-size: 9em;
}

.fa-10x {
  font-size: 10em;
}
.fa-2xs {
  font-size: 0.625em;
  line-height: 0.1em;
  vertical-align: 0.225em;
}

.fa-xs {
  font-size: 0.75em;
  line-height: 0.0833333337em;
  vertical-align: 0.125em;
}

.fa-sm {
  font-size: 0.875em;
  line-height: 0.0714285718em;
  vertical-align: 0.0535714295em;
}

.fa-lg {
  font-size: 1.25em;
  line-height: 0.05em;
  vertical-align: -0.075em;
}

.fa-xl {
  font-size: 1.5em;
  line-height: 0.0416666682em;
  vertical-align: -0.125em;
}

.fa-2xl {
  font-size: 2em;
  line-height: 0.03125em;
  vertical-align: -0.1875em;
}
.fa-fw {
  text-align: center;
  width: 1.25em;
}

.fa-ul {
  list-style-type: none;
  margin-left: var(--fa-li-margin, 2.5em);
  padding-left: 0;
}
.fa-ul > li {
  position: relative;
}

.fa-li {
  left: calc(-1 * var(--fa-li-width, 2em));
  position: absolute;
  text-align: center;
  width: var(--fa-li-width, 2em);
  line-height: inherit;
}

.fa-border {
  border-color: var(--fa-border-color, #eee);
  border-radius: var(--fa-border-radius, 0.1em);
  border-style: var(--fa-border-style, solid);
  border-width: var(--fa-border-width, 0.08em);
  padding: var(--fa-border-padding, 0.2em 0.25em 0.15em);
}

.fa-pull-left {
  float: left;
  margin-right: var(--fa-pull-margin, 0.3em);
}

.fa-pull-right {
  float: right;
  margin-left: var(--fa-pull-margin, 0.3em);
}
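/* Animation utility classes; delay, direction, duration, iteration count and timing can be overridden via --fa-animation-* custom properties */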
.fa-beat {
  animation-name: fa-beat;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, ease-in-out);
}

.fa-bounce {
  animation-name: fa-bounce;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, cubic-bezier(0.28, 0.84, 0.42, 1));
}

.fa-fade {
  animation-name: fa-fade;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, cubic-bezier(0.4, 0, 0.6, 1));
}

.fa-beat-fade {
  animation-name: fa-beat-fade;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, cubic-bezier(0.4, 0, 0.6, 1));
}

.fa-flip {
  animation-name: fa-flip;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, ease-in-out);
}

.fa-shake {
  animation-name: fa-shake;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, linear);
}

.fa-spin {
  animation-name: fa-spin;
  animation-delay: var(--fa-animation-delay, 0s);
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 2s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, linear);
}

.fa-spin-reverse {
  --fa-animation-direction: reverse;
}

.fa-pulse,
.fa-spin-pulse {
  animation-name: fa-spin;
  animation-direction: var(--fa-animation-direction, normal);
  animation-duration: var(--fa-animation-duration, 1s);
  animation-iteration-count: var(--fa-animation-iteration-count, infinite);
  animation-timing-function: var(--fa-animation-timing, steps(8));
}
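/* Effectively disable icon animations and transitions when the user prefers reduced motion */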
@media (prefers-reduced-motion: reduce) {
  .fa-beat,
  .fa-bounce,
  .fa-fade,
  .fa-beat-fade,
  .fa-flip,
  .fa-pulse,
  .fa-shake,
  .fa-spin,
  .fa-spin-pulse {
    animation-delay: -1ms;
    animation-duration: 1ms;
    animation-iteration-count: 1;
    transition-delay: 0s;
    transition-duration: 0s;
  }
}
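/* Keyframes referenced by the animation utility classes above */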
@keyframes fa-beat {
  0%, 90% {
    transform: scale(1);
  }
  45% {
    transform: scale(var(--fa-beat-scale, 1.25));
  }
}
@keyframes fa-bounce {
  0% {
    transform: scale(1, 1) translateY(0);
  }
  10% {
    transform: scale(var(--fa-bounce-start-scale-x, 1.1), var(--fa-bounce-start-scale-y, 0.9)) translateY(0);
  }
  30% {
    transform: scale(var(--fa-bounce-jump-scale-x, 0.9), var(--fa-bounce-jump-scale-y, 1.1)) translateY(var(--fa-bounce-height, -0.5em));
  }
  50% {
    transform: scale(var(--fa-bounce-land-scale-x, 1.05), var(--fa-bounce-land-scale-y, 0.95)) translateY(0);
  }
  57% {
    transform: scale(1, 1) translateY(var(--fa-bounce-rebound, -0.125em));
  }
  64% {
    transform: scale(1, 1) translateY(0);
  }
  100% {
    transform: scale(1, 1) translateY(0);
  }
}
@keyframes fa-fade {
  50% {
    opacity: var(--fa-fade-opacity, 0.4);
  }
}
@keyframes fa-beat-fade {
  0%, 100% {
    opacity: var(--fa-beat-fade-opacity, 0.4);
    transform: scale(1);
  }
  50% {
    opacity: 1;
    transform: scale(var(--fa-beat-fade-scale, 1.125));
  }
}
@keyframes fa-flip {
  50% {
    transform: rotate3d(var(--fa-flip-x, 0), var(--fa-flip-y, 1), var(--fa-flip-z, 0), var(--fa-flip-angle, -180deg));
  }
}
@keyframes fa-shake {
  0% {
    transform: rotate(-15deg);
  }
  4% {
    transform: rotate(15deg);
  }
  8%, 24% {
    transform: rotate(-18deg);
  }
  12%, 28% {
    transform: rotate(18deg);
  }
  16% {
    transform: rotate(-22deg);
  }
  20% {
    transform: rotate(22deg);
  }
  32% {
    transform: rotate(-12deg);
  }
  36% {
    transform: rotate(12deg);
  }
  40%, 100% {
    transform: rotate(0deg);
  }
}
@keyframes fa-spin {
  0% {
    transform: rotate(0deg);
  }
  100% {
    transform: rotate(360deg);
  }
}
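/* Static rotation and flip transforms */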
.fa-rotate-90 {
  transform: rotate(90deg);
}

.fa-rotate-180 {
  transform: rotate(180deg);
}

.fa-rotate-270 {
  transform: rotate(270deg);
}

.fa-flip-horizontal {
  transform: scale(-1, 1);
}

.fa-flip-vertical {
  transform: scale(1, -1);
}

.fa-flip-both,
.fa-flip-horizontal.fa-flip-vertical {
  transform: scale(-1, -1);
}

.fa-rotate-by {
  transform: rotate(var(--fa-rotate-angle, 0));
}
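/* Stacked icons: a 2em-tall, 2.5em-wide canvas with absolutely positioned 1x and 2x layers */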
.fa-stack {
  display: inline-block;
  vertical-align: middle;
  height: 2em;
  position: relative;
  width: 2.5em;
}

.fa-stack-1x,
.fa-stack-2x {
  bottom: 0;
  left: 0;
  margin: auto;
  position: absolute;
  right: 0;
  top: 0;
  z-index: var(--fa-stack-z-index, auto);
}

.svg-inline--fa.fa-stack-1x {
  height: 1em;
  width: 1.25em;
}
.svg-inline--fa.fa-stack-2x {
  height: 2em;
  width: 2.5em;
}

.fa-inverse {
  color: var(--fa-inverse, #fff);
}
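/* Screen-reader-only helpers; the -focusable variants stay visually hidden only while not focused */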
.sr-only,
.fa-sr-only {
  position: absolute;
  width: 1px;
  height: 1px;
  padding: 0;
  margin: -1px;
  overflow: hidden;
  clip: rect(0, 0, 0, 0);
  white-space: nowrap;
  border-width: 0;
}

.sr-only-focusable:not(:focus),
.fa-sr-only-focusable:not(:focus) {
  position: absolute;
  width: 1px;
  height: 1px;
  padding: 0;
  margin: -1px;
  overflow: hidden;
  clip: rect(0, 0, 0, 0);
  white-space: nowrap;
  border-width: 0;
}
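/* Duotone layer fills and opacities, including .fa-swap-opacity overrides and black mask fills */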
.svg-inline--fa .fa-primary {
  fill: var(--fa-primary-color, currentColor);
  opacity: var(--fa-primary-opacity, 1);
}

.svg-inline--fa .fa-secondary {
  fill: var(--fa-secondary-color, currentColor);
  opacity: var(--fa-secondary-opacity, 0.4);
}

.svg-inline--fa.fa-swap-opacity .fa-primary {
  opacity: var(--fa-secondary-opacity, 0.4);
}

.svg-inline--fa.fa-swap-opacity .fa-secondary {
  opacity: var(--fa-primary-opacity, 1);
}

.svg-inline--fa mask .fa-primary,
.svg-inline--fa mask .fa-secondary {
  fill: black;
}`;function Ck(){const e=pk,t=_k,i=mt.cssPrefix,n=mt.replacementClass;let a=oDe;if(i!==e||n!==t){const s=new RegExp("\\.".concat(e,"\\-"),"g"),r=new RegExp("\\--".concat(e,"\\-"),"g"),o=new RegExp("\\.".concat(t),"g");a=a.replace(s,".".concat(i,"-")).replace(r,"--".concat(i,"-")).replace(o,".".concat(n))}return a}let Py=!1;function Om(){mt.autoAddCss&&!Py&&(iDe(Ck()),Py=!0)}var lDe={mixout(){return{dom:{css:Ck,insertCss:Om}}},hooks(){return{beforeDOMElementCreation(){Om()},beforeI2svg(){Om()}}}};const Hs=Sr||{};Hs[Ns]||(Hs[Ns]={});Hs[Ns].styles||(Hs[Ns].styles={});Hs[Ns].hooks||(Hs[Ns].hooks={});Hs[Ns].shims||(Hs[Ns].shims=[]);var Ka=Hs[Ns];const kk=[],Ek=function(){Fi.removeEventListener("DOMContentLoaded",Ek),tA=1,kk.map(e=>e())};let tA=!1;zs&&(tA=(Fi.documentElement.doScroll?/^loaded|^c/:/^loaded|^i|^c/).test(Fi.readyState),tA||Fi.addEventListener("DOMContentLoaded",Ek));function uDe(e){zs&&(tA?setTimeout(e,0):kk.push(e))}function Ad(e){const{tag:t,attributes:i={},children:n=[]}=e;return typeof e=="string"?wk(e):"<".concat(t," ").concat(aDe(i),">").concat(n.map(Ad).join(""),"</").concat(t,">")}function My(e,t,i){if(e&&e[t]&&e[t][i])return{prefix:t,iconName:i,icon:e[t][i]}}var Nm=function(t,i,n,a){var s=Object.keys(t),r=s.length,o=i,l,c,f;for(n===void 0?(l=1,f=t[s[0]]):(l=0,f=n);l<r;l++)c=s[l],f=o(f,t[c],c,t);return f};function cDe(e){const t=[];let i=0;const n=e.length;for(;i<n;){const a=e.charCodeAt(i++);if(a>=55296&&a<=56319&&i<n){const s=e.charCodeAt(i++);(s&64512)==56320?t.push(((a&1023)<<10)+(s&1023)+65536):(t.push(a),i--)}else t.push(a)}return t}function N_(e){const t=cDe(e);return t.length===1?t[0].toString(16):null}function dDe(e,t){const i=e.length;let n=e.charCodeAt(t),a;return n>=55296&&n<=56319&&i>t+1&&(a=e.charCodeAt(t+1),a>=56320&&a<=57343)?(n-55296)*1024+a-56320+65536:n}function Ly(e){return Object.keys(e).reduce((t,i)=>{const n=e[i];return!!n.icon?t[n.iconName]=n.icon:t[i]=n,t},{})}function H_(e,t){let i=arguments.length>2&&arguments[2]!==void 0?arguments[2]:{};const{skipHooks:n=!1}=i,a=Ly(t);typeof Ka.hooks.addPack=="function"&&!n?Ka.hooks.addPack(e,Ly(t)):Ka.styles[e]=Ye(Ye({},Ka.styles[e]||{}),a),e==="fas"&&H_("fa",t)}const{styles:Zc,shims:fDe}=Ka,Bk=Object.keys(n0),hDe=Bk.reduce((e,t)=>(e[t]=Object.keys(n0[t]),e),{});let r0=null,Sk={},Dk={},xk={},Tk={},Ik={};function ADe(e){return~ZSe.indexOf(e)}function gDe(e,t){const i=t.split("-"),n=i[0],a=i.slice(1).join("-");return n===e&&a!==""&&!ADe(a)?a:null}const Pk=()=>{const e=n=>Nm(Zc,(a,s,r)=>(a[r]=Nm(s,n,{}),a),{});Sk=e((n,a,s)=>(a[3]&&(n[a[3]]=s),a[2]&&a[2].filter(o=>typeof o=="number").forEach(o=>{n[o.toString(16)]=s}),n)),Dk=e((n,a,s)=>(n[s]=s,a[2]&&a[2].filter(o=>typeof o=="string").forEach(o=>{n[o]=s}),n)),Ik=e((n,a,s)=>{const r=a[2];return n[s]=s,r.forEach(o=>{n[o]=s}),n});const t="far"in Zc||mt.autoFetchSvg,i=Nm(fDe,(n,a)=>{const s=a[0];let r=a[1];const o=a[2];return r==="far"&&!t&&(r="fas"),typeof s=="string"&&(n.names[s]={prefix:r,iconName:o}),typeof s=="number"&&(n.unicodes[s.toString(16)]={prefix:r,iconName:o}),n},{names:{},unicodes:{}});xk=i.names,Tk=i.unicodes,r0=ZA(mt.styleDefault,{family:mt.familyDefault})};tDe(e=>{r0=ZA(e.styleDefault,{family:mt.familyDefault})});Pk();function o0(e,t){return(Sk[e]||{})[t]}function mDe(e,t){return(Dk[e]||{})[t]}function to(e,t){return(Ik[e]||{})[t]}function Mk(e){return xk[e]||{prefix:null,iconName:null}}function pDe(e){const t=Tk[e],i=o0("fas",e);return t||(i?{prefix:"fas",iconName:i}:null)||{prefix:null,iconName:null}}function Dr(){return r0}const 
Lk=()=>({prefix:null,iconName:null,rest:[]});function _De(e){let t=yn;const i=Bk.reduce((n,a)=>(n[a]="".concat(mt.cssPrefix,"-").concat(a),n),{});return gk.forEach(n=>{(e.includes(i[n])||e.some(a=>hDe[n].includes(a)))&&(t=n)}),t}function ZA(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{};const{family:i=yn}=t,n=WSe[i][e];if(i===XA&&!e)return"fad";const a=Iy[i][e]||Iy[i][n],s=e in Ka.styles?e:null;return a||s||null}function FDe(e){let t=[],i=null;return e.forEach(n=>{const a=gDe(mt.cssPrefix,n);a?i=a:n&&t.push(n)}),{iconName:i,rest:t}}funct
`);t.setAttribute(vo,""),t.innerHTML=a}};function Qy(e){e()}function Nk(e,t){const i=typeof t=="function"?t:Mf;if(e.length===0)i();else{let n=Qy;mt.mutateApproach===qSe&&(n=Sr.requestAnimationFrame||Qy),n(()=>{const a=NDe(),s=u0.begin("mutate");e.map(a),s(),i()})}}let c0=!1;function Hk(){c0=!0}function V_(){c0=!1}let iA=null;function jy(e){if(!Sy||!mt.observeMutations)return;const{treeCallback:t=Mf,nodeCallback:i=Mf,pseudoElementsCallback:n=Mf,observeMutationsRoot:a=Fi}=e;iA=new Sy(s=>{if(c0)return;const r=Dr();Kl(s).forEach(o=>{if(o.type==="childList"&&o.addedNodes.length>0&&!Hy(o.addedNodes[0])&&(mt.searchPseudoElements&&n(o.target),t(o.target)),o.type==="attributes"&&o.target.parentNode&&mt.searchPseudoElements&&n(o.target.parentNode),o.type==="attributes"&&Hy(o.target)&&~JSe.indexOf(o.attributeName))if(o.attributeName==="class"&&UDe(o.target)){const{prefix:l,iconName:c}=$A(a0(o.target));o.target.setAttribute(t0,l||r),c&&o.target.setAttribute(i0,c)}else ODe(o.target)&&i(o.target)})}),zs&&iA.observe(a,{childList:!0,attributes:!0,characterData:!0,subtree:!0})}function GDe(){iA&&iA.disconnect()}function zDe(e){const t=e.getAttribute("style");let i=[];return t&&(i=t.split(";").reduce((n,a)=>{const s=a.split(":"),r=s[0],o=s.slice(1);return r&&o.length>0&&(n[r]=o.join(":").trim()),n},{})),i}function qDe(e){const t=e.getAttribute("data-prefix"),i=e.getAttribute("data-icon"),n=e.innerText!==void 0?e.innerText.trim():"";let a=$A(a0(e));return a.prefix||(a.prefix=Dr()),t&&i&&(a.prefix=t,a.iconName=i),a.iconName&&a.prefix||(a.prefix&&n.length>0&&(a.iconName=mDe(a.prefix,e.innerText)||o0(a.prefix,N_(e.innerText))),!a.iconName&&mt.autoFetchSvg&&e.firstChild&&e.firstChild.nodeType===Node.TEXT_NODE&&(a.iconName=e.firstChild.data)),a}function VDe(e){const t=Kl(e.attributes).reduce((a,s)=>(a.name!=="class"&&a.name!=="style"&&(a[s.name]=s.value),a),{}),i=e.getAttribute("title"),n=e.getAttribute("data-fa-title-id");return mt.autoA11y&&(i?t["aria-labelledby"]="".concat(mt.replacementClass,"-title-").concat(n||Jc()):(t["aria-hidden"]="true",t.focusable="false")),t}function WDe(){return{iconName:null,title:null,titleId:null,prefix:null,transform:Wa,symbol:!1,mask:{iconName:null,prefix:null,rest:[]},maskId:null,extra:{classes:[],styles:{},attributes:{}}}}function Gy(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{styleParser:!0};const{iconName:i,prefix:n,rest:a}=qDe(e),s=VDe(e),r=Q_("parseNodeAttributes",{},e);let o=t.styleParser?zDe(e):[];return Ye({iconName:i,title:e.getAttribute("title"),titleId:e.getAttribute("data-fa-title-id"),prefix:n,transform:Wa,mask:{iconName:null,prefix:null,rest:[]},maskId:null,symbol:!1,extra:{classes:a,styles:o,attributes:s}},r)}const{styles:KDe}=Ka;function Qk(e){const t=mt.autoReplaceSvg==="nest"?Gy(e,{styleParser:!1}):Gy(e);return~t.extra.classes.indexOf(vk)?xr("generateLayersText",e,t):xr("generateSvgReplacementMutation",e,t)}function YDe(){return[...DSe,...P_]}function zy(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:null;if(!zs)return Promise.resolve();const i=Fi.documentElement.classList,n=f=>i.add("".concat(Ty,"-").concat(f)),a=f=>i.remove("".concat(Ty,"-").concat(f)),s=mt.autoFetchSvg?YDe():Ak.concat(Object.keys(KDe));s.includes("fa")||s.push("fa");const r=[".".concat(vk,":not([").concat(vo,"])")].concat(s.map(f=>".".concat(f,":not([").concat(vo,"])"))).join(", ");if(r.length===0)return Promise.resolve();let o=[];try{o=Kl(e.querySelectorAll(r))}catch{}if(o.length>0)n("pending"),a("complete");else return Promise.resolve();const 
l=u0.begin("onTree"),c=o.reduce((f,h)=>{try{const A=Qk(h);A&&f.push(A)}catch(A){Fk||A.name==="MissingIcon"&&console.error(A)}return f},[]);return new Promise((f,h)=>{Promise.all(c).then(A=>{Nk(A,()=>{n("active"),n("complete"),a("pending"),typeof t=="function"&&t(),l(),f()})}).catch(A=>{l(),h(A)})})}function XDe(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:null;Qk(e).then(i=>{i&&Nk([i],t)})}function JDe(e){return function(t){let i=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{};const n=(t||{}).icon?t:j_(t||{});let{m
`),e.removeAttribute(i),n()}).catch(a)}else n()}else n()})}function oxe(e){return Promise.all([Wy(e,"::before"),Wy(e,"::after")])}function lxe(e){return e.parentNode!==document.head&&!~VSe.indexOf(e.tagName.toUpperCase())&&!e.getAttribute(L_)&&(!e.parentNode||e.parentNode.tagName!=="svg")}function Ky(e){if(zs)return new Promise((t,i)=>{const n=Kl(e.querySelectorAll("*")).filter(lxe).map(oxe),a=u0.begin("searchPseudoElements");Hk(),Promise.all(n).then(()=>{a(),V_(),t()}).catch(()=>{a(),V_(),i()})})}var uxe={hooks(){return{mutationObserverCallbacks(e){return e.pseudoElementsCallback=Ky,e}}},provides(e){e.pseudoElements2svg=function(t){const{node:i=Fi}=t;mt.searchPseudoElements&&Ky(i)}}};let Yy=!1;var cxe={mixout(){return{dom:{unwatch(){Hk(),Yy=!0}}}},hooks(){return{bootstrap(){jy(Q_("mutationObserverCallbacks",{}))},noAuto(){GDe()},watch(e){const{observeMutationsRoot:t}=e;Yy?V_():jy(Q_("mutationObserverCallbacks",{observeMutationsRoot:t}))}}}};const Xy=e=>{let t={size:16,x:0,y:0,flipX:!1,flipY:!1,rotate:0};return e.toLowerCase().split(" ").reduce((i,n)=>{const a=n.toLowerCase().split("-"),s=a[0];let r=a.slice(1).join("-");if(s&&r==="h")return i.flipX=!0,i;if(s&&r==="v")return i.flipY=!0,i;if(r=parseFloat(r),isNaN(r))return i;switch(s){case"grow":i.size=i.size+r;break;case"shrink":i.size=i.size-r;break;case"left":i.x=i.x-r;break;case"right":i.x=i.x+r;break;case"up":i.y=i.y-r;break;case"down":i.y=i.y+r;break;case"rotate":i.rotate=i.rotate+r;break}return i},t)};var dxe={mixout(){return{parse:{transform:e=>Xy(e)}}},hooks(){return{parseNodeAttributes(e,t){const i=t.getAttribute("data-fa-transform");return i&&(e.transform=Xy(i)),e}}},provides(e){e.generateAbstractTransformGrouping=function(t){let{main:i,transform:n,containerWidth:a,iconWidth:s}=t;const r={transform:"translate(".concat(a/2," 256)")},o="translate(".concat(n.x*32,", ").concat(n.y*32,") "),l="scale(".concat(n.size/16*(n.flipX?-1:1),", ").concat(n.size/16*(n.flipY?-1:1),") "),c="rotate(".concat(n.rotate," 0 0)"),f={transform:"".concat(o," ").concat(l," ").concat(c)},h={transform:"translate(".concat(s/2*-1," -256)")},A={outer:r,inner:f,path:h};return{tag:"g",attributes:Ye({},A.outer),children:[{tag:"g",attributes:Ye({},A.inner),children:[{tag:i.icon.tag,children:i.icon.children,attributes:Ye(Ye({},i.icon.attributes),A.path)}]}]}}}};const Qm={x:0,y:0,width:"100%",height:"100%"};function Jy(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:!0;return e.attributes&&(e.attributes.fill||t)&&(e.attributes.fill="black"),e}function fxe(e){return e.tag==="g"?e.children:[e]}var hxe={hooks(){return{parseNodeAttributes(e,t){const i=t.getAttribute("data-fa-mask"),n=i?$A(i.split(" ").map(a=>a.trim())):Lk();return n.prefix||(n.prefix=Dr()),e.mask=n,e.maskId=t.getAttribute("data-fa-mask-id"),e}}},provides(e){e.generateAbstractMask=function(t){let{children:i,attributes:n,main:a,mask:s,maskId:r,transform:o}=t;const{width:l,icon:c}=a,{width:f,icon:h}=s,A=sDe({transform:o,containerWidth:f,iconWidth:l}),m={tag:"rect",attributes:Ye(Ye({},Qm),{},{fill:"white"})},F=c.children?{children:c.children.map(Jy)}:{},y={tag:"g",attributes:Ye({},A.inner),children:[Jy(Ye({tag:c.tag,attributes:Ye(Ye({},c.attributes),A.path)},F))]},k={tag:"g",attributes:Ye({},A.outer),children:[y]},C="mask-".concat(r||Jc()),w="clip-".concat(r||Jc()),B={tag:"mask",attributes:Ye(Ye({},Qm),{},{id:C,maskUnits:"userSpaceOnUse",maskContentUnits:"userSpaceOnUse"}),children:[m,k]},S={tag:"defs",children:[{tag:"clipPath",attributes:{id:w},children:fxe(h)},B]};return 
i.push(S,{tag:"rect",attributes:Ye({fill:"currentColor","clip-path":"url(#".concat(w,")"),mask:"url(#".concat(C,")")},Qm)}),{children:i,attributes:n}}}},Axe={provides(e){let t=!1;Sr.matchMedia&&(t=Sr.matchMedia("(prefers-reduced-motion: reduce)").matches),e.missingIconAbstract=function(){const i=[],n={fill:"currentColor"},a={attributeType:"XML",repeatCount:"indefinite",dur:"2s"};i.push({tag:"path",attributes:Ye(Ye({},n),{},{d:"M156.5,447.7l-12.6,29.5c-18.7-9.5-35.9-21.2-51.5-34.9l22.7-22.7C127.6,430.5,141.5,440,156.5,447.7z M40.6,272H8.5 c1.4,21.2,5.4,41
* Font Awesome Free 6.7.2 by @fontawesome - https://fontawesome.com
* License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
* Copyright 2024 Fonticons, Inc.
*/const Txe={prefix:"fas",iconName:"forward-step",icon:[320,512,["step-forward"],"f051","M52.5 440.6c-9.5 7.9-22.8 9.7-34.1 4.4S0 428.4 0 416L0 96C0 83.6 7.2 72.3 18.4 67s24.5-3.6 34.1 4.4l192 160L256 241l0-145c0-17.7 14.3-32 32-32s32 14.3 32 32l0 320c0 17.7-14.3 32-32 32s-32-14.3-32-32l0-145-11.5 9.6-192 160z"]},Ixe=Txe,Pxe={prefix:"fas",iconName:"address-book",icon:[512,512,[62138,"contact-book"],"f2b9","M96 0C60.7 0 32 28.7 32 64l0 384c0 35.3 28.7 64 64 64l288 0c35.3 0 64-28.7 64-64l0-384c0-35.3-28.7-64-64-64L96 0zM208 288l64 0c44.2 0 80 35.8 80 80c0 8.8-7.2 16-16 16l-192 0c-8.8 0-16-7.2-16-16c0-44.2 35.8-80 80-80zm-32-96a64 64 0 1 1 128 0 64 64 0 1 1 -128 0zM512 80c0-8.8-7.2-16-16-16s-16 7.2-16 16l0 64c0 8.8 7.2 16 16 16s16-7.2 16-16l0-64zM496 192c-8.8 0-16 7.2-16 16l0 64c0 8.8 7.2 16 16 16s16-7.2 16-16l0-64c0-8.8-7.2-16-16-16zm16 144c0-8.8-7.2-16-16-16s-16 7.2-16 16l0 64c0 8.8 7.2 16 16 16s16-7.2 16-16l0-64z"]},Mxe={prefix:"fas",iconName:"backward",icon:[512,512,[9194],"f04a","M459.5 440.6c9.5 7.9 22.8 9.7 34.1 4.4s18.4-16.6 18.4-29l0-320c0-12.4-7.2-23.7-18.4-29s-24.5-3.6-34.1 4.4L288 214.3l0 41.7 0 41.7L459.5 440.6zM256 352l0-96 0-128 0-32c0-12.4-7.2-23.7-18.4-29s-24.5-3.6-34.1 4.4l-192 160C4.2 237.5 0 246.5 0 256s4.2 18.5 11.5 24.6l192 160c9.5 7.9 22.8 9.7 34.1 4.4s18.4-16.6 18.4-29l0-64z"]},Lxe={prefix:"fas",iconName:"volume-low",icon:[448,512,[128264,"volume-down"],"f027","M301.1 34.8C312.6 40 320 51.4 320 64l0 384c0 12.6-7.4 24-18.9 29.2s-25 3.1-34.4-5.3L131.8 352 64 352c-35.3 0-64-28.7-64-64l0-64c0-35.3 28.7-64 64-64l67.8 0L266.7 40.1c9.4-8.4 22.9-10.4 34.4-5.3zM412.6 181.5C434.1 199.1 448 225.9 448 256s-13.9 56.9-35.4 74.5c-10.3 8.4-25.4 6.8-33.8-3.5s-6.8-25.4 3.5-33.8C393.1 284.4 400 271 400 256s-6.9-28.4-17.7-37.3c-10.3-8.4-11.8-23.5-3.5-33.8s23.5-11.8 33.8-3.5z"]},Rxe={prefix:"fas",iconName:"lock",icon:[448,512,[128274],"f023","M144 144l0 48 160 0 0-48c0-44.2-35.8-80-80-80s-80 35.8-80 80zM80 192l0-48C80 64.5 144.5 0 224 0s144 64.5 144 144l0 48 16 0c35.3 0 64 28.7 64 64l0 192c0 35.3-28.7 64-64 64L64 512c-35.3 0-64-28.7-64-64L0 256c0-35.3 28.7-64 64-64l16 0z"]},Uxe={prefix:"fas",iconName:"angle-right",icon:[320,512,[8250],"f105","M278.6 233.4c12.5 12.5 12.5 32.8 0 45.3l-160 160c-12.5 12.5-32.8 12.5-45.3 0s-12.5-32.8 0-45.3L210.7 256 73.4 118.6c-12.5-12.5-12.5-32.8 0-45.3s32.8-12.5 45.3 0l160 160z"]},Oxe={prefix:"fas",iconName:"globe",icon:[512,512,[127760],"f0ac","M352 256c0 22.2-1.2 43.6-3.3 64l-185.3 0c-2.2-20.4-3.3-41.8-3.3-64s1.2-43.6 3.3-64l185.3 0c2.2 20.4 3.3 41.8 3.3 64zm28.8-64l123.1 0c5.3 20.5 8.1 41.9 8.1 64s-2.8 43.5-8.1 64l-123.1 0c2.1-20.6 3.2-42 3.2-64s-1.1-43.4-3.2-64zm112.6-32l-116.7 0c-10-63.9-29.8-117.4-55.3-151.6c78.3 20.7 142 77.5 171.9 151.6zm-149.1 0l-176.6 0c6.1-36.4 15.5-68.6 27-94.7c10.5-23.6 22.2-40.7 33.5-51.5C239.4 3.2 248.7 0 256 0s16.6 3.2 27.8 13.8c11.3 10.8 23 27.9 33.5 51.5c11.6 26 20.9 58.2 27 94.7zm-209 0L18.6 160C48.6 85.9 112.2 29.1 190.6 8.4C165.1 42.6 145.3 96.1 135.3 160zM8.1 192l123.1 0c-2.1 20.6-3.2 42-3.2 64s1.1 43.4 3.2 64L8.1 320C2.8 299.5 0 278.1 0 256s2.8-43.5 8.1-64zM194.7 446.6c-11.6-26-20.9-58.2-27-94.6l176.6 0c-6.1 36.4-15.5 68.6-27 94.6c-10.5 23.6-22.2 40.7-33.5 51.5C272.6 508.8 263.3 512 256 512s-16.6-3.2-27.8-13.8c-11.3-10.8-23-27.9-33.5-51.5zM135.3 352c10 63.9 29.8 117.4 55.3 151.6C112.2 482.9 48.6 426.1 18.6 352l116.7 0zm358.1 0c-30 74.1-93.6 130.9-171.9 151.6c25.5-34.2 45.2-87.7 55.3-151.6l116.7 0z"]},Nxe={prefix:"fas",iconName:"server",icon:[512,512,[],"f233","M64 32C28.7 32 0 60.7 0 96l0 64c0 35.3 28.7 64 64 64l384 
0c35.3 0 64-28.7 64-64l0-64c0-35.3-28.7-64-64-64L64 32zm280 72a24 24 0 1 1 0 48 24 24 0 1 1 0-48zm48 24a24 24 0 1 1 48 0 24 24 0 1 1 -48 0zM64 288c-35.3 0-64 28.7-64 64l0 64c0 35.3 28.7 64 64 64l384 0c35.3 0 64-28.7 64-64l0-64c0-35.3-28.7-64-64-64L64 288zm280 72a24 24 0 1 1 0 48 24 24 0 1 1 0-48zm56 24a24 24 0 1 1 48 0 24 24 0 1 1 -48 0z"]},Hxe={prefix:"fas",iconName:"pause",icon:[320,512,[9208],"f04c","M48 64C21.5 64 0 85.5 0 112L0 400c0 26.5 21.5 48 48 48l32 0c26.5 0 48-21.5 48-48l0-288c0-26.5-21.5-48-48-48L48 64zm192 0c-26.5 0-48 21.