// Main/resources/[phone]/roadphone/public/assets/index-O0x8XRdw.js (minified JavaScript bundle, 4872 lines, 3 MiB)
(function(){const t=document.createElement("link").relList;if(t&&t.supports&&t.supports("modulepreload"))return;for(const a of document.querySelectorAll('link[rel="modulepreload"]'))n(a);new MutationObserver(a=>{for(const s of a)if(s.type==="childList")for(const r of s.addedNodes)r.tagName==="LINK"&&r.rel==="modulepreload"&&n(r)}).observe(document,{childList:!0,subtree:!0});function i(a){const s={};return a.integrity&&(s.integrity=a.integrity),a.referrerPolicy&&(s.referrerPolicy=a.referrerPolicy),a.crossOrigin==="use-credentials"?s.credentials="include":a.crossOrigin==="anonymous"?s.credentials="omit":s.credentials="same-origin",s}function n(a){if(a.ep)return;a.ep=!0;const s=i(a);fetch(a.href,s)}})();/**
* @vue/shared v3.5.13
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**//*! #__NO_SIDE_EFFECTS__ */function G_(e){const t=Object.create(null);for(const i of e.split(","))t[i]=1;return i=>i in t}const ri={},il=[],Wa=()=>{},Nk=()=>!1,$A=e=>e.charCodeAt(0)===111&&e.charCodeAt(1)===110&&(e.charCodeAt(2)>122||e.charCodeAt(2)<97),z_=e=>e.startsWith("onUpdate:"),$i=Object.assign,q_=(e,t)=>{const i=e.indexOf(t);i>-1&&e.splice(i,1)},Hk=Object.prototype.hasOwnProperty,Zt=(e,t)=>Hk.call(e,t),Ct=Array.isArray,nl=e=>eh(e)==="[object Map]",K2=e=>eh(e)==="[object Set]",Bt=e=>typeof e=="function",Ti=e=>typeof e=="string",Ls=e=>typeof e=="symbol",Fi=e=>e!==null&&typeof e=="object",Y2=e=>(Fi(e)||Bt(e))&&Bt(e.then)&&Bt(e.catch),X2=Object.prototype.toString,eh=e=>X2.call(e),Qk=e=>eh(e).slice(8,-1),J2=e=>eh(e)==="[object Object]",V_=e=>Ti(e)&&e!=="NaN"&&e[0]!=="-"&&""+parseInt(e,10)===e,Su=G_(",key,ref,ref_for,ref_key,onVnodeBeforeMount,onVnodeMounted,onVnodeBeforeUpdate,onVnodeUpdated,onVnodeBeforeUnmount,onVnodeUnmounted"),th=e=>{const t=Object.create(null);return i=>t[i]||(t[i]=e(i))},jk=/-(\w)/g,ha=th(e=>e.replace(jk,(t,i)=>i?i.toUpperCase():"")),Gk=/\B([A-Z])/g,Cr=th(e=>e.replace(Gk,"-$1").toLowerCase()),ih=th(e=>e.charAt(0).toUpperCase()+e.slice(1)),_g=th(e=>e?`on${ih(e)}`:""),or=(e,t)=>!Object.is(e,t),ff=(e,...t)=>{for(let i=0;i<e.length;i++)e[i](...t)},Z2=(e,t,i,n=!1)=>{Object.defineProperty(e,t,{configurable:!0,enumerable:!1,writable:n,value:i})},Rp=e=>{const t=parseFloat(e);return isNaN(t)?e:t},zk=e=>{const t=Ti(e)?Number(e):NaN;return isNaN(t)?e:t};let V0;const nh=()=>V0||(V0=typeof globalThis<"u"?globalThis:typeof self<"u"?self:typeof window<"u"?window:typeof global<"u"?global:{});function $e(e){if(Ct(e)){const t={};for(let i=0;i<e.length;i++){const n=e[i],a=Ti(n)?Kk(n):$e(n);if(a)for(const s in a)t[s]=a[s]}return t}else if(Ti(e)||Fi(e))return e}const qk=/;(?![^(]*\))/g,Vk=/:([^]+)/,Wk=/\/\*[^]*?\*\//g;function Kk(e){const t={};return e.replace(Wk,"").split(qk).forEach(i=>{if(i){const n=i.split(Vk);n.length>1&&(t[n[0].trim()]=n[1].trim())}}),t}function H(e){let t="";if(Ti(e))t=e;else if(Ct(e))for(let i=0;i<e.length;i++){const n=H(e[i]);n&&(t+=n+" ")}else if(Fi(e))for(const i in e)e[i]&&(t+=i+" ");return t.trim()}const Yk="itemscope,allowfullscreen,formnovalidate,ismap,nomodule,novalidate,readonly",Xk=G_(Yk);function $2(e){return!!e||e===""}const ey=e=>!!(e&&e.__v_isRef===!0),m=e=>Ti(e)?e:e==null?"":Ct(e)||Fi(e)&&(e.toString===X2||!Bt(e.toString))?ey(e)?m(e.value):JSON.stringify(e,ty,2):String(e),ty=(e,t)=>ey(t)?ty(e,t.value):nl(t)?{[`Map(${t.size})`]:[...t.entries()].reduce((i,[n,a],s)=>(i[Fg(n,s)+" =>"]=a,i),{})}:K2(t)?{[`Set(${t.size})`]:[...t.values()].map(i=>Fg(i))}:Ls(t)?Fg(t):Fi(t)&&!Ct(t)&&!J2(t)?String(t):t,Fg=(e,t="")=>{var i;return Ls(e)?`Symbol(${(i=e.description)!=null?i:t})`:e};/**
* @vue/reactivity v3.5.13
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**/let yn;class iy{constructor(t=!1){this.detached=t,this._active=!0,this.effects=[],this.cleanups=[],this._isPaused=!1,this.parent=yn,!t&&yn&&(this.index=(yn.scopes||(yn.scopes=[])).push(this)-1)}get active(){return this._active}pause(){if(this._active){this._isPaused=!0;let t,i;if(this.scopes)for(t=0,i=this.scopes.length;t<i;t++)this.scopes[t].pause();for(t=0,i=this.effects.length;t<i;t++)this.effects[t].pause()}}resume(){if(this._active&&this._isPaused){this._isPaused=!1;let t,i;if(this.scopes)for(t=0,i=this.scopes.length;t<i;t++)this.scopes[t].resume();for(t=0,i=this.effects.length;t<i;t++)this.effects[t].resume()}}run(t){if(this._active){const i=yn;try{return yn=this,t()}finally{yn=i}}}on(){yn=this}off(){yn=this.parent}stop(t){if(this._active){this._active=!1;let i,n;for(i=0,n=this.effects.length;i<n;i++)this.effects[i].stop();for(this.effects.length=0,i=0,n=this.cleanups.length;i<n;i++)this.cleanups[i]();if(this.cleanups.length=0,this.scopes){for(i=0,n=this.scopes.length;i<n;i++)this.scopes[i].stop(!0);this.scopes.length=0}if(!this.detached&&this.parent&&!t){const a=this.parent.scopes.pop();a&&a!==this&&(this.parent.scopes[this.index]=a,a.index=this.index)}this.parent=void 0}}}function ny(e){return new iy(e)}function ay(){return yn}function Jk(e,t=!1){yn&&yn.cleanups.push(e)}let Ai;const bg=new WeakSet;class sy{constructor(t){this.fn=t,this.deps=void 0,this.depsTail=void 0,this.flags=5,this.next=void 0,this.cleanup=void 0,this.scheduler=void 0,yn&&yn.active&&yn.effects.push(this)}pause(){this.flags|=64}resume(){this.flags&64&&(this.flags&=-65,bg.has(this)&&(bg.delete(this),this.trigger()))}notify(){this.flags&2&&!(this.flags&32)||this.flags&8||oy(this)}run(){if(!(this.flags&1))return this.fn();this.flags|=2,W0(this),ly(this);const t=Ai,i=Ea;Ai=this,Ea=!0;try{return this.fn()}finally{uy(this),Ai=t,Ea=i,this.flags&=-3}}stop(){if(this.flags&1){for(let t=this.deps;t;t=t.nextDep)Y_(t);this.deps=this.depsTail=void 0,W0(this),this.onStop&&this.onStop(),this.flags&=-2}}trigger(){this.flags&64?bg.add(this):this.scheduler?this.scheduler():this.runIfDirty()}runIfDirty(){Up(this)&&this.run()}get dirty(){return Up(this)}}let ry=0,Du,xu;function oy(e,t=!1){if(e.flags|=8,t){e.next=xu,xu=e;return}e.next=Du,Du=e}function W_(){ry++}function K_(){if(--ry>0)return;if(xu){let t=xu;for(xu=void 0;t;){const i=t.next;t.next=void 0,t.flags&=-9,t=i}}let e;for(;Du;){let t=Du;for(Du=void 0;t;){const i=t.next;if(t.next=void 0,t.flags&=-9,t.flags&1)try{t.trigger()}catch(n){e||(e=n)}t=i}}if(e)throw e}function ly(e){for(let t=e.deps;t;t=t.nextDep)t.version=-1,t.prevActiveLink=t.dep.activeLink,t.dep.activeLink=t}function uy(e){let t,i=e.depsTail,n=i;for(;n;){const a=n.prevDep;n.version===-1?(n===i&&(i=a),Y_(n),Zk(n)):t=n,n.dep.activeLink=n.prevActiveLink,n.prevActiveLink=void 0,n=a}e.deps=t,e.depsTail=i}function Up(e){for(let t=e.deps;t;t=t.nextDep)if(t.dep.version!==t.version||t.dep.computed&&(cy(t.dep.computed)||t.dep.version!==t.version))return!0;return!!e._dirty}function cy(e){if(e.flags&4&&!(e.flags&16)||(e.flags&=-17,e.globalVersion===Ku))return;e.globalVersion=Ku;const t=e.dep;if(e.flags|=2,t.version>0&&!e.isSSR&&e.deps&&!Up(e)){e.flags&=-3;return}const i=Ai,n=Ea;Ai=e,Ea=!0;try{ly(e);const a=e.fn(e._value);(t.version===0||or(a,e._value))&&(e._value=a,t.version++)}catch(a){throw t.version++,a}finally{Ai=i,Ea=n,uy(e),e.flags&=-3}}function Y_(e,t=!1){const{dep:i,prevSub:n,nextSub:a}=e;if(n&&(n.nextSub=a,e.prevSub=void 0),a&&(a.prevSub=n,e.nextSub=void 
0),i.subs===e&&(i.subs=n,!n&&i.computed)){i.computed.flags&=-5;for(let s=i.computed.deps;s;s=s.nextDep)Y_(s,!0)}!t&&!--i.sc&&i.map&&i.map.delete(i.key)}function Zk(e){const{prevDep:t,nextDep:i}=e;t&&(t.nextDep=i,e.prevDep=void 0),i&&(i.prevDep=t,e.nextDep=void 0)}let Ea=!0;const dy=[];function Er(){dy.push(Ea),Ea=!1}function kr(){const e=dy.pop();Ea=e===void 0?!0:e}function W0(e){const{cleanup:t}=e;if(e.cleanup=void 0,t){const i=Ai;Ai=void 0;try{t()}finally{Ai=i}}}let Ku=0;class $k{constructor(t,i){this.sub=t,this.dep=i,this.version=i.version,this.nextDep=this.prevDep=this.nextSub=this.prevSub=th
* @vue/runtime-core v3.5.13
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**/function Jc(e,t,i,n){try{return n?e(...n):e()}catch(a){sh(a,t,i)}}function Ba(e,t,i,n){if(Bt(e)){const a=Jc(e,t,i,n);return a&&Y2(a)&&a.catch(s=>{sh(s,t,i)}),a}if(Ct(e)){const a=[];for(let s=0;s<e.length;s++)a.push(Ba(e[s],t,i,n));return a}}function sh(e,t,i,n=!0){const a=t?t.vnode:null,{errorHandler:s,throwUnhandledErrorInProduction:r}=t&&t.appContext.config||ri;if(t){let o=t.parent;const l=t.proxy,u=`https://vuejs.org/error-reference/#runtime-${i}`;for(;o;){const f=o.ec;if(f){for(let A=0;A<f.length;A++)if(f[A](e,l,u)===!1)return}o=o.parent}if(s){Er(),Jc(s,null,10,[e,l,u]),kr();return}}EB(e,i,a,n,r)}function EB(e,t,i,n=!0,a=!1){if(a)throw e;console.error(e)}const Cn=[];let Ha=-1;const al=[];let Ys=null,Vo=0;const Ey=Promise.resolve();let Mf=null;function rh(e){const t=Mf||Ey;return e?t.then(this?e.bind(this):e):t}function kB(e){let t=Ha+1,i=Cn.length;for(;t<i;){const n=t+i>>>1,a=Cn[n],s=Xu(a);s<e||s===e&&a.flags&2?t=n+1:i=n}return t}function t1(e){if(!(e.flags&1)){const t=Xu(e),i=Cn[Cn.length-1];!i||!(e.flags&2)&&t>=Xu(i)?Cn.push(e):Cn.splice(kB(t),0,e),e.flags|=1,ky()}}function ky(){Mf||(Mf=Ey.then(Sy))}function BB(e){Ct(e)?al.push(...e):Ys&&e.id===-1?Ys.splice(Vo+1,0,e):e.flags&1||(al.push(e),e.flags|=1),ky()}function Y0(e,t,i=Ha+1){for(;i<Cn.length;i++){const n=Cn[i];if(n&&n.flags&2){if(e&&n.id!==e.uid)continue;Cn.splice(i,1),i--,n.flags&4&&(n.flags&=-2),n(),n.flags&4||(n.flags&=-2)}}}function By(e){if(al.length){const t=[...new Set(al)].sort((i,n)=>Xu(i)-Xu(n));if(al.length=0,Ys){Ys.push(...t);return}for(Ys=t,Vo=0;Vo<Ys.length;Vo++){const i=Ys[Vo];i.flags&4&&(i.flags&=-2),i.flags&8||i(),i.flags&=-2}Ys=null,Vo=0}}const Xu=e=>e.id==null?e.flags&2?-1:1/0:e.id;function Sy(e){try{for(Ha=0;Ha<Cn.length;Ha++){const t=Cn[Ha];t&&!(t.flags&8)&&(t.flags&4&&(t.flags&=-2),Jc(t,t.i,t.i?15:14),t.flags&4||(t.flags&=-2))}}finally{for(;Ha<Cn.length;Ha++){const t=Cn[Ha];t&&(t.flags&=-2)}Ha=-1,Cn.length=0,By(),Mf=null,(Cn.length||al.length)&&Sy()}}let Ki=null,Dy=null;function Lf(e){const t=Ki;return Ki=e,Dy=e&&e.type.__scopeId||null,t}function gt(e,t=Ki,i){if(!t||e._n)return e;const n=(...a)=>{n._d&&rF(-1);const s=Lf(t);let r;try{r=e(...a)}finally{Lf(s),n._d&&rF(1)}return r};return n._n=!0,n._c=!0,n._d=!0,n}function Me(e,t){if(Ki===null)return e;const i=dh(Ki),n=e.dirs||(e.dirs=[]);for(let a=0;a<t.length;a++){let[s,r,o,l=ri]=t[a];s&&(Bt(s)&&(s={mounted:s,updated:s}),s.deep&&vs(r),n.push({dir:s,instance:i,value:r,oldValue:void 0,arg:o,modifiers:l}))}return e}function Or(e,t,i,n){const a=e.dirs,s=t&&t.dirs;for(let r=0;r<a.length;r++){const o=a[r];s&&(o.oldValue=s[r].value);let l=o.dir[n];l&&(Er(),Ba(l,i,8,[e.el,o,e,t]),kr())}}const SB=Symbol("_vte"),xy=e=>e.__isTeleport,Xs=Symbol("_leaveCb"),Id=Symbol("_enterCb");function DB(){const e={isMounted:!1,isLeaving:!1,isUnmounting:!1,leavingVNodes:new Map};return Br(()=>{e.isMounted=!0}),wo(()=>{e.isUnmounting=!0}),e}const sa=[Function,Array],Ty={mode:String,appear:Boolean,persisted:Boolean,onBeforeEnter:sa,onEnter:sa,onAfterEnter:sa,onEnterCancelled:sa,onBeforeLeave:sa,onLeave:sa,onAfterLeave:sa,onLeaveCancelled:sa,onBeforeAppear:sa,onAppear:sa,onAfterAppear:sa,onAppearCancelled:sa},Iy=e=>{const t=e.subTree;return t.component?Iy(t.component):t},xB={name:"BaseTransition",props:Ty,setup(e,{slots:t}){const i=yS(),n=DB();return()=>{const a=t.default&&Ly(t.default(),!0);if(!a||!a.length)return;const s=Py(a),r=zt(e),{mode:o}=r;if(n.isLeaving)return wg(s);const l=X0(s);if(!l)return wg(s);let u=Qp(l,r,n,i,A=>u=A);l.type!==En&&Ju(l,u);let 
f=i.subTree&&X0(i.subTree);if(f&&f.type!==En&&!zr(l,f)&&Iy(i).type!==En){let A=Qp(f,r,n,i);if(Ju(f,A),o==="out-in"&&l.type!==En)return n.isLeaving=!0,A.afterLeave=()=>{n.isLeaving=!1,i.job.flags&8||i.update(),delete A.afterLeave,f=void 0},wg(s);o==="in-out"&&l.type!==En?A.delayLeave=(h,p,F)=>{const y=My(n,f);y[String(f.key)]=f,h[Xs]=()=>{p(),h[Xs]=void 0,delete u.delayedLeave,f=void 0},u.delayedLeave=()=>{F(),delete u.delayedLeave,f=void 0}}:f=void 0}else f&&(f=void 0);return s}}};function Py(e){let t=e[0];if(e.length>1){for(const i of e)if(i.type!==En){t=i;break}
* @vue/runtime-dom v3.5.13
* (c) 2018-present Yuxi (Evan) You and Vue contributors
* @license MIT
**/let Wp;const uF=typeof window<"u"&&window.trustedTypes;if(uF)try{Wp=uF.createPolicy("vue",{createHTML:e=>e})}catch{}const fw=Wp?e=>Wp.createHTML(e):e=>e,xS="http://www.w3.org/2000/svg",TS="http://www.w3.org/1998/Math/MathML",ms=typeof document<"u"?document:null,cF=ms&&ms.createElement("template"),IS={insert:(e,t,i)=>{t.insertBefore(e,i||null)},remove:e=>{const t=e.parentNode;t&&t.removeChild(e)},createElement:(e,t,i,n)=>{const a=t==="svg"?ms.createElementNS(xS,e):t==="mathml"?ms.createElementNS(TS,e):i?ms.createElement(e,{is:i}):ms.createElement(e);return e==="select"&&n&&n.multiple!=null&&a.setAttribute("multiple",n.multiple),a},createText:e=>ms.createTextNode(e),createComment:e=>ms.createComment(e),setText:(e,t)=>{e.nodeValue=t},setElementText:(e,t)=>{e.textContent=t},parentNode:e=>e.parentNode,nextSibling:e=>e.nextSibling,querySelector:e=>ms.querySelector(e),setScopeId(e,t){e.setAttribute(t,"")},insertStaticContent(e,t,i,n,a,s){const r=i?i.previousSibling:t.lastChild;if(a&&(a===s||a.nextSibling))for(;t.insertBefore(a.cloneNode(!0),i),!(a===s||!(a=a.nextSibling)););else{cF.innerHTML=fw(n==="svg"?`<svg>${e}</svg>`:n==="mathml"?`<math>${e}</math>`:e);const o=cF.content;if(n==="svg"||n==="mathml"){const l=o.firstChild;for(;l.firstChild;)o.appendChild(l.firstChild);o.removeChild(l)}t.insertBefore(o,i)}return[r?r.nextSibling:t.firstChild,i?i.previousSibling:t.lastChild]}},Gs="transition",ru="animation",tc=Symbol("_vtc"),Aw={name:String,type:String,css:{type:Boolean,default:!0},duration:[String,Number,Object],enterFromClass:String,enterActiveClass:String,enterToClass:String,appearFromClass:String,appearActiveClass:String,appearToClass:String,leaveFromClass:String,leaveActiveClass:String,leaveToClass:String},PS=$i({},Ty,Aw),MS=e=>(e.displayName="Transition",e.props=PS,e),fi=MS((e,{slots:t})=>ni(TB,LS(e),t)),Hr=(e,t=[])=>{Ct(e)?e.forEach(i=>i(...t)):e&&e(...t)},dF=e=>e?Ct(e)?e.some(t=>t.length>1):e.length>1:!1;function LS(e){const t={};for(const ce in e)ce in Aw||(t[ce]=e[ce]);if(e.css===!1)return t;const{name:i="v",type:n,duration:a,enterFromClass:s=`${i}-enter-from`,enterActiveClass:r=`${i}-enter-active`,enterToClass:o=`${i}-enter-to`,appearFromClass:l=s,appearActiveClass:u=r,appearToClass:f=o,leaveFromClass:A=`${i}-leave-from`,leaveActiveClass:h=`${i}-leave-active`,leaveToClass:p=`${i}-leave-to`}=e,F=RS(a),y=F&&F[0],E=F&&F[1],{onBeforeEnter:w,onEnter:C,onEnterCancelled:B,onLeave:S,onLeaveCancelled:U,onBeforeAppear:N=w,onAppear:z=C,onAppearCancelled:Q=B}=t,R=(ce,Fe,ge,le)=>{ce._enterCancelled=le,Qr(ce,Fe?f:o),Qr(ce,Fe?u:r),ge&&ge()},W=(ce,Fe)=>{ce._isLeaving=!1,Qr(ce,A),Qr(ce,p),Qr(ce,h),Fe&&Fe()},Z=ce=>(Fe,ge)=>{const le=ce?z:C,se=()=>R(Fe,ce,ge);Hr(le,[Fe,se]),fF(()=>{Qr(Fe,ce?l:s),ds(Fe,ce?f:o),dF(le)||AF(Fe,n,y,se)})};return $i(t,{onBeforeEnter(ce){Hr(w,[ce]),ds(ce,s),ds(ce,r)},onBeforeAppear(ce){Hr(N,[ce]),ds(ce,l),ds(ce,u)},onEnter:Z(!1),onAppear:Z(!0),onLeave(ce,Fe){ce._isLeaving=!0;const ge=()=>W(ce,Fe);ds(ce,A),ce._enterCancelled?(ds(ce,h),pF()):(pF(),ds(ce,h)),fF(()=>{ce._isLeaving&&(Qr(ce,A),ds(ce,p),dF(S)||AF(ce,n,E,ge))}),Hr(S,[ce,ge])},onEnterCancelled(ce){R(ce,!1,void 0,!0),Hr(B,[ce])},onAppearCancelled(ce){R(ce,!0,void 0,!0),Hr(Q,[ce])},onLeaveCancelled(ce){W(ce),Hr(U,[ce])}})}function RS(e){if(e==null)return null;if(Fi(e))return[kg(e.enter),kg(e.leave)];{const t=kg(e);return[t,t]}}function kg(e){return zk(e)}function ds(e,t){t.split(/\s+/).forEach(i=>i&&e.classList.add(i)),(e[tc]||(e[tc]=new Set)).add(t)}function 
Qr(e,t){t.split(/\s+/).forEach(n=>n&&e.classList.remove(n));const i=e[tc];i&&(i.delete(t),i.size||(e[tc]=void 0))}function fF(e){requestAnimationFrame(()=>{requestAnimationFrame(e)})}let US=0;function AF(e,t,i,n){const a=e._endId=++US,s=()=>{a===e._endId&&n()};if(i!=null)return setTimeout(s,i);const{type:r,timeout:o,propCount:l}=OS(e,t);if(!r)return n();const u=r+"end";let f=0;const A=()=>{e.removeEventListener(u,h),s()},h=p=>{p.target===e&&++f>=l&&A()};setTimeout(()=>{f<l&&A()},o+1),e.addEventListener(u,h)}function OS(e,t){const i=window.getComputedStyle(e),n=F=>(i[F]||"").split(", "),a=n(`${Gs
* pinia v2.3.1
* (c) 2025 Eduardo San Martin Morote
* @license MIT
*/let gw;const fh=e=>gw=e,pw=Symbol();function Kp(e){return e&&typeof e=="object"&&Object.prototype.toString.call(e)==="[object Object]"&&typeof e.toJSON!="function"}var Mu;(function(e){e.direct="direct",e.patchObject="patch object",e.patchFunction="patch function"})(Mu||(Mu={}));function u6(){const e=ny(!0),t=e.run(()=>wi({}));let i=[],n=[];const a=e1({install(s){fh(a),a._a=s,s.provide(pw,a),s.config.globalProperties.$pinia=a,n.forEach(r=>i.push(r)),n=[]},use(s){return this._a?i.push(s):n.push(s),this},_p:i,_a:null,_e:e,_s:new Map,state:t});return a}const mw=()=>{};function SF(e,t,i,n=mw){e.push(t);const a=()=>{const s=e.indexOf(t);s>-1&&(e.splice(s,1),n())};return!i&&ay()&&Jk(a),a}function Uo(e,...t){e.slice().forEach(i=>{i(...t)})}const c6=e=>e(),DF=Symbol(),Tg=Symbol();function Yp(e,t){e instanceof Map&&t instanceof Map?t.forEach((i,n)=>e.set(n,i)):e instanceof Set&&t instanceof Set&&t.forEach(e.add,e);for(const i in t){if(!t.hasOwnProperty(i))continue;const n=t[i],a=e[i];Kp(a)&&Kp(n)&&e.hasOwnProperty(i)&&!Ui(n)&&!lr(n)?e[i]=Yp(a,n):e[i]=n}return e}const d6=Symbol();function f6(e){return!Kp(e)||!e.hasOwnProperty(d6)}const{assign:Vs}=Object;function A6(e){return!!(Ui(e)&&e.effect)}function h6(e,t,i,n){const{state:a,actions:s,getters:r}=t,o=i.state.value[e];let l;function u(){o||(i.state.value[e]=a?a():{});const f=Cy(i.state.value[e]);return Vs(f,s,Object.keys(r||{}).reduce((A,h)=>(A[h]=e1(Ri(()=>{fh(i);const p=i._s.get(e);return r[h].call(p,p)})),A),{}))}return l=_w(e,u,t,i,n,!0),l}function _w(e,t,i={},n,a,s){let r;const o=Vs({actions:{}},i),l={deep:!0};let u,f,A=[],h=[],p;const F=n.state.value[e];!s&&!F&&(n.state.value[e]={}),wi({});let y;function E(Q){let R;u=f=!1,typeof Q=="function"?(Q(n.state.value[e]),R={type:Mu.patchFunction,storeId:e,events:p}):(Yp(n.state.value[e],Q),R={type:Mu.patchObject,payload:Q,storeId:e,events:p});const W=y=Symbol();rh().then(()=>{y===W&&(u=!0)}),f=!0,Uo(A,R,n.state.value[e])}const w=s?function(){const{state:R}=i,W=R?R():{};this.$patch(Z=>{Vs(Z,W)})}:mw;function C(){r.stop(),A=[],h=[],n._s.delete(e)}const B=(Q,R="")=>{if(DF in Q)return Q[Tg]=R,Q;const W=function(){fh(n);const Z=Array.from(arguments),ce=[],Fe=[];function ge(me){ce.push(me)}function le(me){Fe.push(me)}Uo(h,{args:Z,name:W[Tg],store:U,after:ge,onError:le});let se;try{se=Q.apply(this&&this.$id===e?this:U,Z)}catch(me){throw Uo(Fe,me),me}return se instanceof Promise?se.then(me=>(Uo(ce,me),me)).catch(me=>(Uo(Fe,me),Promise.reject(me))):(Uo(ce,se),se)};return W[DF]=!0,W[Tg]=R,W},S={_p:n,$id:e,$onAction:SF.bind(null,h),$patch:E,$reset:w,$subscribe(Q,R={}){const W=SF(A,Q,R.detached,()=>Z()),Z=r.run(()=>ur(()=>n.state.value[e],ce=>{(R.flush==="sync"?f:u)&&Q({storeId:e,type:Mu.direct,events:p},ce)},Vs({},l,R)));return W},$dispose:C},U=Xc(S);n._s.set(e,U);const z=(n._a&&n._a.runWithContext||c6)(()=>n._e.run(()=>(r=ny()).run(()=>t({action:B}))));for(const Q in z){const R=z[Q];if(Ui(R)&&!A6(R)||lr(R))s||(F&&f6(R)&&(Ui(R)?R.value=F[Q]:Yp(R,F[Q])),n.state.value[e][Q]=R);else if(typeof R=="function"){const W=B(R,Q);z[Q]=W,o.actions[Q]=R}}return Vs(U,z),Vs(zt(U),z),Object.defineProperty(U,"$state",{get:()=>n.state.value[e],set:Q=>{E(R=>{Vs(R,Q)})}}),n._p.forEach(Q=>{Vs(U,r.run(()=>Q({store:U,app:n._a,pinia:n,options:o})))}),F&&s&&i.hydrate&&i.hydrate(U.$state,F),u=!0,f=!0,U}/*! 
#__NO_SIDE_EFFECTS__ */function ei(e,t,i){let n,a;const s=typeof t=="function";typeof e=="string"?(n=e,a=s?i:t):(a=e,n=e.id);function r(o,l){const u=YB();return o=o||(u?ka(pw,null):null),o&&fh(o),o=gw,o._s.has(n)||(s?_w(n,t,a,o):h6(n,a,o)),o._s.get(n)}return r.$id=n,r}function J(e,t){return Array.isArray(t)?t.reduce((i,n)=>(i[n]=function(){return e(this.$pinia)[n]},i),{}):Object.keys(t).reduce((i,n)=>(i[n]=function(){const a=e(this.$pinia),s=t[n];return typeof s=="function"?s.call(this,a):a[s]},i),{})}function Te(e,t){return Array.isArray(t)?t.reduce((i,n)=>(i[n]=function(...a){return e(this.$pinia)[n](...a)},i),{}):Object.keys(t).reduce((i,n)=>(i[n]=function(...a){return e(this.$pinia)[t[n]](...a)},i),{})}const oe=ei("phone",{state:()=>({show:!1,notifyshow:!1,la
`).forEach(function(r){a=r.indexOf(":"),i=r.substring(0,a).trim().toLowerCase(),n=r.substring(a+1).trim(),!(!i||t[i]&&px[i])&&(i==="set-cookie"?t[i]?t[i].push(n):t[i]=[n]:t[i]=t[i]?t[i]+", "+n:n)}),t},VF=Symbol("internals");function cu(e){return e&&String(e).trim().toLowerCase()}function bf(e){return e===!1||e==null?e:Oe.isArray(e)?e.map(bf):String(e)}function _x(e){const t=Object.create(null),i=/([^\s,;=]+)\s*(?:=\s*([^,;]+))?/g;let n;for(;n=i.exec(e);)t[n[1]]=n[2];return t}const Fx=e=>/^[-_a-zA-Z0-9^`|~,!#$%&'*+.]+$/.test(e.trim());function Mg(e,t,i,n,a){if(Oe.isFunction(n))return n.call(this,t,i);if(a&&(t=i),!!Oe.isString(t)){if(Oe.isString(n))return t.indexOf(n)!==-1;if(Oe.isRegExp(n))return n.test(t)}}function bx(e){return e.trim().toLowerCase().replace(/([a-z\d])(\w*)/g,(t,i,n)=>i.toUpperCase()+n)}function vx(e,t){const i=Oe.toCamelCase(" "+t);["get","set","has"].forEach(n=>{Object.defineProperty(e,n+i,{value:function(a,s,r){return this[n].call(this,t,a,s,r)},configurable:!0})})}let On=class{constructor(t){t&&this.set(t)}set(t,i,n){const a=this;function s(o,l,u){const f=cu(l);if(!f)throw new Error("header name must be a non-empty string");const A=Oe.findKey(a,f);(!A||a[A]===void 0||u===!0||u===void 0&&a[A]!==!1)&&(a[A||l]=bf(o))}const r=(o,l)=>Oe.forEach(o,(u,f)=>s(u,f,l));if(Oe.isPlainObject(t)||t instanceof this.constructor)r(t,i);else if(Oe.isString(t)&&(t=t.trim())&&!Fx(t))r(mx(t),i);else if(Oe.isHeaders(t))for(const[o,l]of t.entries())s(l,o,n);else t!=null&&s(i,t,n);return this}get(t,i){if(t=cu(t),t){const n=Oe.findKey(this,t);if(n){const a=this[n];if(!i)return a;if(i===!0)return _x(a);if(Oe.isFunction(i))return i.call(this,a,n);if(Oe.isRegExp(i))return i.exec(a);throw new TypeError("parser must be boolean|regexp|function")}}}has(t,i){if(t=cu(t),t){const n=Oe.findKey(this,t);return!!(n&&this[n]!==void 0&&(!i||Mg(this,this[n],n,i)))}return!1}delete(t,i){const n=this;let a=!1;function s(r){if(r=cu(r),r){const o=Oe.findKey(n,r);o&&(!i||Mg(n,n[o],o,i))&&(delete n[o],a=!0)}}return Oe.isArray(t)?t.forEach(s):s(t),a}clear(t){const i=Object.keys(this);let n=i.length,a=!1;for(;n--;){const s=i[n];(!t||Mg(this,this[s],s,t,!0))&&(delete this[s],a=!0)}return a}normalize(t){const i=this,n={};return Oe.forEach(this,(a,s)=>{const r=Oe.findKey(n,s);if(r){i[r]=bf(a),delete i[s];return}const o=t?bx(s):String(s).trim();o!==s&&delete i[s],i[o]=bf(a),n[o]=!0}),this}concat(...t){return this.constructor.concat(this,...t)}toJSON(t){const i=Object.create(null);return Oe.forEach(this,(n,a)=>{n!=null&&n!==!1&&(i[a]=t&&Oe.isArray(n)?n.join(", "):n)}),i}[Symbol.iterator](){return Object.entries(this.toJSON())[Symbol.iterator]()}toString(){return Object.entries(this.toJSON()).map(([t,i])=>t+": "+i).join(`
`)}get[Symbol.toStringTag](){return"AxiosHeaders"}static from(t){return t instanceof this?t:new this(t)}static concat(t,...i){const n=new this(t);return i.forEach(a=>n.set(a)),n}static accessor(t){const n=(this[VF]=this[VF]={accessors:{}}).accessors,a=this.prototype;function s(r){const o=cu(r);n[o]||(vx(a,r),n[o]=!0)}return Oe.isArray(t)?t.forEach(s):s(t),this}};On.accessor(["Content-Type","Content-Length","Accept","Accept-Encoding","User-Agent","Authorization"]);Oe.reduceDescriptors(On.prototype,({value:e},t)=>{let i=t[0].toUpperCase()+t.slice(1);return{get:()=>e,set(n){this[i]=n}}});Oe.freezeMethods(On);function Lg(e,t){const i=this||ed,n=t||i,a=On.from(n.headers);let s=n.data;return Oe.forEach(e,function(o){s=o.call(i,s,a.normalize(),t?t.status:void 0)}),a.normalize(),s}function qw(e){return!!(e&&e.__CANCEL__)}function Nl(e,t,i){Tt.call(this,e??"canceled",Tt.ERR_CANCELED,t,i),this.name="CanceledError"}Oe.inherits(Nl,Tt,{__CANCEL__:!0});function Vw(e,t,i){const n=i.config.validateStatus;!i.status||!n||n(i.status)?e(i):t(new Tt("Request failed with status code "+i.status,[Tt.ERR_BAD_REQUEST,Tt.ERR_BAD_RESPONSE][Math.floor(i.status/100)-4],i.config,i.request,i))}function yx(e){const t=/^([-+\w]{1,25})(:?\/\/|:)/.exec(e);return t&&t[1]||""}function wx(e,t){e=e||10;const i=new Array(e),n=new Array(e);let a=0,s=0,r;return t=t!==void 0?t:1e3,function(l){const u=Date.now(),f=n[s];r||(r=u),i[a]=l,n[a]=u;let A=s,h=0;for(;A!==a;)h+=i[A++],A=A%e;if(a=(a+1)%e,a===s&&(s=(s+1)%e),u-r<t)return;const p=f&&u-f;return p?Math.round(h*1e3/p):void 0}}function Cx(e,t){let i=0,n=1e3/t,a,s;const r=(u,f=Date.now())=>{i=f,a=null,s&&(clearTimeout(s),s=null),e.apply(null,u)};return[(...u)=>{const f=Date.now(),A=f-i;A>=n?r(u,f):(a=u,s||(s=setTimeout(()=>{s=null,r(a)},n-A)))},()=>a&&r(a)]}const Qf=(e,t,i=3)=>{let n=0;const a=wx(50,250);return Cx(s=>{const r=s.loaded,o=s.lengthComputable?s.total:void 0,l=r-n,u=a(l),f=r<=o;n=r;const A={loaded:r,total:o,progress:o?r/o:void 0,bytes:l,rate:u||void 0,estimated:u&&o&&f?(o-r)/u:void 0,event:s,lengthComputable:o!=null,[t?"download":"upload"]:!0};e(A)},i)},WF=(e,t)=>{const i=e!=null;return[n=>t[0]({lengthComputable:i,total:e,loaded:n}),t[1]]},KF=e=>(...t)=>Oe.asap(()=>e(...t)),Ex=hn.hasStandardBrowserEnv?((e,t)=>i=>(i=new URL(i,hn.origin),e.protocol===i.protocol&&e.host===i.host&&(t||e.port===i.port)))(new URL(hn.origin),hn.navigator&&/(msie|trident)/i.test(hn.navigator.userAgent)):()=>!0,kx=hn.hasStandardBrowserEnv?{write(e,t,i,n,a,s){const r=[e+"="+encodeURIComponent(t)];Oe.isNumber(i)&&r.push("expires="+new Date(i).toGMTString()),Oe.isString(n)&&r.push("path="+n),Oe.isString(a)&&r.push("domain="+a),s===!0&&r.push("secure"),document.cookie=r.join("; ")},read(e){const t=document.cookie.match(new RegExp("(^|;\\s*)("+e+")=([^;]*)"));return t?decodeURIComponent(t[3]):null},remove(e){this.write(e,"",Date.now()-864e5)}}:{write(){},read(){return null},remove(){}};function Bx(e){return/^([a-z][a-z\d+\-.]*:)?\/\//i.test(e)}function Sx(e,t){return t?e.replace(/\/?\/$/,"")+"/"+t.replace(/^\/+/,""):e}function Ww(e,t,i){let n=!Bx(t);return e&&(n||i==!1)?Sx(e,t):t}const YF=e=>e instanceof On?{...e}:e;function lo(e,t){t=t||{};const i={};function n(u,f,A,h){return Oe.isPlainObject(u)&&Oe.isPlainObject(f)?Oe.merge.call({caseless:h},u,f):Oe.isPlainObject(f)?Oe.merge({},f):Oe.isArray(f)?f.slice():f}function a(u,f,A,h){if(Oe.isUndefined(f)){if(!Oe.isUndefined(u))return n(void 0,u,A,h)}else return n(u,f,A,h)}function s(u,f){if(!Oe.isUndefined(f))return n(void 0,f)}function 
r(u,f){if(Oe.isUndefined(f)){if(!Oe.isUndefined(u))return n(void 0,u)}else return n(void 0,f)}function o(u,f,A){if(A in t)return n(u,f);if(A in e)return n(void 0,u)}const l={url:s,method:s,data:s,baseURL:r,transformRequest:r,transformResponse:r,paramsSerializer:r,timeout:r,timeoutMessage:r,withCredentials:r,withXSRFToken:r,adapter:r,responseType:r,xsrfCookieName:r,xsrfHeaderName:r,onUploadProgress:r,onDownloadProgress:r,decompress:r,maxContentLength:r,maxBodyLength:r,beforeRedirect:r,transport:r,httpAgent:r,httpsAgent:r,cancelToken:r,socketPath:r,responseEn
`+s.map(ZF).join(`
`):" "+ZF(s[0]):"as no adapter specified";throw new Tt("There is no suitable adapter to dispatch the request "+r,"ERR_NOT_SUPPORT")}return n},adapters:am};function Rg(e){if(e.cancelToken&&e.cancelToken.throwIfRequested(),e.signal&&e.signal.aborted)throw new Nl(null,e)}function $F(e){return Rg(e),e.headers=On.from(e.headers),e.data=Lg.call(e,e.transformRequest),["post","put","patch"].indexOf(e.method)!==-1&&e.headers.setContentType("application/x-www-form-urlencoded",!1),Jw.getAdapter(e.adapter||ed.adapter)(e).then(function(n){return Rg(e),n.data=Lg.call(e,e.transformResponse,n),n.headers=On.from(n.headers),n},function(n){return qw(n)||(Rg(e),n&&n.response&&(n.response.data=Lg.call(e,e.transformResponse,n.response),n.response.headers=On.from(n.response.headers))),Promise.reject(n)})}const Zw="1.8.4",bh={};["object","boolean","number","function","string","symbol"].forEach((e,t)=>{bh[e]=function(n){return typeof n===e||"a"+(t<1?"n ":" ")+e}});const eb={};bh.transitional=function(t,i,n){function a(s,r){return"[Axios v"+Zw+"] Transitional option '"+s+"'"+r+(n?". "+n:"")}return(s,r,o)=>{if(t===!1)throw new Tt(a(r," has been removed"+(i?" in "+i:"")),Tt.ERR_DEPRECATED);return i&&!eb[r]&&(eb[r]=!0,console.warn(a(r," has been deprecated since v"+i+" and will be removed in the near future"))),t?t(s,r,o):!0}};bh.spelling=function(t){return(i,n)=>(console.warn(`${n} is likely a misspelling of ${t}`),!0)};function Qx(e,t,i){if(typeof e!="object")throw new Tt("options must be an object",Tt.ERR_BAD_OPTION_VALUE);const n=Object.keys(e);let a=n.length;for(;a-- >0;){const s=n[a],r=t[s];if(r){const o=e[s],l=o===void 0||r(o,s,e);if(l!==!0)throw new Tt("option "+s+" must be "+l,Tt.ERR_BAD_OPTION_VALUE);continue}if(i!==!0)throw new Tt("Unknown option "+s,Tt.ERR_BAD_OPTION)}}const vf={assertOptions:Qx,validators:bh},Oa=vf.validators;let io=class{constructor(t){this.defaults=t,this.interceptors={request:new qF,response:new qF}}async request(t,i){try{return await this._request(t,i)}catch(n){if(n instanceof Error){let a={};Error.captureStackTrace?Error.captureStackTrace(a):a=new Error;const s=a.stack?a.stack.replace(/^.+\n/,""):"";try{n.stack?s&&!String(n.stack).endsWith(s.replace(/^.+\n.+\n/,""))&&(n.stack+=`
`+s):n.stack=s}catch{}}throw n}}_request(t,i){typeof t=="string"?(i=i||{},i.url=t):i=t||{},i=lo(this.defaults,i);const{transitional:n,paramsSerializer:a,headers:s}=i;n!==void 0&&vf.assertOptions(n,{silentJSONParsing:Oa.transitional(Oa.boolean),forcedJSONParsing:Oa.transitional(Oa.boolean),clarifyTimeoutError:Oa.transitional(Oa.boolean)},!1),a!=null&&(Oe.isFunction(a)?i.paramsSerializer={serialize:a}:vf.assertOptions(a,{encode:Oa.function,serialize:Oa.function},!0)),i.allowAbsoluteUrls!==void 0||(this.defaults.allowAbsoluteUrls!==void 0?i.allowAbsoluteUrls=this.defaults.allowAbsoluteUrls:i.allowAbsoluteUrls=!0),vf.assertOptions(i,{baseUrl:Oa.spelling("baseURL"),withXsrfToken:Oa.spelling("withXSRFToken")},!0),i.method=(i.method||this.defaults.method||"get").toLowerCase();let r=s&&Oe.merge(s.common,s[i.method]);s&&Oe.forEach(["delete","get","head","post","put","patch","common"],F=>{delete s[F]}),i.headers=On.concat(r,s);const o=[];let l=!0;this.interceptors.request.forEach(function(y){typeof y.runWhen=="function"&&y.runWhen(i)===!1||(l=l&&y.synchronous,o.unshift(y.fulfilled,y.rejected))});const u=[];this.interceptors.response.forEach(function(y){u.push(y.fulfilled,y.rejected)});let f,A=0,h;if(!l){const F=[$F.bind(this),void 0];for(F.unshift.apply(F,o),F.push.apply(F,u),h=F.length,f=Promise.resolve(i);A<h;)f=f.then(F[A++],F[A++]);return f}h=o.length;let p=i;for(A=0;A<h;){const F=o[A++],y=o[A++];try{p=F(p)}catch(E){y.call(this,E);break}}try{f=$F.call(this,p)}catch(F){return Promise.reject(F)}for(A=0,h=u.length;A<h;)f=f.then(u[A++],u[A++]);return f}getUri(t){t=lo(this.defaults,t);const i=Ww(t.baseURL,t.url,t.allowAbsoluteUrls);return jw(i,t.params,t.paramsSerializer)}};Oe.forEach(["delete","get","head","options"],function(t){io.prototype[t]=function(i,n){return this.request(lo(n||{},{method:t,url:i,data:(n||{}).data}))}});Oe.forEach(["post","put","patch"],function(t){function i(n){return function(s,r,o){return this.request(lo(o||{},{method:t,headers:n?{"Content-Type":"multipart/form-data"}:{},url:s,data:r}))}}io.prototype[t]=i(),io.prototype[t+"Form"]=i(!0)});let jx=class $w{constructor(t){if(typeof t!="function")throw new TypeError("executor must be a function.");let i;this.promise=new Promise(function(s){i=s});const n=this;this.promise.then(a=>{if(!n._listeners)return;let s=n._listeners.length;for(;s-- >0;)n._listeners[s](a);n._listeners=null}),this.promise.then=a=>{let s;const r=new Promise(o=>{n.subscribe(o),s=o}).then(a);return r.cancel=function(){n.unsubscribe(s)},r},t(function(s,r,o){n.reason||(n.reason=new Nl(s,r,o),i(n.reason))})}throwIfRequested(){if(this.reason)throw this.reason}subscribe(t){if(this.reason){t(this.reason);return}this._listeners?this._listeners.push(t):this._listeners=[t]}unsubscribe(t){if(!this._listeners)return;const i=this._listeners.indexOf(t);i!==-1&&this._listeners.splice(i,1)}toAbortSignal(){const t=new AbortController,i=n=>{t.abort(n)};return this.subscribe(i),t.signal.unsubscribe=()=>this.unsubscribe(i),t.signal}static source(){let t;return{token:new $w(function(a){t=a}),cancel:t}}};function Gx(e){return function(i){return e.apply(null,i)}}function zx(e){return Oe.isObject(e)&&e.isAxiosError===!0}const 
sm={Continue:100,SwitchingProtocols:101,Processing:102,EarlyHints:103,Ok:200,Created:201,Accepted:202,NonAuthoritativeInformation:203,NoContent:204,ResetContent:205,PartialContent:206,MultiStatus:207,AlreadyReported:208,ImUsed:226,MultipleChoices:300,MovedPermanently:301,Found:302,SeeOther:303,NotModified:304,UseProxy:305,Unused:306,TemporaryRedirect:307,PermanentRedirect:308,BadRequest:400,Unauthorized:401,PaymentRequired:402,Forbidden:403,NotFound:404,MethodNotAllowed:405,NotAcceptable:406,ProxyAuthenticationRequired:407,RequestTimeout:408,Conflict:409,Gone:410,LengthRequired:411,PreconditionFailed:412,PayloadTooLarge:413,UriTooLong:414,UnsupportedMediaType:415,RangeNotSatisfiable:416,ExpectationFailed:417,ImATeapot:418,MisdirectedRequest:421,UnprocessableEntity:422,Locked:423,FailedDependency:424,TooEarly:425,UpgradeRequired:426,PreconditionRequired:428,TooManyRequests:429,RequestHeaderFie
* vue-router v4.5.0
* (c) 2024 Eduardo San Martin Morote
* @license MIT
*/const Ko=typeof document<"u";function t3(e){return typeof e=="object"||"displayName"in e||"props"in e||"__vccOpts"in e}function qx(e){return e.__esModule||e[Symbol.toStringTag]==="Module"||e.default&&t3(e.default)}const Jt=Object.assign;function Ug(e,t){const i={};for(const n in t){const a=t[n];i[n]=Sa(a)?a.map(e):e(a)}return i}const Lu=()=>{},Sa=Array.isArray,i3=/#/g,Vx=/&/g,Wx=/\//g,Kx=/=/g,Yx=/\?/g,n3=/\+/g,Xx=/%5B/g,Jx=/%5D/g,a3=/%5E/g,Zx=/%60/g,s3=/%7B/g,$x=/%7C/g,r3=/%7D/g,eT=/%20/g;function f1(e){return encodeURI(""+e).replace($x,"|").replace(Xx,"[").replace(Jx,"]")}function tT(e){return f1(e).replace(s3,"{").replace(r3,"}").replace(a3,"^")}function rm(e){return f1(e).replace(n3,"%2B").replace(eT,"+").replace(i3,"%23").replace(Vx,"%26").replace(Zx,"`").replace(s3,"{").replace(r3,"}").replace(a3,"^")}function iT(e){return rm(e).replace(Kx,"%3D")}function nT(e){return f1(e).replace(i3,"%23").replace(Yx,"%3F")}function aT(e){return e==null?"":nT(e).replace(Wx,"%2F")}function ac(e){try{return decodeURIComponent(""+e)}catch{}return""+e}const sT=/\/$/,rT=e=>e.replace(sT,"");function Og(e,t,i="/"){let n,a={},s="",r="";const o=t.indexOf("#");let l=t.indexOf("?");return o<l&&o>=0&&(l=-1),l>-1&&(n=t.slice(0,l),s=t.slice(l+1,o>-1?o:t.length),a=e(s)),o>-1&&(n=n||t.slice(0,o),r=t.slice(o,t.length)),n=cT(n??t,i),{fullPath:n+(s&&"?")+s+r,path:n,query:a,hash:ac(r)}}function oT(e,t){const i=t.query?e(t.query):"";return t.path+(i&&"?")+i+(t.hash||"")}function tb(e,t){return!t||!e.toLowerCase().startsWith(t.toLowerCase())?e:e.slice(t.length)||"/"}function lT(e,t,i){const n=t.matched.length-1,a=i.matched.length-1;return n>-1&&n===a&&Al(t.matched[n],i.matched[a])&&o3(t.params,i.params)&&e(t.query)===e(i.query)&&t.hash===i.hash}function Al(e,t){return(e.aliasOf||e)===(t.aliasOf||t)}function o3(e,t){if(Object.keys(e).length!==Object.keys(t).length)return!1;for(const i in e)if(!uT(e[i],t[i]))return!1;return!0}function uT(e,t){return Sa(e)?ib(e,t):Sa(t)?ib(t,e):e===t}function ib(e,t){return Sa(t)?e.length===t.length&&e.every((i,n)=>i===t[n]):e.length===1&&e[0]===t}function cT(e,t){if(e.startsWith("/"))return e;if(!e)return t;const i=t.split("/"),n=e.split("/"),a=n[n.length-1];(a===".."||a===".")&&n.push("");let s=i.length-1,r,o;for(r=0;r<n.length;r++)if(o=n[r],o!==".")if(o==="..")s>1&&s--;else break;return i.slice(0,s).join("/")+"/"+n.slice(r).join("/")}const zs={path:"/",name:void 0,params:{},query:{},hash:"",fullPath:"/",matched:[],meta:{},redirectedFrom:void 0};var sc;(function(e){e.pop="pop",e.push="push"})(sc||(sc={}));var Ru;(function(e){e.back="back",e.forward="forward",e.unknown=""})(Ru||(Ru={}));function dT(e){if(!e)if(Ko){const t=document.querySelector("base");e=t&&t.getAttribute("href")||"/",e=e.replace(/^\w+:\/\/[^\/]+/,"")}else e="/";return e[0]!=="/"&&e[0]!=="#"&&(e="/"+e),rT(e)}const fT=/^[^#]+#/;function AT(e,t){return e.replace(fT,"#")+t}function hT(e,t){const i=document.documentElement.getBoundingClientRect(),n=e.getBoundingClientRect();return{behavior:t.behavior,left:n.left-i.left-(t.left||0),top:n.top-i.top-(t.top||0)}}const vh=()=>({left:window.scrollX,top:window.scrollY});function gT(e){let t;if("el"in e){const i=e.el,n=typeof i=="string"&&i.startsWith("#"),a=typeof i=="string"?n?document.getElementById(i.slice(1)):document.querySelector(i):i;if(!a)return;t=hT(a,e)}else t=e;"scrollBehavior"in document.documentElement.style?window.scrollTo(t):window.scrollTo(t.left!=null?t.left:window.scrollX,t.top!=null?t.top:window.scrollY)}function 
nb(e,t){return(history.state?history.state.position-t:-1)+e}const om=new Map;function pT(e,t){om.set(e,t)}function mT(e){const t=om.get(e);return om.delete(e),t}let _T=()=>location.protocol+"//"+location.host;function l3(e,t){const{pathname:i,search:n,hash:a}=t,s=e.indexOf("#");if(s>-1){let o=a.includes(e.slice(s))?e.slice(s).length:1,l=a.slice(o);return l[0]!=="/"&&(l="/"+l),tb(l,"")}return tb(i,e)+n+a}function FT(e,t,i,n){let a=[],s=[],r=null;const o=({state:h})=>{const p=l3(e,location),F=i.value,y=t.value;let E=0;if(h){if(i.value=p,t.value=h,r&&r===F){r=null;return}E=y?h.position
#ifdef USE_ALPHAMAP
diffuseColor.a *= texture2D( alphaMap, vUv ).g;
#endif
`,_Q=`
#ifdef USE_ALPHAMAP
uniform sampler2D alphaMap;
#endif
`,FQ=`
#ifdef ALPHATEST
if ( diffuseColor.a < ALPHATEST ) discard;
#endif
`,bQ=`
#ifdef USE_AOMAP
// reads channel R, compatible with a combined OcclusionRoughnessMetallic (RGB) texture
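// ( aoTexel.r - 1.0 ) * aoMapIntensity + 1.0 is equivalent to mix( 1.0, aoTexel.r, aoMapIntensity )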
float ambientOcclusion = ( texture2D( aoMap, vUv2 ).r - 1.0 ) * aoMapIntensity + 1.0;
reflectedLight.indirectDiffuse *= ambientOcclusion;
#if defined( USE_ENVMAP ) && defined( PHYSICAL )
float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
reflectedLight.indirectSpecular *= computeSpecularOcclusion( dotNV, ambientOcclusion, material.specularRoughness );
#endif
#endif
`,vQ=`
#ifdef USE_AOMAP
uniform sampler2D aoMap;
uniform float aoMapIntensity;
#endif
`,yQ=`
vec3 transformed = vec3( position );
`,wQ=`
vec3 objectNormal = vec3( normal );
`,CQ=`
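// Converts a punctual light's luminous intensity to irradiance at the shaded point.
// Physically correct branch: inverse falloff I / max( d^decay, 0.01 ), windowed by
// pow2( saturate( 1.0 - pow4( d / cutoff ) ) ) so the contribution reaches zero exactly at the cutoff distance.
// Legacy branch: artist-friendly ( 1.0 - d / cutoff )^decay falloff, or 1.0 when no cutoff/decay is set.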
float punctualLightIntensityToIrradianceFactor( const in float lightDistance, const in float cutoffDistance, const in float decayExponent ) {
#if defined ( PHYSICALLY_CORRECT_LIGHTS )
// based upon Frostbite 3 Moving to Physically-based Rendering
// page 32, equation 26: E[window1]
// https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
// this is intended to be used on spot and point lights that are represented as luminous intensity
// but must be converted to luminous irradiance for the surface lighting calculation
float distanceFalloff = 1.0 / max( pow( lightDistance, decayExponent ), 0.01 );
if( cutoffDistance > 0.0 ) {
distanceFalloff *= pow2( saturate( 1.0 - pow4( lightDistance / cutoffDistance ) ) );
}
return distanceFalloff;
#else
if( cutoffDistance > 0.0 && decayExponent > 0.0 ) {
return pow( saturate( -lightDistance / cutoffDistance + 1.0 ), decayExponent );
}
return 1.0;
#endif
}
vec3 BRDF_Diffuse_Lambert( const in vec3 diffuseColor ) {
return RECIPROCAL_PI * diffuseColor;
} // validated
vec3 F_Schlick( const in vec3 specularColor, const in float dotLH ) {
// Original approximation by Christophe Schlick '94
// float fresnel = pow( 1.0 - dotLH, 5.0 );
// Optimized variant (presented by Epic at SIGGRAPH '13)
// https://cdn2.unrealengine.com/Resources/files/2013SiggraphPresentationsNotes-26915738.pdf
float fresnel = exp2( ( -5.55473 * dotLH - 6.98316 ) * dotLH );
return ( 1.0 - specularColor ) * fresnel + specularColor;
} // validated
// Microfacet Models for Refraction through Rough Surfaces - equation (34)
// http://graphicrants.blogspot.com/2013/08/specular-brdf-reference.html
// alpha is "roughness squared" in Disney's reparameterization
float G_GGX_Smith( const in float alpha, const in float dotNL, const in float dotNV ) {
// geometry term (normalized) = G(l)⋅G(v) / 4(n⋅l)(n⋅v)
// also see #12151
float a2 = pow2( alpha );
float gl = dotNL + sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) );
float gv = dotNV + sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) );
return 1.0 / ( gl * gv );
} // validated
// Moving Frostbite to Physically Based Rendering 3.0 - page 12, listing 2
// https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
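// This height-correlated visibility term already includes the 1 / ( 4 * dotNL * dotNV )
// denominator of the microfacet BRDF:
// V = 0.5 / ( dotNL * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) ) + dotNV * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) ) )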
float G_GGX_SmithCorrelated( const in float alpha, const in float dotNL, const in float dotNV ) {
float a2 = pow2( alpha );
// dotNL and dotNV are explicitly swapped. This is not a mistake.
float gv = dotNL * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) );
float gl = dotNV * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) );
return 0.5 / max( gv + gl, EPSILON );
}
// Microfacet Models for Refraction through Rough Surfaces - equation (33)
// http://graphicrants.blogspot.com/2013/08/specular-brdf-reference.html
// alpha is "roughness squared" in Disney's reparameterization
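// D( h ) = a2 / ( PI * pow2( pow2( dotNH ) * ( a2 - 1.0 ) + 1.0 ) )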
float D_GGX( const in float alpha, const in float dotNH ) {
float a2 = pow2( alpha );
float denom = pow2( dotNH ) * ( a2 - 1.0 ) + 1.0; // avoid alpha = 0 with dotNH = 1
return RECIPROCAL_PI * a2 / pow2( denom );
}
// GGX Distribution, Schlick Fresnel, GGX-Smith Visibility
vec3 BRDF_Specular_GGX( const in IncidentLight incidentLight, const in GeometricContext geometry, const in vec3 specularColor, const in float roughness ) {
float alpha = pow2( roughness ); // UE4's roughness
vec3 halfDir = normalize( incidentLight.direction + geometry.viewDir );
float dotNL = saturate( dot( geometry.normal, incidentLight.direction ) );
float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
float dotNH = saturate( dot( geometry.normal, halfDir ) );
float dotLH = saturate( dot( incidentLight.direction, halfDir ) );
vec3 F = F_Schlick( specularColor, dotLH );
float G = G_GGX_SmithCorrelated( alpha, dotNL, dotNV );
float D = D_GGX( alpha, dotNH );
return F * ( G * D );
} // validated
// Rect Area Light
// Real-Time Polygonal-Light Shading with Linearly Transformed Cosines
// by Eric Heitz, Jonathan Dupuy, Stephen Hill and David Neubelt
// code: https://github.com/selfshadow/ltc_code/
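// Overview: a roughness- and view-angle-dependent 3x3 matrix (its inverse, mInv, comes from a
// LUT sample addressed by the uv returned from LTC_Uv) maps the GGX lobe onto a clamped cosine
// distribution; the clamped-cosine integral over the transformed light polygon is then evaluated
// analytically in LTC_Evaluate as a sum of edge form factors.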
vec2 LTC_Uv( const in vec3 N, const in vec3 V, const in float roughness ) {
const float LUT_SIZE = 64.0;
const float LUT_SCALE = ( LUT_SIZE - 1.0 ) / LUT_SIZE;
const float LUT_BIAS = 0.5 / LUT_SIZE;
float dotNV = saturate( dot( N, V ) );
// texture parameterized by sqrt( GGX alpha ) and sqrt( 1 - cos( theta ) )
vec2 uv = vec2( roughness, sqrt( 1.0 - dotNV ) );
uv = uv * LUT_SCALE + LUT_BIAS;
return uv;
}
float LTC_ClippedSphereFormFactor( const in vec3 f ) {
// Real-Time Area Lighting: a Journey from Research to Production (p.102)
// An approximation of the form factor of a horizon-clipped rectangle.
float l = length( f );
return max( ( l * l + f.z ) / ( l + 1.0 ), 0.0 );
}
vec3 LTC_EdgeVectorFormFactor( const in vec3 v1, const in vec3 v2 ) {
float x = dot( v1, v2 );
float y = abs( x );
// rational polynomial approximation to theta / sin( theta ) / 2PI
float a = 0.8543985 + ( 0.4965155 + 0.0145206 * y ) * y;
float b = 3.4175940 + ( 4.1616724 + y ) * y;
float v = a / b;
float theta_sintheta = ( x > 0.0 ) ? v : 0.5 * inversesqrt( max( 1.0 - x * x, 1e-7 ) ) - v;
return cross( v1, v2 ) * theta_sintheta;
}
vec3 LTC_Evaluate( const in vec3 N, const in vec3 V, const in vec3 P, const in mat3 mInv, const in vec3 rectCoords[ 4 ] ) {
// bail if point is on back side of plane of light
// assumes ccw winding order of light vertices
vec3 v1 = rectCoords[ 1 ] - rectCoords[ 0 ];
vec3 v2 = rectCoords[ 3 ] - rectCoords[ 0 ];
vec3 lightNormal = cross( v1, v2 );
if( dot( lightNormal, P - rectCoords[ 0 ] ) < 0.0 ) return vec3( 0.0 );
// construct orthonormal basis around N
vec3 T1, T2;
T1 = normalize( V - N * dot( V, N ) );
T2 = - cross( N, T1 ); // negated from paper; possibly due to a different handedness of world coordinate system
// compute transform
mat3 mat = mInv * transposeMat3( mat3( T1, T2, N ) );
// transform rect
vec3 coords[ 4 ];
coords[ 0 ] = mat * ( rectCoords[ 0 ] - P );
coords[ 1 ] = mat * ( rectCoords[ 1 ] - P );
coords[ 2 ] = mat * ( rectCoords[ 2 ] - P );
coords[ 3 ] = mat * ( rectCoords[ 3 ] - P );
// project rect onto sphere
coords[ 0 ] = normalize( coords[ 0 ] );
coords[ 1 ] = normalize( coords[ 1 ] );
coords[ 2 ] = normalize( coords[ 2 ] );
coords[ 3 ] = normalize( coords[ 3 ] );
// calculate vector form factor
vec3 vectorFormFactor = vec3( 0.0 );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 0 ], coords[ 1 ] );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 1 ], coords[ 2 ] );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 2 ], coords[ 3 ] );
vectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 3 ], coords[ 0 ] );
// adjust for horizon clipping
float result = LTC_ClippedSphereFormFactor( vectorFormFactor );
/*
// alternate method of adjusting for horizon clipping (see reference)
// refactoring required
float len = length( vectorFormFactor );
float z = vectorFormFactor.z / len;
const float LUT_SIZE = 64.0;
const float LUT_SCALE = ( LUT_SIZE - 1.0 ) / LUT_SIZE;
const float LUT_BIAS = 0.5 / LUT_SIZE;
// tabulated horizon-clipped sphere, apparently...
vec2 uv = vec2( z * 0.5 + 0.5, len );
uv = uv * LUT_SCALE + LUT_BIAS;
float scale = texture2D( ltc_2, uv ).w;
float result = len * scale;
*/
return vec3( result );
}
// End Rect Area Light
// ref: https://www.unrealengine.com/blog/physically-based-shading-on-mobile - environmentBRDF for GGX on mobile
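// Analytic fit to the pre-integrated ("split sum") environment BRDF: instead of a LUT, a
// polynomial in roughness and dotNV yields a scale ( AB.x ) and bias ( AB.y ) applied to specularColor.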
vec3 BRDF_Specular_GGX_Environment( const in GeometricContext geometry, const in vec3 specularColor, const in float roughness ) {
float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
const vec4 c0 = vec4( - 1, - 0.0275, - 0.572, 0.022 );
const vec4 c1 = vec4( 1, 0.0425, 1.04, - 0.04 );
vec4 r = roughness * c0 + c1;
float a004 = min( r.x * r.x, exp2( - 9.28 * dotNV ) ) * r.x + r.y;
vec2 AB = vec2( -1.04, 1.04 ) * a004 + r.zw;
return specularColor * AB.x + AB.y;
} // validated
float G_BlinnPhong_Implicit( /* const in float dotNL, const in float dotNV */ ) {
// geometry term is (n dot l)(n dot v) / 4(n dot l)(n dot v)
return 0.25;
}
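// Normalized Blinn-Phong distribution: D = ( shininess + 2.0 ) / ( 2.0 * PI ) * pow( dotNH, shininess ),
// written below as RECIPROCAL_PI * ( shininess * 0.5 + 1.0 ) * pow( dotNH, shininess ).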
float D_BlinnPhong( const in float shininess, const in float dotNH ) {
return RECIPROCAL_PI * ( shininess * 0.5 + 1.0 ) * pow( dotNH, shininess );
}
vec3 BRDF_Specular_BlinnPhong( const in IncidentLight incidentLight, const in GeometricContext geometry, const in vec3 specularColor, const in float shininess ) {
vec3 halfDir = normalize( incidentLight.direction + geometry.viewDir );
//float dotNL = saturate( dot( geometry.normal, incidentLight.direction ) );
//float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
float dotNH = saturate( dot( geometry.normal, halfDir ) );
float dotLH = saturate( dot( incidentLight.direction, halfDir ) );
vec3 F = F_Schlick( specularColor, dotLH );
float G = G_BlinnPhong_Implicit( /* dotNL, dotNV */ );
float D = D_BlinnPhong( shininess, dotNH );
return F * ( G * D );
} // validated
// source: http://simonstechblog.blogspot.ca/2011/12/microfacet-brdf.html
float GGXRoughnessToBlinnExponent( const in float ggxRoughness ) {
return ( 2.0 / pow2( ggxRoughness + 0.0001 ) - 2.0 );
}
float BlinnExponentToGGXRoughness( const in float blinnExponent ) {
return sqrt( 2.0 / ( blinnExponent + 2.0 ) );
}
`,EQ=`
#ifdef USE_BUMPMAP
uniform sampler2D bumpMap;
uniform float bumpScale;
// Bump Mapping Unparametrized Surfaces on the GPU by Morten S. Mikkelsen
// http://api.unrealengine.com/attachments/Engine/Rendering/LightingAndShadows/BumpMappingWithoutTangentSpace/mm_sfgrad_bump.pdf
// Evaluate the derivative of the height w.r.t. screen-space using forward differencing (listing 2)
vec2 dHdxy_fwd() {
vec2 dSTdx = dFdx( vUv );
vec2 dSTdy = dFdy( vUv );
float Hll = bumpScale * texture2D( bumpMap, vUv ).x;
float dBx = bumpScale * texture2D( bumpMap, vUv + dSTdx ).x - Hll;
float dBy = bumpScale * texture2D( bumpMap, vUv + dSTdy ).x - Hll;
return vec2( dBx, dBy );
}
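// Perturbs the interpolated surface normal using the screen-space height gradient dHdxy and the
// screen-space derivatives of the surface position, so no precomputed tangent frame is required.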
vec3 perturbNormalArb( vec3 surf_pos, vec3 surf_norm, vec2 dHdxy ) {
// Workaround for Adreno 3XX dFd*( vec3 ) bug. See #9988
vec3 vSigmaX = vec3( dFdx( surf_pos.x ), dFdx( surf_pos.y ), dFdx( surf_pos.z ) );
vec3 vSigmaY = vec3( dFdy( surf_pos.x ), dFdy( surf_pos.y ), dFdy( surf_pos.z ) );
vec3 vN = surf_norm; // normalized
vec3 R1 = cross( vSigmaY, vN );
vec3 R2 = cross( vN, vSigmaX );
float fDet = dot( vSigmaX, R1 );
fDet *= ( float( gl_FrontFacing ) * 2.0 - 1.0 );
vec3 vGrad = sign( fDet ) * ( dHdxy.x * R1 + dHdxy.y * R2 );
return normalize( abs( fDet ) * surf_norm - vGrad );
}
#endif
`,kQ=`
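// Discards the fragment if it lies beyond any of the first UNION_CLIPPING_PLANES planes
// ( dot( vViewPosition, plane.xyz ) > plane.w ); the remaining planes discard only when that
// test passes for all of them (clip-intersection behavior).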
#if NUM_CLIPPING_PLANES > 0
vec4 plane;
#pragma unroll_loop
for ( int i = 0; i < UNION_CLIPPING_PLANES; i ++ ) {
plane = clippingPlanes[ i ];
if ( dot( vViewPosition, plane.xyz ) > plane.w ) discard;
}
#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES
bool clipped = true;
#pragma unroll_loop
for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {
plane = clippingPlanes[ i ];
clipped = ( dot( vViewPosition, plane.xyz ) > plane.w ) && clipped;
}
if ( clipped ) discard;
#endif
#endif
`,BQ=`
#if NUM_CLIPPING_PLANES > 0
#if ! defined( PHYSICAL ) && ! defined( PHONG ) && ! defined( MATCAP )
varying vec3 vViewPosition;
#endif
uniform vec4 clippingPlanes[ NUM_CLIPPING_PLANES ];
#endif
`,SQ=`
#if NUM_CLIPPING_PLANES > 0 && ! defined( PHYSICAL ) && ! defined( PHONG ) && ! defined( MATCAP )
varying vec3 vViewPosition;
#endif
`,DQ=`
#if NUM_CLIPPING_PLANES > 0 && ! defined( PHYSICAL ) && ! defined( PHONG ) && ! defined( MATCAP )
vViewPosition = - mvPosition.xyz;
#endif
`,xQ=`
#ifdef USE_COLOR
diffuseColor.rgb *= vColor;
#endif
`,TQ=`
#ifdef USE_COLOR
varying vec3 vColor;
#endif
`,IQ=`
#ifdef USE_COLOR
varying vec3 vColor;
#endif
`,PQ=`
#ifdef USE_COLOR
vColor.xyz = color.xyz;
#endif
`,MQ=`
#define PI 3.14159265359
#define PI2 6.28318530718
#define PI_HALF 1.5707963267949
#define RECIPROCAL_PI 0.31830988618
#define RECIPROCAL_PI2 0.15915494
#define LOG2 1.442695
#define EPSILON 1e-6
#define saturate(a) clamp( a, 0.0, 1.0 )
#define whiteCompliment(a) ( 1.0 - saturate( a ) )
float pow2( const in float x ) { return x*x; }
float pow3( const in float x ) { return x*x*x; }
float pow4( const in float x ) { float x2 = x*x; return x2*x2; }
float average( const in vec3 color ) { return dot( color, vec3( 0.3333 ) ); }
// expects values in the range of [0,1]x[0,1], returns values in the [0,1] range.
// do not collapse into a single function per: http://byteblacksmith.com/improvements-to-the-canonical-one-liner-glsl-rand-for-opengl-es-2-0/
highp float rand( const in vec2 uv ) {
const highp float a = 12.9898, b = 78.233, c = 43758.5453;
highp float dt = dot( uv.xy, vec2( a,b ) ), sn = mod( dt, PI );
return fract(sin(sn) * c);
}
struct IncidentLight {
vec3 color;
vec3 direction;
bool visible;
};
struct ReflectedLight {
vec3 directDiffuse;
vec3 directSpecular;
vec3 indirectDiffuse;
vec3 indirectSpecular;
};
struct GeometricContext {
vec3 position;
vec3 normal;
vec3 viewDir;
};
vec3 transformDirection( in vec3 dir, in mat4 matrix ) {
return normalize( ( matrix * vec4( dir, 0.0 ) ).xyz );
}
// http://en.wikibooks.org/wiki/GLSL_Programming/Applying_Matrix_Transformations
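// vec4( dir, 0.0 ) * matrix multiplies by the transpose of matrix; for the orthonormal upper-left
// 3x3 of a rotation matrix, the transpose equals the inverse.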
vec3 inverseTransformDirection( in vec3 dir, in mat4 matrix ) {
return normalize( ( vec4( dir, 0.0 ) * matrix ).xyz );
}
vec3 projectOnPlane(in vec3 point, in vec3 pointOnPlane, in vec3 planeNormal ) {
float distance = dot( planeNormal, point - pointOnPlane );
return - distance * planeNormal + point;
}
float sideOfPlane( in vec3 point, in vec3 pointOnPlane, in vec3 planeNormal ) {
return sign( dot( point - pointOnPlane, planeNormal ) );
}
vec3 linePlaneIntersect( in vec3 pointOnLine, in vec3 lineDirection, in vec3 pointOnPlane, in vec3 planeNormal ) {
return lineDirection * ( dot( planeNormal, pointOnPlane - pointOnLine ) / dot( planeNormal, lineDirection ) ) + pointOnLine;
}
mat3 transposeMat3( const in mat3 m ) {
mat3 tmp;
tmp[ 0 ] = vec3( m[ 0 ].x, m[ 1 ].x, m[ 2 ].x );
tmp[ 1 ] = vec3( m[ 0 ].y, m[ 1 ].y, m[ 2 ].y );
tmp[ 2 ] = vec3( m[ 0 ].z, m[ 1 ].z, m[ 2 ].z );
return tmp;
}
// https://en.wikipedia.org/wiki/Relative_luminance
float linearToRelativeLuminance( const in vec3 color ) {
vec3 weights = vec3( 0.2126, 0.7152, 0.0722 );
return dot( weights, color.rgb );
}
`,LQ=`
#ifdef ENVMAP_TYPE_CUBE_UV
#define cubeUV_textureSize (1024.0)
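// The prefiltered environment map packs all six cube faces and their roughness mip levels into a
// single 2D atlas of size cubeUV_textureSize; getCubeUV() maps a direction plus
// ( roughnessLevel, mipLevel ) to atlas coordinates, and textureCubeUV() blends between the two
// nearest roughness levels.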
int getFaceFromDirection(vec3 direction) {
vec3 absDirection = abs(direction);
int face = -1;
if( absDirection.x > absDirection.z ) {
if(absDirection.x > absDirection.y )
face = direction.x > 0.0 ? 0 : 3;
else
face = direction.y > 0.0 ? 1 : 4;
}
else {
if(absDirection.z > absDirection.y )
face = direction.z > 0.0 ? 2 : 5;
else
face = direction.y > 0.0 ? 1 : 4;
}
return face;
}
#define cubeUV_maxLods1 (log2(cubeUV_textureSize*0.25) - 1.0)
#define cubeUV_rangeClamp (exp2((6.0 - 1.0) * 2.0))
vec2 MipLevelInfo( vec3 vec, float roughnessLevel, float roughness ) {
float scale = exp2(cubeUV_maxLods1 - roughnessLevel);
float dxRoughness = dFdx(roughness);
float dyRoughness = dFdy(roughness);
vec3 dx = dFdx( vec * scale * dxRoughness );
vec3 dy = dFdy( vec * scale * dyRoughness );
float d = max( dot( dx, dx ), dot( dy, dy ) );
// Clamp the value to the max mip level count (hard-coded to 6 mips)
d = clamp(d, 1.0, cubeUV_rangeClamp);
float mipLevel = 0.5 * log2(d);
return vec2(floor(mipLevel), fract(mipLevel));
}
#define cubeUV_maxLods2 (log2(cubeUV_textureSize*0.25) - 2.0)
#define cubeUV_rcpTextureSize (1.0 / cubeUV_textureSize)
vec2 getCubeUV(vec3 direction, float roughnessLevel, float mipLevel) {
mipLevel = roughnessLevel > cubeUV_maxLods2 - 3.0 ? 0.0 : mipLevel;
float a = 16.0 * cubeUV_rcpTextureSize;
vec2 exp2_packed = exp2( vec2( roughnessLevel, mipLevel ) );
vec2 rcp_exp2_packed = vec2( 1.0 ) / exp2_packed;
// float powScale = exp2(roughnessLevel + mipLevel);
float powScale = exp2_packed.x * exp2_packed.y;
// float scale = 1.0 / exp2(roughnessLevel + 2.0 + mipLevel);
float scale = rcp_exp2_packed.x * rcp_exp2_packed.y * 0.25;
// float mipOffset = 0.75*(1.0 - 1.0/exp2(mipLevel))/exp2(roughnessLevel);
float mipOffset = 0.75*(1.0 - rcp_exp2_packed.y) * rcp_exp2_packed.x;
bool bRes = mipLevel == 0.0;
scale = bRes && (scale < a) ? a : scale;
vec3 r;
vec2 offset;
int face = getFaceFromDirection(direction);
float rcpPowScale = 1.0 / powScale;
if( face == 0) {
r = vec3(direction.x, -direction.z, direction.y);
offset = vec2(0.0+mipOffset,0.75 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? a : offset.y;
}
else if( face == 1) {
r = vec3(direction.y, direction.x, direction.z);
offset = vec2(scale+mipOffset, 0.75 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? a : offset.y;
}
else if( face == 2) {
r = vec3(direction.z, direction.x, direction.y);
offset = vec2(2.0*scale+mipOffset, 0.75 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? a : offset.y;
}
else if( face == 3) {
r = vec3(direction.x, direction.z, direction.y);
offset = vec2(0.0+mipOffset,0.5 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? 0.0 : offset.y;
}
else if( face == 4) {
r = vec3(direction.y, direction.x, -direction.z);
offset = vec2(scale+mipOffset, 0.5 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? 0.0 : offset.y;
}
else {
r = vec3(direction.z, -direction.x, direction.y);
offset = vec2(2.0*scale+mipOffset, 0.5 * rcpPowScale);
offset.y = bRes && (offset.y < 2.0*a) ? 0.0 : offset.y;
}
r = normalize(r);
float texelOffset = 0.5 * cubeUV_rcpTextureSize;
vec2 s = ( r.yz / abs( r.x ) + vec2( 1.0 ) ) * 0.5;
vec2 base = offset + vec2( texelOffset );
return base + s * ( scale - 2.0 * texelOffset );
}
#define cubeUV_maxLods3 (log2(cubeUV_textureSize*0.25) - 3.0)
vec4 textureCubeUV( sampler2D envMap, vec3 reflectedDirection, float roughness ) {
float roughnessVal = roughness* cubeUV_maxLods3;
float r1 = floor(roughnessVal);
float r2 = r1 + 1.0;
float t = fract(roughnessVal);
vec2 mipInfo = MipLevelInfo(reflectedDirection, r1, roughness);
float s = mipInfo.y;
float level0 = mipInfo.x;
float level1 = level0 + 1.0;
level1 = level1 > 5.0 ? 5.0 : level1;
// round to nearest mipmap if we are not interpolating.
level0 += min( floor( s + 0.5 ), 5.0 );
// Trilinear interpolation.
vec2 uv_10 = getCubeUV(reflectedDirection, r1, level0);
vec4 color10 = envMapTexelToLinear(texture2D(envMap, uv_10));
vec2 uv_20 = getCubeUV(reflectedDirection, r2, level0);
vec4 color20 = envMapTexelToLinear(texture2D(envMap, uv_20));
vec4 result = mix(color10, color20, t);
return vec4(result.rgb, 1.0);
}
#endif
`,RQ=`
vec3 transformedNormal = normalMatrix * objectNormal;
#ifdef FLIP_SIDED
transformedNormal = - transformedNormal;
#endif
`,UQ=`
#ifdef USE_DISPLACEMENTMAP
uniform sampler2D displacementMap;
uniform float displacementScale;
uniform float displacementBias;
#endif
`,OQ=`
#ifdef USE_DISPLACEMENTMAP
transformed += normalize( objectNormal ) * ( texture2D( displacementMap, uv ).x * displacementScale + displacementBias );
#endif
`,NQ=`
#ifdef USE_EMISSIVEMAP
vec4 emissiveColor = texture2D( emissiveMap, vUv );
emissiveColor.rgb = emissiveMapTexelToLinear( emissiveColor ).rgb;
totalEmissiveRadiance *= emissiveColor.rgb;
#endif
`,HQ=`
#ifdef USE_EMISSIVEMAP
uniform sampler2D emissiveMap;
#endif
`,QQ=`
gl_FragColor = linearToOutputTexel( gl_FragColor );
`,jQ=`
// For a discussion of what this is, please read this: http://lousodrome.net/blog/light/2013/05/26/gamma-correct-and-hdr-rendering-in-a-32-bits-buffer/
vec4 LinearToLinear( in vec4 value ) {
return value;
}
vec4 GammaToLinear( in vec4 value, in float gammaFactor ) {
return vec4( pow( value.rgb, vec3( gammaFactor ) ), value.a );
}
vec4 LinearToGamma( in vec4 value, in float gammaFactor ) {
return vec4( pow( value.rgb, vec3( 1.0 / gammaFactor ) ), value.a );
}
vec4 sRGBToLinear( in vec4 value ) {
return vec4( mix( pow( value.rgb * 0.9478672986 + vec3( 0.0521327014 ), vec3( 2.4 ) ), value.rgb * 0.0773993808, vec3( lessThanEqual( value.rgb, vec3( 0.04045 ) ) ) ), value.a );
}
vec4 LinearTosRGB( in vec4 value ) {
return vec4( mix( pow( value.rgb, vec3( 0.41666 ) ) * 1.055 - vec3( 0.055 ), value.rgb * 12.92, vec3( lessThanEqual( value.rgb, vec3( 0.0031308 ) ) ) ), value.a );
}
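// RGBE encoding: RGB mantissa with a shared exponent stored in the alpha channel.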
vec4 RGBEToLinear( in vec4 value ) {
return vec4( value.rgb * exp2( value.a * 255.0 - 128.0 ), 1.0 );
}
vec4 LinearToRGBE( in vec4 value ) {
float maxComponent = max( max( value.r, value.g ), value.b );
float fExp = clamp( ceil( log2( maxComponent ) ), -128.0, 127.0 );
return vec4( value.rgb / exp2( fExp ), ( fExp + 128.0 ) / 255.0 );
// return vec4( value.brg, ( 3.0 + 128.0 ) / 256.0 );
}
// reference: http://iwasbeingirony.blogspot.ca/2010/06/difference-between-rgbm-and-rgbd.html
vec4 RGBMToLinear( in vec4 value, in float maxRange ) {
return vec4( value.rgb * value.a * maxRange, 1.0 );
}
vec4 LinearToRGBM( in vec4 value, in float maxRange ) {
float maxRGB = max( value.r, max( value.g, value.b ) );
float M = clamp( maxRGB / maxRange, 0.0, 1.0 );
M = ceil( M * 255.0 ) / 255.0;
return vec4( value.rgb / ( M * maxRange ), M );
}
// reference: http://iwasbeingirony.blogspot.ca/2010/06/difference-between-rgbm-and-rgbd.html
vec4 RGBDToLinear( in vec4 value, in float maxRange ) {
return vec4( value.rgb * ( ( maxRange / 255.0 ) / value.a ), 1.0 );
}
vec4 LinearToRGBD( in vec4 value, in float maxRange ) {
float maxRGB = max( value.r, max( value.g, value.b ) );
float D = max( maxRange / maxRGB, 1.0 );
D = min( floor( D ) / 255.0, 1.0 );
return vec4( value.rgb * ( D * ( 255.0 / maxRange ) ), D );
}
// LogLuv reference: http://graphicrants.blogspot.ca/2009/04/rgbm-color-encoding.html
// M matrix, for encoding
const mat3 cLogLuvM = mat3( 0.2209, 0.3390, 0.4184, 0.1138, 0.6780, 0.7319, 0.0102, 0.1130, 0.2969 );
vec4 LinearToLogLuv( in vec4 value ) {
vec3 Xp_Y_XYZp = value.rgb * cLogLuvM;
Xp_Y_XYZp = max( Xp_Y_XYZp, vec3( 1e-6, 1e-6, 1e-6 ) );
vec4 vResult;
vResult.xy = Xp_Y_XYZp.xy / Xp_Y_XYZp.z;
float Le = 2.0 * log2(Xp_Y_XYZp.y) + 127.0;
vResult.w = fract( Le );
vResult.z = ( Le - ( floor( vResult.w * 255.0 ) ) / 255.0 ) / 255.0;
return vResult;
}
// Inverse M matrix, for decoding
const mat3 cLogLuvInverseM = mat3( 6.0014, -2.7008, -1.7996, -1.3320, 3.1029, -5.7721, 0.3008, -1.0882, 5.6268 );
vec4 LogLuvToLinear( in vec4 value ) {
float Le = value.z * 255.0 + value.w;
vec3 Xp_Y_XYZp;
Xp_Y_XYZp.y = exp2( ( Le - 127.0 ) / 2.0 );
Xp_Y_XYZp.z = Xp_Y_XYZp.y / value.y;
Xp_Y_XYZp.x = value.x * Xp_Y_XYZp.z;
vec3 vRGB = Xp_Y_XYZp.rgb * cLogLuvInverseM;
return vec4( max( vRGB, 0.0 ), 1.0 );
}
`,GQ=`
#ifdef USE_ENVMAP
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )
vec3 cameraToVertex = normalize( vWorldPosition - cameraPosition );
// Transforming Normal Vectors with the Inverse Transformation
vec3 worldNormal = inverseTransformDirection( normal, viewMatrix );
#ifdef ENVMAP_MODE_REFLECTION
vec3 reflectVec = reflect( cameraToVertex, worldNormal );
#else
vec3 reflectVec = refract( cameraToVertex, worldNormal, refractionRatio );
#endif
#else
vec3 reflectVec = vReflect;
#endif
#ifdef ENVMAP_TYPE_CUBE
vec4 envColor = textureCube( envMap, vec3( flipEnvMap * reflectVec.x, reflectVec.yz ) );
#elif defined( ENVMAP_TYPE_EQUIREC )
vec2 sampleUV;
reflectVec = normalize( reflectVec );
sampleUV.y = asin( clamp( reflectVec.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
sampleUV.x = atan( reflectVec.z, reflectVec.x ) * RECIPROCAL_PI2 + 0.5;
vec4 envColor = texture2D( envMap, sampleUV );
#elif defined( ENVMAP_TYPE_SPHERE )
reflectVec = normalize( reflectVec );
vec3 reflectView = normalize( ( viewMatrix * vec4( reflectVec, 0.0 ) ).xyz + vec3( 0.0, 0.0, 1.0 ) );
vec4 envColor = texture2D( envMap, reflectView.xy * 0.5 + 0.5 );
#else
vec4 envColor = vec4( 0.0 );
#endif
envColor = envMapTexelToLinear( envColor );
#ifdef ENVMAP_BLENDING_MULTIPLY
outgoingLight = mix( outgoingLight, outgoingLight * envColor.xyz, specularStrength * reflectivity );
#elif defined( ENVMAP_BLENDING_MIX )
outgoingLight = mix( outgoingLight, envColor.xyz, specularStrength * reflectivity );
#elif defined( ENVMAP_BLENDING_ADD )
outgoingLight += envColor.xyz * specularStrength * reflectivity;
#endif
#endif
`,zQ=`
#if defined( USE_ENVMAP ) || defined( PHYSICAL )
uniform float reflectivity;
uniform float envMapIntensity;
#endif
#ifdef USE_ENVMAP
#if ! defined( PHYSICAL ) && ( defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG ) )
varying vec3 vWorldPosition;
#endif
#ifdef ENVMAP_TYPE_CUBE
uniform samplerCube envMap;
#else
uniform sampler2D envMap;
#endif
uniform float flipEnvMap;
uniform int maxMipLevel;
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG ) || defined( PHYSICAL )
uniform float refractionRatio;
#else
varying vec3 vReflect;
#endif
#endif
`,qQ=`
#ifdef USE_ENVMAP
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )
varying vec3 vWorldPosition;
#else
varying vec3 vReflect;
uniform float refractionRatio;
#endif
#endif
`,VQ=`
#ifdef USE_ENVMAP
#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )
vWorldPosition = worldPosition.xyz;
#else
vec3 cameraToVertex = normalize( worldPosition.xyz - cameraPosition );
vec3 worldNormal = inverseTransformDirection( transformedNormal, viewMatrix );
#ifdef ENVMAP_MODE_REFLECTION
vReflect = reflect( cameraToVertex, worldNormal );
#else
vReflect = refract( cameraToVertex, worldNormal, refractionRatio );
#endif
#endif
#endif
`,WQ=`
#ifdef USE_FOG
fogDepth = -mvPosition.z;
#endif
`,KQ=`
#ifdef USE_FOG
varying float fogDepth;
#endif
`,YQ=`
#ifdef USE_FOG
#ifdef FOG_EXP2
float fogFactor = whiteCompliment( exp2( - fogDensity * fogDensity * fogDepth * fogDepth * LOG2 ) );
#else
float fogFactor = smoothstep( fogNear, fogFar, fogDepth );
#endif
gl_FragColor.rgb = mix( gl_FragColor.rgb, fogColor, fogFactor );
#endif
`,XQ=`
#ifdef USE_FOG
uniform vec3 fogColor;
varying float fogDepth;
#ifdef FOG_EXP2
uniform float fogDensity;
#else
uniform float fogNear;
uniform float fogFar;
#endif
#endif
`,JQ=`
#ifdef TOON
uniform sampler2D gradientMap;
vec3 getGradientIrradiance( vec3 normal, vec3 lightDirection ) {
// dotNL will be from -1.0 to 1.0
float dotNL = dot( normal, lightDirection );
vec2 coord = vec2( dotNL * 0.5 + 0.5, 0.0 );
#ifdef USE_GRADIENTMAP
return texture2D( gradientMap, coord ).rgb;
#else
return ( coord.x < 0.7 ) ? vec3( 0.7 ) : vec3( 1.0 );
#endif
}
#endif
`,ZQ=`
#ifdef USE_LIGHTMAP
reflectedLight.indirectDiffuse += PI * texture2D( lightMap, vUv2 ).xyz * lightMapIntensity; // factor of PI should not be present; included here to prevent breakage
#endif
`,$Q=`
#ifdef USE_LIGHTMAP
uniform sampler2D lightMap;
uniform float lightMapIntensity;
#endif
`,ej=`
vec3 diffuse = vec3( 1.0 );
GeometricContext geometry;
geometry.position = mvPosition.xyz;
geometry.normal = normalize( transformedNormal );
geometry.viewDir = normalize( -mvPosition.xyz );
GeometricContext backGeometry;
backGeometry.position = geometry.position;
backGeometry.normal = -geometry.normal;
backGeometry.viewDir = geometry.viewDir;
vLightFront = vec3( 0.0 );
#ifdef DOUBLE_SIDED
vLightBack = vec3( 0.0 );
#endif
IncidentLight directLight;
float dotNL;
vec3 directLightColor_Diffuse;
#if NUM_POINT_LIGHTS > 0
#pragma unroll_loop
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
getPointDirectLightIrradiance( pointLights[ i ], geometry, directLight );
dotNL = dot( geometry.normal, directLight.direction );
directLightColor_Diffuse = PI * directLight.color;
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
#ifdef DOUBLE_SIDED
vLightBack += saturate( -dotNL ) * directLightColor_Diffuse;
#endif
}
#endif
#if NUM_SPOT_LIGHTS > 0
#pragma unroll_loop
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
getSpotDirectLightIrradiance( spotLights[ i ], geometry, directLight );
dotNL = dot( geometry.normal, directLight.direction );
directLightColor_Diffuse = PI * directLight.color;
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
#ifdef DOUBLE_SIDED
vLightBack += saturate( -dotNL ) * directLightColor_Diffuse;
#endif
}
#endif
/*
#if NUM_RECT_AREA_LIGHTS > 0
for ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {
// TODO (abelnation): implement
}
#endif
*/
#if NUM_DIR_LIGHTS > 0
#pragma unroll_loop
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
getDirectionalDirectLightIrradiance( directionalLights[ i ], geometry, directLight );
dotNL = dot( geometry.normal, directLight.direction );
directLightColor_Diffuse = PI * directLight.color;
vLightFront += saturate( dotNL ) * directLightColor_Diffuse;
#ifdef DOUBLE_SIDED
vLightBack += saturate( -dotNL ) * directLightColor_Diffuse;
#endif
}
#endif
#if NUM_HEMI_LIGHTS > 0
#pragma unroll_loop
for ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {
vLightFront += getHemisphereLightIrradiance( hemisphereLights[ i ], geometry );
#ifdef DOUBLE_SIDED
vLightBack += getHemisphereLightIrradiance( hemisphereLights[ i ], backGeometry );
#endif
}
#endif
`,tj=`
uniform vec3 ambientLightColor;
vec3 getAmbientLightIrradiance( const in vec3 ambientLightColor ) {
vec3 irradiance = ambientLightColor;
#ifndef PHYSICALLY_CORRECT_LIGHTS
irradiance *= PI;
#endif
return irradiance;
}
#if NUM_DIR_LIGHTS > 0
struct DirectionalLight {
vec3 direction;
vec3 color;
int shadow;
float shadowBias;
float shadowRadius;
vec2 shadowMapSize;
};
uniform DirectionalLight directionalLights[ NUM_DIR_LIGHTS ];
void getDirectionalDirectLightIrradiance( const in DirectionalLight directionalLight, const in GeometricContext geometry, out IncidentLight directLight ) {
directLight.color = directionalLight.color;
directLight.direction = directionalLight.direction;
directLight.visible = true;
}
#endif
#if NUM_POINT_LIGHTS > 0
struct PointLight {
vec3 position;
vec3 color;
float distance;
float decay;
int shadow;
float shadowBias;
float shadowRadius;
vec2 shadowMapSize;
float shadowCameraNear;
float shadowCameraFar;
};
uniform PointLight pointLights[ NUM_POINT_LIGHTS ];
// directLight is an out parameter as having it as a return value caused compiler errors on some devices
void getPointDirectLightIrradiance( const in PointLight pointLight, const in GeometricContext geometry, out IncidentLight directLight ) {
vec3 lVector = pointLight.position - geometry.position;
directLight.direction = normalize( lVector );
float lightDistance = length( lVector );
directLight.color = pointLight.color;
directLight.color *= punctualLightIntensityToIrradianceFactor( lightDistance, pointLight.distance, pointLight.decay );
directLight.visible = ( directLight.color != vec3( 0.0 ) );
}
#endif
#if NUM_SPOT_LIGHTS > 0
struct SpotLight {
vec3 position;
vec3 direction;
vec3 color;
float distance;
float decay;
float coneCos;
float penumbraCos;
int shadow;
float shadowBias;
float shadowRadius;
vec2 shadowMapSize;
};
uniform SpotLight spotLights[ NUM_SPOT_LIGHTS ];
// directLight is an out parameter as having it as a return value caused compiler errors on some devices
void getSpotDirectLightIrradiance( const in SpotLight spotLight, const in GeometricContext geometry, out IncidentLight directLight ) {
vec3 lVector = spotLight.position - geometry.position;
directLight.direction = normalize( lVector );
float lightDistance = length( lVector );
float angleCos = dot( directLight.direction, spotLight.direction );
if ( angleCos > spotLight.coneCos ) {
float spotEffect = smoothstep( spotLight.coneCos, spotLight.penumbraCos, angleCos );
directLight.color = spotLight.color;
directLight.color *= spotEffect * punctualLightIntensityToIrradianceFactor( lightDistance, spotLight.distance, spotLight.decay );
directLight.visible = true;
} else {
directLight.color = vec3( 0.0 );
directLight.visible = false;
}
}
#endif
#if NUM_RECT_AREA_LIGHTS > 0
struct RectAreaLight {
vec3 color;
vec3 position;
vec3 halfWidth;
vec3 halfHeight;
};
// Pre-computed values of the Linearly Transformed Cosines (LTC) approximation of the BRDF
// The BRDF approximation texture is 64x64
uniform sampler2D ltc_1; // RGBA Float
uniform sampler2D ltc_2; // RGBA Float
uniform RectAreaLight rectAreaLights[ NUM_RECT_AREA_LIGHTS ];
#endif
#if NUM_HEMI_LIGHTS > 0
struct HemisphereLight {
vec3 direction;
vec3 skyColor;
vec3 groundColor;
};
uniform HemisphereLight hemisphereLights[ NUM_HEMI_LIGHTS ];
vec3 getHemisphereLightIrradiance( const in HemisphereLight hemiLight, const in GeometricContext geometry ) {
float dotNL = dot( geometry.normal, hemiLight.direction );
float hemiDiffuseWeight = 0.5 * dotNL + 0.5;
vec3 irradiance = mix( hemiLight.groundColor, hemiLight.skyColor, hemiDiffuseWeight );
#ifndef PHYSICALLY_CORRECT_LIGHTS
irradiance *= PI;
#endif
return irradiance;
}
#endif
`,ij=`
#if defined( USE_ENVMAP ) && defined( PHYSICAL )
vec3 getLightProbeIndirectIrradiance( /*const in SpecularLightProbe specularLightProbe,*/ const in GeometricContext geometry, const in int maxMIPLevel ) {
vec3 worldNormal = inverseTransformDirection( geometry.normal, viewMatrix );
#ifdef ENVMAP_TYPE_CUBE
vec3 queryVec = vec3( flipEnvMap * worldNormal.x, worldNormal.yz );
// TODO: replace with properly filtered cubemaps and access the irradiance LOD level, be it the last LOD level
// of a specular cubemap, or just the default level of a specially created irradiance cubemap.
#ifdef TEXTURE_LOD_EXT
vec4 envMapColor = textureCubeLodEXT( envMap, queryVec, float( maxMIPLevel ) );
#else
// force the bias high to get the last LOD level as it is the most blurred.
vec4 envMapColor = textureCube( envMap, queryVec, float( maxMIPLevel ) );
#endif
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
#elif defined( ENVMAP_TYPE_CUBE_UV )
vec3 queryVec = vec3( flipEnvMap * worldNormal.x, worldNormal.yz );
vec4 envMapColor = textureCubeUV( envMap, queryVec, 1.0 );
#else
vec4 envMapColor = vec4( 0.0 );
#endif
return PI * envMapColor.rgb * envMapIntensity;
}
// taken from here: http://casual-effects.blogspot.ca/2011/08/plausible-environment-lighting-in-two.html
float getSpecularMIPLevel( const in float blinnShininessExponent, const in int maxMIPLevel ) {
//float envMapWidth = pow( 2.0, maxMIPLevelScalar );
//float desiredMIPLevel = log2( envMapWidth * sqrt( 3.0 ) ) - 0.5 * log2( pow2( blinnShininessExponent ) + 1.0 );
float maxMIPLevelScalar = float( maxMIPLevel );
float desiredMIPLevel = maxMIPLevelScalar + 0.79248 - 0.5 * log2( pow2( blinnShininessExponent ) + 1.0 );
// clamp to allowable LOD ranges.
return clamp( desiredMIPLevel, 0.0, maxMIPLevelScalar );
}
vec3 getLightProbeIndirectRadiance( /*const in SpecularLightProbe specularLightProbe,*/ const in GeometricContext geometry, const in float blinnShininessExponent, const in int maxMIPLevel ) {
#ifdef ENVMAP_MODE_REFLECTION
vec3 reflectVec = reflect( -geometry.viewDir, geometry.normal );
#else
vec3 reflectVec = refract( -geometry.viewDir, geometry.normal, refractionRatio );
#endif
reflectVec = inverseTransformDirection( reflectVec, viewMatrix );
float specularMIPLevel = getSpecularMIPLevel( blinnShininessExponent, maxMIPLevel );
#ifdef ENVMAP_TYPE_CUBE
vec3 queryReflectVec = vec3( flipEnvMap * reflectVec.x, reflectVec.yz );
#ifdef TEXTURE_LOD_EXT
vec4 envMapColor = textureCubeLodEXT( envMap, queryReflectVec, specularMIPLevel );
#else
vec4 envMapColor = textureCube( envMap, queryReflectVec, specularMIPLevel );
#endif
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
#elif defined( ENVMAP_TYPE_CUBE_UV )
vec3 queryReflectVec = vec3( flipEnvMap * reflectVec.x, reflectVec.yz );
vec4 envMapColor = textureCubeUV( envMap, queryReflectVec, BlinnExponentToGGXRoughness(blinnShininessExponent ));
#elif defined( ENVMAP_TYPE_EQUIREC )
vec2 sampleUV;
sampleUV.y = asin( clamp( reflectVec.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
sampleUV.x = atan( reflectVec.z, reflectVec.x ) * RECIPROCAL_PI2 + 0.5;
#ifdef TEXTURE_LOD_EXT
vec4 envMapColor = texture2DLodEXT( envMap, sampleUV, specularMIPLevel );
#else
vec4 envMapColor = texture2D( envMap, sampleUV, specularMIPLevel );
#endif
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
#elif defined( ENVMAP_TYPE_SPHERE )
vec3 reflectView = normalize( ( viewMatrix * vec4( reflectVec, 0.0 ) ).xyz + vec3( 0.0,0.0,1.0 ) );
#ifdef TEXTURE_LOD_EXT
vec4 envMapColor = texture2DLodEXT( envMap, reflectView.xy * 0.5 + 0.5, specularMIPLevel );
#else
vec4 envMapColor = texture2D( envMap, reflectView.xy * 0.5 + 0.5, specularMIPLevel );
#endif
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
#endif
return envMapColor.rgb * envMapIntensity;
}
#endif
`,nj=`
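// Populates the BlinnPhongMaterial struct consumed by the Blinn-Phong render equations (RE_Direct_BlinnPhong / RE_IndirectDiffuse_BlinnPhong).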
BlinnPhongMaterial material;
material.diffuseColor = diffuseColor.rgb;
material.specularColor = specular;
material.specularShininess = shininess;
material.specularStrength = specularStrength;
`,aj=`
varying vec3 vViewPosition;
#ifndef FLAT_SHADED
varying vec3 vNormal;
#endif
struct BlinnPhongMaterial {
vec3 diffuseColor;
vec3 specularColor;
float specularShininess;
float specularStrength;
};
void RE_Direct_BlinnPhong( const in IncidentLight directLight, const in GeometricContext geometry, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {
#ifdef TOON
vec3 irradiance = getGradientIrradiance( geometry.normal, directLight.direction ) * directLight.color;
#else
float dotNL = saturate( dot( geometry.normal, directLight.direction ) );
vec3 irradiance = dotNL * directLight.color;
#endif
#ifndef PHYSICALLY_CORRECT_LIGHTS
irradiance *= PI; // punctual light
#endif
reflectedLight.directDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
reflectedLight.directSpecular += irradiance * BRDF_Specular_BlinnPhong( directLight, geometry, material.specularColor, material.specularShininess ) * material.specularStrength;
}
void RE_IndirectDiffuse_BlinnPhong( const in vec3 irradiance, const in GeometricContext geometry, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {
reflectedLight.indirectDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
}
#define RE_Direct RE_Direct_BlinnPhong
#define RE_IndirectDiffuse RE_IndirectDiffuse_BlinnPhong
#define Material_LightProbeLOD( material ) (0)
`,sj=`
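// Builds the PhysicalMaterial struct from the metalness/roughness inputs: diffuse is attenuated toward zero and specular blends toward the base color as metalness increases.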
PhysicalMaterial material;
material.diffuseColor = diffuseColor.rgb * ( 1.0 - metalnessFactor );
material.specularRoughness = clamp( roughnessFactor, 0.04, 1.0 );
#ifdef STANDARD
material.specularColor = mix( vec3( DEFAULT_SPECULAR_COEFFICIENT ), diffuseColor.rgb, metalnessFactor );
#else
material.specularColor = mix( vec3( MAXIMUM_SPECULAR_COEFFICIENT * pow2( reflectivity ) ), diffuseColor.rgb, metalnessFactor );
material.clearCoat = saturate( clearCoat ); // Burley clearcoat model
material.clearCoatRoughness = clamp( clearCoatRoughness, 0.04, 1.0 );
#endif
`,rj=`
struct PhysicalMaterial {
vec3 diffuseColor;
float specularRoughness;
vec3 specularColor;
#ifndef STANDARD
float clearCoat;
float clearCoatRoughness;
#endif
};
#define MAXIMUM_SPECULAR_COEFFICIENT 0.16
#define DEFAULT_SPECULAR_COEFFICIENT 0.04
// Clear coat directional hemispherical reflectance (this approximation should be improved)
float clearCoatDHRApprox( const in float roughness, const in float dotNL ) {
return DEFAULT_SPECULAR_COEFFICIENT + ( 1.0 - DEFAULT_SPECULAR_COEFFICIENT ) * ( pow( 1.0 - dotNL, 5.0 ) * pow( 1.0 - roughness, 2.0 ) );
}
#if NUM_RECT_AREA_LIGHTS > 0
void RE_Direct_RectArea_Physical( const in RectAreaLight rectAreaLight, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
vec3 normal = geometry.normal;
vec3 viewDir = geometry.viewDir;
vec3 position = geometry.position;
vec3 lightPos = rectAreaLight.position;
vec3 halfWidth = rectAreaLight.halfWidth;
vec3 halfHeight = rectAreaLight.halfHeight;
vec3 lightColor = rectAreaLight.color;
float roughness = material.specularRoughness;
vec3 rectCoords[ 4 ];
rectCoords[ 0 ] = lightPos + halfWidth - halfHeight; // counterclockwise; light shines in local neg z direction
rectCoords[ 1 ] = lightPos - halfWidth - halfHeight;
rectCoords[ 2 ] = lightPos - halfWidth + halfHeight;
rectCoords[ 3 ] = lightPos + halfWidth + halfHeight;
vec2 uv = LTC_Uv( normal, viewDir, roughness );
vec4 t1 = texture2D( ltc_1, uv );
vec4 t2 = texture2D( ltc_2, uv );
mat3 mInv = mat3(
vec3( t1.x, 0, t1.y ),
vec3( 0, 1, 0 ),
vec3( t1.z, 0, t1.w )
);
// LTC Fresnel Approximation by Stephen Hill
// http://blog.selfshadow.com/publications/s2016-advances/s2016_ltc_fresnel.pdf
vec3 fresnel = ( material.specularColor * t2.x + ( vec3( 1.0 ) - material.specularColor ) * t2.y );
reflectedLight.directSpecular += lightColor * fresnel * LTC_Evaluate( normal, viewDir, position, mInv, rectCoords );
reflectedLight.directDiffuse += lightColor * material.diffuseColor * LTC_Evaluate( normal, viewDir, position, mat3( 1.0 ), rectCoords );
}
#endif
void RE_Direct_Physical( const in IncidentLight directLight, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
float dotNL = saturate( dot( geometry.normal, directLight.direction ) );
vec3 irradiance = dotNL * directLight.color;
#ifndef PHYSICALLY_CORRECT_LIGHTS
irradiance *= PI; // punctual light
#endif
#ifndef STANDARD
float clearCoatDHR = material.clearCoat * clearCoatDHRApprox( material.clearCoatRoughness, dotNL );
#else
float clearCoatDHR = 0.0;
#endif
reflectedLight.directSpecular += ( 1.0 - clearCoatDHR ) * irradiance * BRDF_Specular_GGX( directLight, geometry, material.specularColor, material.specularRoughness );
reflectedLight.directDiffuse += ( 1.0 - clearCoatDHR ) * irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
#ifndef STANDARD
reflectedLight.directSpecular += irradiance * material.clearCoat * BRDF_Specular_GGX( directLight, geometry, vec3( DEFAULT_SPECULAR_COEFFICIENT ), material.clearCoatRoughness );
#endif
}
void RE_IndirectDiffuse_Physical( const in vec3 irradiance, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
reflectedLight.indirectDiffuse += irradiance * BRDF_Diffuse_Lambert( material.diffuseColor );
}
void RE_IndirectSpecular_Physical( const in vec3 radiance, const in vec3 clearCoatRadiance, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {
#ifndef STANDARD
float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
float dotNL = dotNV;
float clearCoatDHR = material.clearCoat * clearCoatDHRApprox( material.clearCoatRoughness, dotNL );
#else
float clearCoatDHR = 0.0;
#endif
reflectedLight.indirectSpecular += ( 1.0 - clearCoatDHR ) * radiance * BRDF_Specular_GGX_Environment( geometry, material.specularColor, material.specularRoughness );
#ifndef STANDARD
reflectedLight.indirectSpecular += clearCoatRadiance * material.clearCoat * BRDF_Specular_GGX_Environment( geometry, vec3( DEFAULT_SPECULAR_COEFFICIENT ), material.clearCoatRoughness );
#endif
}
#define RE_Direct RE_Direct_Physical
#define RE_Direct_RectArea RE_Direct_RectArea_Physical
#define RE_IndirectDiffuse RE_IndirectDiffuse_Physical
#define RE_IndirectSpecular RE_IndirectSpecular_Physical
#define Material_BlinnShininessExponent( material ) GGXRoughnessToBlinnExponent( material.specularRoughness )
#define Material_ClearCoat_BlinnShininessExponent( material ) GGXRoughnessToBlinnExponent( material.clearCoatRoughness )
// ref: https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
float computeSpecularOcclusion( const in float dotNV, const in float ambientOcclusion, const in float roughness ) {
return saturate( pow( dotNV + ambientOcclusion, exp2( - 16.0 * roughness - 1.0 ) ) - 1.0 + ambientOcclusion );
}
`,oj=`
/**
 * This is a template that can be used to light a material; it uses pluggable
 * RenderEquations (RE) for specific lighting scenarios.
*
* Instructions for use:
 * - Ensure that RE_Direct, RE_IndirectDiffuse and RE_IndirectSpecular are all defined.
* - If you have defined an RE_IndirectSpecular, you need to also provide a Material_LightProbeLOD. <---- ???
* - Create a material parameter that is to be passed as the third parameter to your lighting functions.
*
* TODO:
* - Add area light support.
* - Add sphere light support.
* - Add diffuse light probe (irradiance cubemap) support.
*/
GeometricContext geometry;
geometry.position = - vViewPosition;
geometry.normal = normal;
geometry.viewDir = normalize( vViewPosition );
IncidentLight directLight;
#if ( NUM_POINT_LIGHTS > 0 ) && defined( RE_Direct )
PointLight pointLight;
#pragma unroll_loop
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
pointLight = pointLights[ i ];
getPointDirectLightIrradiance( pointLight, geometry, directLight );
#ifdef USE_SHADOWMAP
directLight.color *= all( bvec2( pointLight.shadow, directLight.visible ) ) ? getPointShadow( pointShadowMap[ i ], pointLight.shadowMapSize, pointLight.shadowBias, pointLight.shadowRadius, vPointShadowCoord[ i ], pointLight.shadowCameraNear, pointLight.shadowCameraFar ) : 1.0;
#endif
RE_Direct( directLight, geometry, material, reflectedLight );
}
#endif
#if ( NUM_SPOT_LIGHTS > 0 ) && defined( RE_Direct )
SpotLight spotLight;
#pragma unroll_loop
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
spotLight = spotLights[ i ];
getSpotDirectLightIrradiance( spotLight, geometry, directLight );
#ifdef USE_SHADOWMAP
directLight.color *= all( bvec2( spotLight.shadow, directLight.visible ) ) ? getShadow( spotShadowMap[ i ], spotLight.shadowMapSize, spotLight.shadowBias, spotLight.shadowRadius, vSpotShadowCoord[ i ] ) : 1.0;
#endif
RE_Direct( directLight, geometry, material, reflectedLight );
}
#endif
#if ( NUM_DIR_LIGHTS > 0 ) && defined( RE_Direct )
DirectionalLight directionalLight;
#pragma unroll_loop
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
directionalLight = directionalLights[ i ];
getDirectionalDirectLightIrradiance( directionalLight, geometry, directLight );
#ifdef USE_SHADOWMAP
directLight.color *= all( bvec2( directionalLight.shadow, directLight.visible ) ) ? getShadow( directionalShadowMap[ i ], directionalLight.shadowMapSize, directionalLight.shadowBias, directionalLight.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;
#endif
RE_Direct( directLight, geometry, material, reflectedLight );
}
#endif
#if ( NUM_RECT_AREA_LIGHTS > 0 ) && defined( RE_Direct_RectArea )
RectAreaLight rectAreaLight;
#pragma unroll_loop
for ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {
rectAreaLight = rectAreaLights[ i ];
RE_Direct_RectArea( rectAreaLight, geometry, material, reflectedLight );
}
#endif
#if defined( RE_IndirectDiffuse )
vec3 irradiance = getAmbientLightIrradiance( ambientLightColor );
#if ( NUM_HEMI_LIGHTS > 0 )
#pragma unroll_loop
for ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {
irradiance += getHemisphereLightIrradiance( hemisphereLights[ i ], geometry );
}
#endif
#endif
#if defined( RE_IndirectSpecular )
vec3 radiance = vec3( 0.0 );
vec3 clearCoatRadiance = vec3( 0.0 );
#endif
`,lj=`
#if defined( RE_IndirectDiffuse )
#ifdef USE_LIGHTMAP
vec3 lightMapIrradiance = texture2D( lightMap, vUv2 ).xyz * lightMapIntensity;
#ifndef PHYSICALLY_CORRECT_LIGHTS
lightMapIrradiance *= PI; // factor of PI should not be present; included here to prevent breakage
#endif
irradiance += lightMapIrradiance;
#endif
#if defined( USE_ENVMAP ) && defined( PHYSICAL ) && defined( ENVMAP_TYPE_CUBE_UV )
irradiance += getLightProbeIndirectIrradiance( /*lightProbe,*/ geometry, maxMipLevel );
#endif
#endif
#if defined( USE_ENVMAP ) && defined( RE_IndirectSpecular )
radiance += getLightProbeIndirectRadiance( /*specularLightProbe,*/ geometry, Material_BlinnShininessExponent( material ), maxMipLevel );
#ifndef STANDARD
clearCoatRadiance += getLightProbeIndirectRadiance( /*specularLightProbe,*/ geometry, Material_ClearCoat_BlinnShininessExponent( material ), maxMipLevel );
#endif
#endif
`,uj=`
#if defined( RE_IndirectDiffuse )
RE_IndirectDiffuse( irradiance, geometry, material, reflectedLight );
#endif
#if defined( RE_IndirectSpecular )
RE_IndirectSpecular( radiance, clearCoatRadiance, geometry, material, reflectedLight );
#endif
`,cj=`
#if defined( USE_LOGDEPTHBUF ) && defined( USE_LOGDEPTHBUF_EXT )
gl_FragDepthEXT = log2( vFragDepth ) * logDepthBufFC * 0.5;
#endif
`,dj=`
#if defined( USE_LOGDEPTHBUF ) && defined( USE_LOGDEPTHBUF_EXT )
uniform float logDepthBufFC;
varying float vFragDepth;
#endif
`,fj=`
#ifdef USE_LOGDEPTHBUF
#ifdef USE_LOGDEPTHBUF_EXT
varying float vFragDepth;
#else
uniform float logDepthBufFC;
#endif
#endif
`,Aj=`
#ifdef USE_LOGDEPTHBUF
#ifdef USE_LOGDEPTHBUF_EXT
vFragDepth = 1.0 + gl_Position.w;
#else
gl_Position.z = log2( max( EPSILON, gl_Position.w + 1.0 ) ) * logDepthBufFC - 1.0;
gl_Position.z *= gl_Position.w;
#endif
#endif
`,hj=`
#ifdef USE_MAP
vec4 texelColor = texture2D( map, vUv );
texelColor = mapTexelToLinear( texelColor );
diffuseColor *= texelColor;
#endif
`,gj=`
#ifdef USE_MAP
uniform sampler2D map;
#endif
`,pj=`
#ifdef USE_MAP
vec2 uv = ( uvTransform * vec3( gl_PointCoord.x, 1.0 - gl_PointCoord.y, 1 ) ).xy;
vec4 mapTexel = texture2D( map, uv );
diffuseColor *= mapTexelToLinear( mapTexel );
#endif
`,mj=`
#ifdef USE_MAP
uniform mat3 uvTransform;
uniform sampler2D map;
#endif
`,_j=`
float metalnessFactor = metalness;
#ifdef USE_METALNESSMAP
vec4 texelMetalness = texture2D( metalnessMap, vUv );
// reads channel B, compatible with a combined OcclusionRoughnessMetallic (RGB) texture
metalnessFactor *= texelMetalness.b;
#endif
`,Fj=`
#ifdef USE_METALNESSMAP
uniform sampler2D metalnessMap;
#endif
`,bj=`
#ifdef USE_MORPHNORMALS
objectNormal += ( morphNormal0 - normal ) * morphTargetInfluences[ 0 ];
objectNormal += ( morphNormal1 - normal ) * morphTargetInfluences[ 1 ];
objectNormal += ( morphNormal2 - normal ) * morphTargetInfluences[ 2 ];
objectNormal += ( morphNormal3 - normal ) * morphTargetInfluences[ 3 ];
#endif
`,vj=`
#ifdef USE_MORPHTARGETS
#ifndef USE_MORPHNORMALS
uniform float morphTargetInfluences[ 8 ];
#else
uniform float morphTargetInfluences[ 4 ];
#endif
#endif
`,yj=`
#ifdef USE_MORPHTARGETS
transformed += ( morphTarget0 - position ) * morphTargetInfluences[ 0 ];
transformed += ( morphTarget1 - position ) * morphTargetInfluences[ 1 ];
transformed += ( morphTarget2 - position ) * morphTargetInfluences[ 2 ];
transformed += ( morphTarget3 - position ) * morphTargetInfluences[ 3 ];
#ifndef USE_MORPHNORMALS
transformed += ( morphTarget4 - position ) * morphTargetInfluences[ 4 ];
transformed += ( morphTarget5 - position ) * morphTargetInfluences[ 5 ];
transformed += ( morphTarget6 - position ) * morphTargetInfluences[ 6 ];
transformed += ( morphTarget7 - position ) * morphTargetInfluences[ 7 ];
#endif
#endif
`,wj=`
#ifdef FLAT_SHADED
// Workaround for Adreno/Nexus 5 not being able to do dFdx( vViewPosition ) ...
vec3 fdx = vec3( dFdx( vViewPosition.x ), dFdx( vViewPosition.y ), dFdx( vViewPosition.z ) );
vec3 fdy = vec3( dFdy( vViewPosition.x ), dFdy( vViewPosition.y ), dFdy( vViewPosition.z ) );
vec3 normal = normalize( cross( fdx, fdy ) );
#else
vec3 normal = normalize( vNormal );
#ifdef DOUBLE_SIDED
normal = normal * ( float( gl_FrontFacing ) * 2.0 - 1.0 );
#endif
#endif
`,Cj=`
#ifdef USE_NORMALMAP
#ifdef OBJECTSPACE_NORMALMAP
normal = texture2D( normalMap, vUv ).xyz * 2.0 - 1.0; // overrides both flatShading and attribute normals
#ifdef FLIP_SIDED
normal = - normal;
#endif
#ifdef DOUBLE_SIDED
normal = normal * ( float( gl_FrontFacing ) * 2.0 - 1.0 );
#endif
normal = normalize( normalMatrix * normal );
#else // tangent-space normal map
normal = perturbNormal2Arb( -vViewPosition, normal );
#endif
#elif defined( USE_BUMPMAP )
normal = perturbNormalArb( -vViewPosition, normal, dHdxy_fwd() );
#endif
`,Ej=`
#ifdef USE_NORMALMAP
uniform sampler2D normalMap;
uniform vec2 normalScale;
#ifdef OBJECTSPACE_NORMALMAP
uniform mat3 normalMatrix;
#else
// Per-Pixel Tangent Space Normal Mapping
// http://hacksoflife.blogspot.ch/2009/11/per-pixel-tangent-space-normal-mapping.html
vec3 perturbNormal2Arb( vec3 eye_pos, vec3 surf_norm ) {
// Workaround for Adreno 3XX dFd*( vec3 ) bug. See #9988
vec3 q0 = vec3( dFdx( eye_pos.x ), dFdx( eye_pos.y ), dFdx( eye_pos.z ) );
vec3 q1 = vec3( dFdy( eye_pos.x ), dFdy( eye_pos.y ), dFdy( eye_pos.z ) );
vec2 st0 = dFdx( vUv.st );
vec2 st1 = dFdy( vUv.st );
float scale = sign( st1.t * st0.s - st0.t * st1.s ); // we do not care about the magnitude
vec3 S = normalize( ( q0 * st1.t - q1 * st0.t ) * scale );
vec3 T = normalize( ( - q0 * st1.s + q1 * st0.s ) * scale );
vec3 N = normalize( surf_norm );
mat3 tsn = mat3( S, T, N );
vec3 mapN = texture2D( normalMap, vUv ).xyz * 2.0 - 1.0;
mapN.xy *= normalScale;
mapN.xy *= ( float( gl_FrontFacing ) * 2.0 - 1.0 );
return normalize( tsn * mapN );
}
#endif
#endif
`,kj=`
vec3 packNormalToRGB( const in vec3 normal ) {
return normalize( normal ) * 0.5 + 0.5;
}
vec3 unpackRGBToNormal( const in vec3 rgb ) {
return 2.0 * rgb.xyz - 1.0;
}
const float PackUpscale = 256. / 255.; // fraction -> 0..1 (including 1)
const float UnpackDownscale = 255. / 256.; // 0..1 -> fraction (excluding 1)
const vec3 PackFactors = vec3( 256. * 256. * 256., 256. * 256., 256. );
const vec4 UnpackFactors = UnpackDownscale / vec4( PackFactors, 1. );
const float ShiftRight8 = 1. / 256.;
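// Packs a [ 0, 1 ) depth value into the four 8-bit RGBA channels; unpackRGBAToDepth reverses the packing.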
vec4 packDepthToRGBA( const in float v ) {
vec4 r = vec4( fract( v * PackFactors ), v );
r.yzw -= r.xyz * ShiftRight8; // tidy overflow
return r * PackUpscale;
}
float unpackRGBAToDepth( const in vec4 v ) {
return dot( v, UnpackFactors );
}
// NOTE: viewZ/eyeZ is < 0 when in front of the camera per OpenGL conventions
float viewZToOrthographicDepth( const in float viewZ, const in float near, const in float far ) {
return ( viewZ + near ) / ( near - far );
}
float orthographicDepthToViewZ( const in float linearClipZ, const in float near, const in float far ) {
return linearClipZ * ( near - far ) - near;
}
float viewZToPerspectiveDepth( const in float viewZ, const in float near, const in float far ) {
return (( near + viewZ ) * far ) / (( far - near ) * viewZ );
}
float perspectiveDepthToViewZ( const in float invClipZ, const in float near, const in float far ) {
return ( near * far ) / ( ( far - near ) * invClipZ - far );
}
`,Bj=`
#ifdef PREMULTIPLIED_ALPHA
// To get normal blending with premultiplied alpha, use with CustomBlending, OneFactor, OneMinusSrcAlphaFactor, AddEquation.
gl_FragColor.rgb *= gl_FragColor.a;
#endif
`,Sj=`
vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );
gl_Position = projectionMatrix * mvPosition;
`,Dj=`
#if defined( DITHERING )
gl_FragColor.rgb = dithering( gl_FragColor.rgb );
#endif
`,xj=`
#if defined( DITHERING )
// based on https://www.shadertoy.com/view/MslGR8
vec3 dithering( vec3 color ) {
// Calculate grid position
float grid_position = rand( gl_FragCoord.xy );
// Shift the individual colors differently, thus making it even harder to see the dithering pattern
vec3 dither_shift_RGB = vec3( 0.25 / 255.0, -0.25 / 255.0, 0.25 / 255.0 );
// Modify shift according to grid position.
dither_shift_RGB = mix( 2.0 * dither_shift_RGB, -2.0 * dither_shift_RGB, grid_position );
// Shift the color by dither_shift
return color + dither_shift_RGB;
}
#endif
`,Tj=`
float roughnessFactor = roughness;
#ifdef USE_ROUGHNESSMAP
vec4 texelRoughness = texture2D( roughnessMap, vUv );
// reads channel G, compatible with a combined OcclusionRoughnessMetallic (RGB) texture
roughnessFactor *= texelRoughness.g;
#endif
`,Ij=`
#ifdef USE_ROUGHNESSMAP
uniform sampler2D roughnessMap;
#endif
`,Pj=`
#ifdef USE_SHADOWMAP
#if NUM_DIR_LIGHTS > 0
uniform sampler2D directionalShadowMap[ NUM_DIR_LIGHTS ];
varying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHTS ];
#endif
#if NUM_SPOT_LIGHTS > 0
uniform sampler2D spotShadowMap[ NUM_SPOT_LIGHTS ];
varying vec4 vSpotShadowCoord[ NUM_SPOT_LIGHTS ];
#endif
#if NUM_POINT_LIGHTS > 0
uniform sampler2D pointShadowMap[ NUM_POINT_LIGHTS ];
varying vec4 vPointShadowCoord[ NUM_POINT_LIGHTS ];
#endif
/*
#if NUM_RECT_AREA_LIGHTS > 0
// TODO (abelnation): create uniforms for area light shadows
#endif
*/
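// Returns 1.0 when the stored depth is at least the reference depth (fragment is lit), 0.0 otherwise.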
float texture2DCompare( sampler2D depths, vec2 uv, float compare ) {
return step( compare, unpackRGBAToDepth( texture2D( depths, uv ) ) );
}
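// Bilinearly interpolates four neighbouring depth comparisons for softer shadow edges.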
float texture2DShadowLerp( sampler2D depths, vec2 size, vec2 uv, float compare ) {
const vec2 offset = vec2( 0.0, 1.0 );
vec2 texelSize = vec2( 1.0 ) / size;
vec2 centroidUV = floor( uv * size + 0.5 ) / size;
float lb = texture2DCompare( depths, centroidUV + texelSize * offset.xx, compare );
float lt = texture2DCompare( depths, centroidUV + texelSize * offset.xy, compare );
float rb = texture2DCompare( depths, centroidUV + texelSize * offset.yx, compare );
float rt = texture2DCompare( depths, centroidUV + texelSize * offset.yy, compare );
vec2 f = fract( uv * size + 0.5 );
float a = mix( lb, lt, f.y );
float b = mix( rb, rt, f.y );
float c = mix( a, b, f.x );
return c;
}
float getShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowBias, float shadowRadius, vec4 shadowCoord ) {
float shadow = 1.0;
shadowCoord.xyz /= shadowCoord.w;
shadowCoord.z += shadowBias;
// if ( something && something ) breaks the ATI OpenGL shader compiler,
// so if ( all( bvec2( something, something ) ) ) is used instead.
bvec4 inFrustumVec = bvec4 ( shadowCoord.x >= 0.0, shadowCoord.x <= 1.0, shadowCoord.y >= 0.0, shadowCoord.y <= 1.0 );
bool inFrustum = all( inFrustumVec );
bvec2 frustumTestVec = bvec2( inFrustum, shadowCoord.z <= 1.0 );
bool frustumTest = all( frustumTestVec );
if ( frustumTest ) {
#if defined( SHADOWMAP_TYPE_PCF )
vec2 texelSize = vec2( 1.0 ) / shadowMapSize;
float dx0 = - texelSize.x * shadowRadius;
float dy0 = - texelSize.y * shadowRadius;
float dx1 = + texelSize.x * shadowRadius;
float dy1 = + texelSize.y * shadowRadius;
shadow = (
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy0 ), shadowCoord.z ) +
texture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy0 ), shadowCoord.z ) +
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy0 ), shadowCoord.z ) +
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, 0.0 ), shadowCoord.z ) +
texture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z ) +
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, 0.0 ), shadowCoord.z ) +
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy1 ), shadowCoord.z ) +
texture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy1 ), shadowCoord.z ) +
texture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy1 ), shadowCoord.z )
) * ( 1.0 / 9.0 );
#elif defined( SHADOWMAP_TYPE_PCF_SOFT )
vec2 texelSize = vec2( 1.0 ) / shadowMapSize;
float dx0 = - texelSize.x * shadowRadius;
float dy0 = - texelSize.y * shadowRadius;
float dx1 = + texelSize.x * shadowRadius;
float dy1 = + texelSize.y * shadowRadius;
shadow = (
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx0, dy0 ), shadowCoord.z ) +
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( 0.0, dy0 ), shadowCoord.z ) +
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx1, dy0 ), shadowCoord.z ) +
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx0, 0.0 ), shadowCoord.z ) +
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy, shadowCoord.z ) +
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx1, 0.0 ), shadowCoord.z ) +
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx0, dy1 ), shadowCoord.z ) +
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( 0.0, dy1 ), shadowCoord.z ) +
texture2DShadowLerp( shadowMap, shadowMapSize, shadowCoord.xy + vec2( dx1, dy1 ), shadowCoord.z )
) * ( 1.0 / 9.0 );
#else // no percentage-closer filtering:
shadow = texture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z );
#endif
}
return shadow;
}
// cubeToUV() maps a 3D direction vector suitable for cube texture mapping to a 2D
// vector suitable for 2D texture mapping. This code uses the following layout for the
// 2D texture:
//
// xzXZ
// y Y
//
// Y - Positive y direction
// y - Negative y direction
// X - Positive x direction
// x - Negative x direction
// Z - Positive z direction
// z - Negative z direction
//
// Source and test bed:
// https://gist.github.com/tschw/da10c43c467ce8afd0c4
vec2 cubeToUV( vec3 v, float texelSizeY ) {
// Number of texels to avoid at the edge of each square
vec3 absV = abs( v );
// Intersect unit cube
float scaleToCube = 1.0 / max( absV.x, max( absV.y, absV.z ) );
absV *= scaleToCube;
// Apply scale to avoid seams
// two texels less per square (one texel will do for NEAREST)
v *= scaleToCube * ( 1.0 - 2.0 * texelSizeY );
// Unwrap
// space: -1 ... 1 range for each square
//
// #X## dim := ( 4 , 2 )
// # # center := ( 1 , 1 )
vec2 planar = v.xy;
float almostATexel = 1.5 * texelSizeY;
float almostOne = 1.0 - almostATexel;
if ( absV.z >= almostOne ) {
if ( v.z > 0.0 )
planar.x = 4.0 - v.x;
} else if ( absV.x >= almostOne ) {
float signX = sign( v.x );
planar.x = v.z * signX + 2.0 * signX;
} else if ( absV.y >= almostOne ) {
float signY = sign( v.y );
planar.x = v.x + 2.0 * signY + 2.0;
planar.y = v.z * signY - 2.0;
}
// Transform to UV space
// scale := 0.5 / dim
// translate := ( center + 0.5 ) / dim
return vec2( 0.125, 0.25 ) * planar + vec2( 0.375, 0.75 );
}
float getPointShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowBias, float shadowRadius, vec4 shadowCoord, float shadowCameraNear, float shadowCameraFar ) {
vec2 texelSize = vec2( 1.0 ) / ( shadowMapSize * vec2( 4.0, 2.0 ) );
// for point lights, the uniform @vShadowCoord is re-purposed to hold
// the vector from the light to the world-space position of the fragment.
vec3 lightToPosition = shadowCoord.xyz;
// dp = normalized distance from light to fragment position
float dp = ( length( lightToPosition ) - shadowCameraNear ) / ( shadowCameraFar - shadowCameraNear ); // need to clamp?
dp += shadowBias;
// bd3D = base direction 3D
vec3 bd3D = normalize( lightToPosition );
#if defined( SHADOWMAP_TYPE_PCF ) || defined( SHADOWMAP_TYPE_PCF_SOFT )
vec2 offset = vec2( - 1, 1 ) * shadowRadius * texelSize.y;
return (
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyy, texelSize.y ), dp ) +
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyy, texelSize.y ), dp ) +
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyx, texelSize.y ), dp ) +
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyx, texelSize.y ), dp ) +
texture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp ) +
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxy, texelSize.y ), dp ) +
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxy, texelSize.y ), dp ) +
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxx, texelSize.y ), dp ) +
texture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxx, texelSize.y ), dp )
) * ( 1.0 / 9.0 );
#else // no percentage-closer filtering
return texture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp );
#endif
}
#endif
`,Mj=`
#ifdef USE_SHADOWMAP
#if NUM_DIR_LIGHTS > 0
uniform mat4 directionalShadowMatrix[ NUM_DIR_LIGHTS ];
varying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHTS ];
#endif
#if NUM_SPOT_LIGHTS > 0
uniform mat4 spotShadowMatrix[ NUM_SPOT_LIGHTS ];
varying vec4 vSpotShadowCoord[ NUM_SPOT_LIGHTS ];
#endif
#if NUM_POINT_LIGHTS > 0
uniform mat4 pointShadowMatrix[ NUM_POINT_LIGHTS ];
varying vec4 vPointShadowCoord[ NUM_POINT_LIGHTS ];
#endif
/*
#if NUM_RECT_AREA_LIGHTS > 0
// TODO (abelnation): uniforms for area light shadows
#endif
*/
#endif
`,Lj=`
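// Transforms the world-space vertex position into each shadow-casting light's shadow-map clip space.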
#ifdef USE_SHADOWMAP
#if NUM_DIR_LIGHTS > 0
#pragma unroll_loop
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
vDirectionalShadowCoord[ i ] = directionalShadowMatrix[ i ] * worldPosition;
}
#endif
#if NUM_SPOT_LIGHTS > 0
#pragma unroll_loop
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
vSpotShadowCoord[ i ] = spotShadowMatrix[ i ] * worldPosition;
}
#endif
#if NUM_POINT_LIGHTS > 0
#pragma unroll_loop
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
vPointShadowCoord[ i ] = pointShadowMatrix[ i ] * worldPosition;
}
#endif
/*
#if NUM_RECT_AREA_LIGHTS > 0
// TODO (abelnation): update vAreaShadowCoord with area light info
#endif
*/
#endif
`,Rj=`
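// Multiplies together the shadow attenuation of every shadow-casting light to produce a single shadow mask.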
float getShadowMask() {
float shadow = 1.0;
#ifdef USE_SHADOWMAP
#if NUM_DIR_LIGHTS > 0
DirectionalLight directionalLight;
#pragma unroll_loop
for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
directionalLight = directionalLights[ i ];
shadow *= bool( directionalLight.shadow ) ? getShadow( directionalShadowMap[ i ], directionalLight.shadowMapSize, directionalLight.shadowBias, directionalLight.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;
}
#endif
#if NUM_SPOT_LIGHTS > 0
SpotLight spotLight;
#pragma unroll_loop
for ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {
spotLight = spotLights[ i ];
shadow *= bool( spotLight.shadow ) ? getShadow( spotShadowMap[ i ], spotLight.shadowMapSize, spotLight.shadowBias, spotLight.shadowRadius, vSpotShadowCoord[ i ] ) : 1.0;
}
#endif
#if NUM_POINT_LIGHTS > 0
PointLight pointLight;
#pragma unroll_loop
for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {
pointLight = pointLights[ i ];
shadow *= bool( pointLight.shadow ) ? getPointShadow( pointShadowMap[ i ], pointLight.shadowMapSize, pointLight.shadowBias, pointLight.shadowRadius, vPointShadowCoord[ i ], pointLight.shadowCameraNear, pointLight.shadowCameraFar ) : 1.0;
}
#endif
/*
#if NUM_RECT_AREA_LIGHTS > 0
// TODO (abelnation): update shadow for Area light
#endif
*/
#endif
return shadow;
}
`,Uj=`
#ifdef USE_SKINNING
mat4 boneMatX = getBoneMatrix( skinIndex.x );
mat4 boneMatY = getBoneMatrix( skinIndex.y );
mat4 boneMatZ = getBoneMatrix( skinIndex.z );
mat4 boneMatW = getBoneMatrix( skinIndex.w );
#endif
`,Oj=`
#ifdef USE_SKINNING
uniform mat4 bindMatrix;
uniform mat4 bindMatrixInverse;
#ifdef BONE_TEXTURE
uniform sampler2D boneTexture;
uniform int boneTextureSize;
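// Fetches a 4x4 bone matrix stored as four consecutive RGBA texels in the bone texture.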
mat4 getBoneMatrix( const in float i ) {
float j = i * 4.0;
float x = mod( j, float( boneTextureSize ) );
float y = floor( j / float( boneTextureSize ) );
float dx = 1.0 / float( boneTextureSize );
float dy = 1.0 / float( boneTextureSize );
y = dy * ( y + 0.5 );
vec4 v1 = texture2D( boneTexture, vec2( dx * ( x + 0.5 ), y ) );
vec4 v2 = texture2D( boneTexture, vec2( dx * ( x + 1.5 ), y ) );
vec4 v3 = texture2D( boneTexture, vec2( dx * ( x + 2.5 ), y ) );
vec4 v4 = texture2D( boneTexture, vec2( dx * ( x + 3.5 ), y ) );
mat4 bone = mat4( v1, v2, v3, v4 );
return bone;
}
#else
uniform mat4 boneMatrices[ MAX_BONES ];
mat4 getBoneMatrix( const in float i ) {
mat4 bone = boneMatrices[ int(i) ];
return bone;
}
#endif
#endif
`,Nj=`
#ifdef USE_SKINNING
vec4 skinVertex = bindMatrix * vec4( transformed, 1.0 );
vec4 skinned = vec4( 0.0 );
skinned += boneMatX * skinVertex * skinWeight.x;
skinned += boneMatY * skinVertex * skinWeight.y;
skinned += boneMatZ * skinVertex * skinWeight.z;
skinned += boneMatW * skinVertex * skinWeight.w;
transformed = ( bindMatrixInverse * skinned ).xyz;
#endif
`,Hj=`
#ifdef USE_SKINNING
mat4 skinMatrix = mat4( 0.0 );
skinMatrix += skinWeight.x * boneMatX;
skinMatrix += skinWeight.y * boneMatY;
skinMatrix += skinWeight.z * boneMatZ;
skinMatrix += skinWeight.w * boneMatW;
skinMatrix = bindMatrixInverse * skinMatrix * bindMatrix;
objectNormal = vec4( skinMatrix * vec4( objectNormal, 0.0 ) ).xyz;
#endif
`,Qj=`
float specularStrength;
#ifdef USE_SPECULARMAP
vec4 texelSpecular = texture2D( specularMap, vUv );
specularStrength = texelSpecular.r;
#else
specularStrength = 1.0;
#endif
`,jj=`
#ifdef USE_SPECULARMAP
uniform sampler2D specularMap;
#endif
`,Gj=`
#if defined( TONE_MAPPING )
gl_FragColor.rgb = toneMapping( gl_FragColor.rgb );
#endif
`,zj=`
#ifndef saturate
#define saturate(a) clamp( a, 0.0, 1.0 )
#endif
uniform float toneMappingExposure;
uniform float toneMappingWhitePoint;
// exposure only
vec3 LinearToneMapping( vec3 color ) {
return toneMappingExposure * color;
}
// source: https://www.cs.utah.edu/~reinhard/cdrom/
vec3 ReinhardToneMapping( vec3 color ) {
color *= toneMappingExposure;
return saturate( color / ( vec3( 1.0 ) + color ) );
}
// source: http://filmicgames.com/archives/75
#define Uncharted2Helper( x ) max( ( ( x * ( 0.15 * x + 0.10 * 0.50 ) + 0.20 * 0.02 ) / ( x * ( 0.15 * x + 0.50 ) + 0.20 * 0.30 ) ) - 0.02 / 0.30, vec3( 0.0 ) )
vec3 Uncharted2ToneMapping( vec3 color ) {
// John Hable's filmic operator from the Uncharted 2 video game
color *= toneMappingExposure;
return saturate( Uncharted2Helper( color ) / Uncharted2Helper( vec3( toneMappingWhitePoint ) ) );
}
// source: http://filmicgames.com/archives/75
vec3 OptimizedCineonToneMapping( vec3 color ) {
// optimized filmic operator by Jim Hejl and Richard Burgess-Dawson
color *= toneMappingExposure;
color = max( vec3( 0.0 ), color - 0.004 );
return pow( ( color * ( 6.2 * color + 0.5 ) ) / ( color * ( 6.2 * color + 1.7 ) + 0.06 ), vec3( 2.2 ) );
}
// source: https://knarkowicz.wordpress.com/2016/01/06/aces-filmic-tone-mapping-curve/
vec3 ACESFilmicToneMapping( vec3 color ) {
color *= toneMappingExposure;
return saturate( ( color * ( 2.51 * color + 0.03 ) ) / ( color * ( 2.43 * color + 0.59 ) + 0.14 ) );
}
`,qj=`
#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP ) || defined( USE_EMISSIVEMAP ) || defined( USE_ROUGHNESSMAP ) || defined( USE_METALNESSMAP )
varying vec2 vUv;
#endif
`,Vj=`
#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP ) || defined( USE_EMISSIVEMAP ) || defined( USE_ROUGHNESSMAP ) || defined( USE_METALNESSMAP )
varying vec2 vUv;
uniform mat3 uvTransform;
#endif
`,Wj=`
#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP ) || defined( USE_EMISSIVEMAP ) || defined( USE_ROUGHNESSMAP ) || defined( USE_METALNESSMAP )
vUv = ( uvTransform * vec3( uv, 1 ) ).xy;
#endif
`,Kj=`
#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )
varying vec2 vUv2;
#endif
`,Yj=`
#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )
attribute vec2 uv2;
varying vec2 vUv2;
#endif
`,Xj=`
#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )
vUv2 = uv2;
#endif
`,Jj=`
#if defined( USE_ENVMAP ) || defined( DISTANCE ) || defined ( USE_SHADOWMAP )
vec4 worldPosition = modelMatrix * vec4( transformed, 1.0 );
#endif
`,Zj=`
uniform sampler2D t2D;
varying vec2 vUv;
void main() {
vec4 texColor = texture2D( t2D, vUv );
gl_FragColor = mapTexelToLinear( texColor );
#include <tonemapping_fragment>
#include <encodings_fragment>
}
`,$j=`
varying vec2 vUv;
uniform mat3 uvTransform;
void main() {
vUv = ( uvTransform * vec3( uv, 1 ) ).xy;
gl_Position = vec4( position.xy, 1.0, 1.0 );
}
`,eG=`
uniform samplerCube tCube;
uniform float tFlip;
uniform float opacity;
varying vec3 vWorldDirection;
void main() {
vec4 texColor = textureCube( tCube, vec3( tFlip * vWorldDirection.x, vWorldDirection.yz ) );
gl_FragColor = mapTexelToLinear( texColor );
gl_FragColor.a *= opacity;
#include <tonemapping_fragment>
#include <encodings_fragment>
}
`,tG=`
varying vec3 vWorldDirection;
#include <common>
void main() {
vWorldDirection = transformDirection( position, modelMatrix );
#include <begin_vertex>
#include <project_vertex>
gl_Position.z = gl_Position.w; // set z to camera.far
}
`,iG=`
#if DEPTH_PACKING == 3200
uniform float opacity;
#endif
#include <common>
#include <packing>
#include <uv_pars_fragment>
#include <map_pars_fragment>
#include <alphamap_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <clipping_planes_pars_fragment>
void main() {
#include <clipping_planes_fragment>
vec4 diffuseColor = vec4( 1.0 );
#if DEPTH_PACKING == 3200
diffuseColor.a = opacity;
#endif
#include <map_fragment>
#include <alphamap_fragment>
#include <alphatest_fragment>
#include <logdepthbuf_fragment>
#if DEPTH_PACKING == 3200
gl_FragColor = vec4( vec3( 1.0 - gl_FragCoord.z ), opacity );
#elif DEPTH_PACKING == 3201
gl_FragColor = packDepthToRGBA( gl_FragCoord.z );
#endif
}
`,nG=`
#include <common>
#include <uv_pars_vertex>
#include <displacementmap_pars_vertex>
#include <morphtarget_pars_vertex>
#include <skinning_pars_vertex>
#include <logdepthbuf_pars_vertex>
#include <clipping_planes_pars_vertex>
void main() {
#include <uv_vertex>
#include <skinbase_vertex>
#ifdef USE_DISPLACEMENTMAP
#include <beginnormal_vertex>
#include <morphnormal_vertex>
#include <skinnormal_vertex>
#endif
#include <begin_vertex>
#include <morphtarget_vertex>
#include <skinning_vertex>
#include <displacementmap_vertex>
#include <project_vertex>
#include <logdepthbuf_vertex>
#include <clipping_planes_vertex>
}
`,aG=`
#define DISTANCE
uniform vec3 referencePosition;
uniform float nearDistance;
uniform float farDistance;
varying vec3 vWorldPosition;
#include <common>
#include <packing>
#include <uv_pars_fragment>
#include <map_pars_fragment>
#include <alphamap_pars_fragment>
#include <clipping_planes_pars_fragment>
void main () {
#include <clipping_planes_fragment>
vec4 diffuseColor = vec4( 1.0 );
#include <map_fragment>
#include <alphamap_fragment>
#include <alphatest_fragment>
float dist = length( vWorldPosition - referencePosition );
dist = ( dist - nearDistance ) / ( farDistance - nearDistance );
dist = saturate( dist ); // clamp to [ 0, 1 ]
gl_FragColor = packDepthToRGBA( dist );
}
`,sG=`
#define DISTANCE
varying vec3 vWorldPosition;
#include <common>
#include <uv_pars_vertex>
#include <displacementmap_pars_vertex>
#include <morphtarget_pars_vertex>
#include <skinning_pars_vertex>
#include <clipping_planes_pars_vertex>
void main() {
#include <uv_vertex>
#include <skinbase_vertex>
#ifdef USE_DISPLACEMENTMAP
#include <beginnormal_vertex>
#include <morphnormal_vertex>
#include <skinnormal_vertex>
#endif
#include <begin_vertex>
#include <morphtarget_vertex>
#include <skinning_vertex>
#include <displacementmap_vertex>
#include <project_vertex>
#include <worldpos_vertex>
#include <clipping_planes_vertex>
vWorldPosition = worldPosition.xyz;
}
`,rG=`
uniform sampler2D tEquirect;
varying vec3 vWorldDirection;
#include <common>
void main() {
vec3 direction = normalize( vWorldDirection );
vec2 sampleUV;
sampleUV.y = asin( clamp( direction.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
sampleUV.x = atan( direction.z, direction.x ) * RECIPROCAL_PI2 + 0.5;
vec4 texColor = texture2D( tEquirect, sampleUV );
gl_FragColor = mapTexelToLinear( texColor );
#include <tonemapping_fragment>
#include <encodings_fragment>
}
`,oG=`
varying vec3 vWorldDirection;
#include <common>
void main() {
vWorldDirection = transformDirection( position, modelMatrix );
#include <begin_vertex>
#include <project_vertex>
}
`,lG=`
uniform vec3 diffuse;
uniform float opacity;
uniform float dashSize;
uniform float totalSize;
varying float vLineDistance;
#include <common>
#include <color_pars_fragment>
#include <fog_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <clipping_planes_pars_fragment>
void main() {
#include <clipping_planes_fragment>
if ( mod( vLineDistance, totalSize ) > dashSize ) {
discard;
}
vec3 outgoingLight = vec3( 0.0 );
vec4 diffuseColor = vec4( diffuse, opacity );
#include <logdepthbuf_fragment>
#include <color_fragment>
outgoingLight = diffuseColor.rgb; // simple shader
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
#include <premultiplied_alpha_fragment>
#include <tonemapping_fragment>
#include <encodings_fragment>
#include <fog_fragment>
}
`,uG=`
uniform float scale;
attribute float lineDistance;
varying float vLineDistance;
#include <common>
#include <color_pars_vertex>
#include <fog_pars_vertex>
#include <logdepthbuf_pars_vertex>
#include <clipping_planes_pars_vertex>
void main() {
#include <color_vertex>
vLineDistance = scale * lineDistance;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_Position = projectionMatrix * mvPosition;
#include <logdepthbuf_vertex>
#include <clipping_planes_vertex>
#include <fog_vertex>
}
`,cG=`
uniform vec3 diffuse;
uniform float opacity;
#ifndef FLAT_SHADED
varying vec3 vNormal;
#endif
#include <common>
#include <color_pars_fragment>
#include <uv_pars_fragment>
#include <uv2_pars_fragment>
#include <map_pars_fragment>
#include <alphamap_pars_fragment>
#include <aomap_pars_fragment>
#include <lightmap_pars_fragment>
#include <envmap_pars_fragment>
#include <fog_pars_fragment>
#include <specularmap_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <clipping_planes_pars_fragment>
void main() {
#include <clipping_planes_fragment>
vec4 diffuseColor = vec4( diffuse, opacity );
#include <logdepthbuf_fragment>
#include <map_fragment>
#include <color_fragment>
#include <alphamap_fragment>
#include <alphatest_fragment>
#include <specularmap_fragment>
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
// accumulation (baked indirect lighting only)
#ifdef USE_LIGHTMAP
reflectedLight.indirectDiffuse += texture2D( lightMap, vUv2 ).xyz * lightMapIntensity;
#else
reflectedLight.indirectDiffuse += vec3( 1.0 );
#endif
// modulation
#include <aomap_fragment>
reflectedLight.indirectDiffuse *= diffuseColor.rgb;
vec3 outgoingLight = reflectedLight.indirectDiffuse;
#include <envmap_fragment>
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
#include <premultiplied_alpha_fragment>
#include <tonemapping_fragment>
#include <encodings_fragment>
#include <fog_fragment>
}
`,dG=`
#include <common>
#include <uv_pars_vertex>
#include <uv2_pars_vertex>
#include <envmap_pars_vertex>
#include <color_pars_vertex>
#include <fog_pars_vertex>
#include <morphtarget_pars_vertex>
#include <skinning_pars_vertex>
#include <logdepthbuf_pars_vertex>
#include <clipping_planes_pars_vertex>
void main() {
#include <uv_vertex>
#include <uv2_vertex>
#include <color_vertex>
#include <skinbase_vertex>
#ifdef USE_ENVMAP
#include <beginnormal_vertex>
#include <morphnormal_vertex>
#include <skinnormal_vertex>
#include <defaultnormal_vertex>
#endif
#include <begin_vertex>
#include <morphtarget_vertex>
#include <skinning_vertex>
#include <project_vertex>
#include <logdepthbuf_vertex>
#include <worldpos_vertex>
#include <clipping_planes_vertex>
#include <envmap_vertex>
#include <fog_vertex>
}
`,fG=`
uniform vec3 diffuse;
uniform vec3 emissive;
uniform float opacity;
varying vec3 vLightFront;
#ifdef DOUBLE_SIDED
varying vec3 vLightBack;
#endif
#include <common>
#include <packing>
#include <dithering_pars_fragment>
#include <color_pars_fragment>
#include <uv_pars_fragment>
#include <uv2_pars_fragment>
#include <map_pars_fragment>
#include <alphamap_pars_fragment>
#include <aomap_pars_fragment>
#include <lightmap_pars_fragment>
#include <emissivemap_pars_fragment>
#include <envmap_pars_fragment>
#include <bsdfs>
#include <lights_pars_begin>
#include <fog_pars_fragment>
#include <shadowmap_pars_fragment>
#include <shadowmask_pars_fragment>
#include <specularmap_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <clipping_planes_pars_fragment>
void main() {
#include <clipping_planes_fragment>
vec4 diffuseColor = vec4( diffuse, opacity );
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
vec3 totalEmissiveRadiance = emissive;
#include <logdepthbuf_fragment>
#include <map_fragment>
#include <color_fragment>
#include <alphamap_fragment>
#include <alphatest_fragment>
#include <specularmap_fragment>
#include <emissivemap_fragment>
// accumulation
reflectedLight.indirectDiffuse = getAmbientLightIrradiance( ambientLightColor );
#include <lightmap_fragment>
reflectedLight.indirectDiffuse *= BRDF_Diffuse_Lambert( diffuseColor.rgb );
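// Lambert lighting is evaluated per vertex; for double-sided materials pick the front or back varying depending on which face is being shaded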
#ifdef DOUBLE_SIDED
reflectedLight.directDiffuse = ( gl_FrontFacing ) ? vLightFront : vLightBack;
#else
reflectedLight.directDiffuse = vLightFront;
#endif
reflectedLight.directDiffuse *= BRDF_Diffuse_Lambert( diffuseColor.rgb ) * getShadowMask();
// modulation
#include <aomap_fragment>
vec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;
#include <envmap_fragment>
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
#include <tonemapping_fragment>
#include <encodings_fragment>
#include <fog_fragment>
#include <premultiplied_alpha_fragment>
#include <dithering_fragment>
}
`,AG=`
#define LAMBERT
varying vec3 vLightFront;
#ifdef DOUBLE_SIDED
varying vec3 vLightBack;
#endif
#include <common>
#include <uv_pars_vertex>
#include <uv2_pars_vertex>
#include <envmap_pars_vertex>
#include <bsdfs>
#include <lights_pars_begin>
#include <color_pars_vertex>
#include <fog_pars_vertex>
#include <morphtarget_pars_vertex>
#include <skinning_pars_vertex>
#include <shadowmap_pars_vertex>
#include <logdepthbuf_pars_vertex>
#include <clipping_planes_pars_vertex>
void main() {
#include <uv_vertex>
#include <uv2_vertex>
#include <color_vertex>
#include <beginnormal_vertex>
#include <morphnormal_vertex>
#include <skinbase_vertex>
#include <skinnormal_vertex>
#include <defaultnormal_vertex>
#include <begin_vertex>
#include <morphtarget_vertex>
#include <skinning_vertex>
#include <project_vertex>
#include <logdepthbuf_vertex>
#include <clipping_planes_vertex>
#include <worldpos_vertex>
#include <envmap_vertex>
#include <lights_lambert_vertex>
#include <shadowmap_vertex>
#include <fog_vertex>
}
`,hG=`
#define MATCAP
uniform vec3 diffuse;
uniform float opacity;
uniform sampler2D matcap;
varying vec3 vViewPosition;
#ifndef FLAT_SHADED
varying vec3 vNormal;
#endif
#include <common>
#include <uv_pars_fragment>
#include <map_pars_fragment>
#include <alphamap_pars_fragment>
#include <fog_pars_fragment>
#include <bumpmap_pars_fragment>
#include <normalmap_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <clipping_planes_pars_fragment>
void main() {
#include <clipping_planes_fragment>
vec4 diffuseColor = vec4( diffuse, opacity );
#include <logdepthbuf_fragment>
#include <map_fragment>
#include <alphamap_fragment>
#include <alphatest_fragment>
#include <normal_fragment_begin>
#include <normal_fragment_maps>
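// build a basis (x, y) around the view direction and project the shaded normal onto it to get the matcap lookup UV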
vec3 viewDir = normalize( vViewPosition );
vec3 x = normalize( vec3( viewDir.z, 0.0, - viewDir.x ) );
vec3 y = cross( viewDir, x );
vec2 uv = vec2( dot( x, normal ), dot( y, normal ) ) * 0.495 + 0.5; // 0.495 to remove artifacts caused by undersized matcap disks
#ifdef USE_MATCAP
vec4 matcapColor = texture2D( matcap, uv );
matcapColor = matcapTexelToLinear( matcapColor );
#else
vec4 matcapColor = vec4( 1.0 );
#endif
vec3 outgoingLight = diffuseColor.rgb * matcapColor.rgb;
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
#include <premultiplied_alpha_fragment>
#include <tonemapping_fragment>
#include <encodings_fragment>
#include <fog_fragment>
}
`,gG=`
#define MATCAP
varying vec3 vViewPosition;
#ifndef FLAT_SHADED
varying vec3 vNormal;
#endif
#include <common>
#include <uv_pars_vertex>
#include <displacementmap_pars_vertex>
#include <fog_pars_vertex>
#include <morphtarget_pars_vertex>
#include <skinning_pars_vertex>
#include <logdepthbuf_pars_vertex>
#include <clipping_planes_pars_vertex>
void main() {
#include <uv_vertex>
#include <beginnormal_vertex>
#include <morphnormal_vertex>
#include <skinbase_vertex>
#include <skinnormal_vertex>
#include <defaultnormal_vertex>
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
vNormal = normalize( transformedNormal );
#endif
#include <begin_vertex>
#include <morphtarget_vertex>
#include <skinning_vertex>
#include <displacementmap_vertex>
#include <project_vertex>
#include <logdepthbuf_vertex>
#include <clipping_planes_vertex>
#include <fog_vertex>
vViewPosition = - mvPosition.xyz;
}
`,pG=`
#define PHONG
uniform vec3 diffuse;
uniform vec3 emissive;
uniform vec3 specular;
uniform float shininess;
uniform float opacity;
#include <common>
#include <packing>
#include <dithering_pars_fragment>
#include <color_pars_fragment>
#include <uv_pars_fragment>
#include <uv2_pars_fragment>
#include <map_pars_fragment>
#include <alphamap_pars_fragment>
#include <aomap_pars_fragment>
#include <lightmap_pars_fragment>
#include <emissivemap_pars_fragment>
#include <envmap_pars_fragment>
#include <gradientmap_pars_fragment>
#include <fog_pars_fragment>
#include <bsdfs>
#include <lights_pars_begin>
#include <lights_phong_pars_fragment>
#include <shadowmap_pars_fragment>
#include <bumpmap_pars_fragment>
#include <normalmap_pars_fragment>
#include <specularmap_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <clipping_planes_pars_fragment>
void main() {
#include <clipping_planes_fragment>
vec4 diffuseColor = vec4( diffuse, opacity );
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
vec3 totalEmissiveRadiance = emissive;
#include <logdepthbuf_fragment>
#include <map_fragment>
#include <color_fragment>
#include <alphamap_fragment>
#include <alphatest_fragment>
#include <specularmap_fragment>
#include <normal_fragment_begin>
#include <normal_fragment_maps>
#include <emissivemap_fragment>
// accumulation
#include <lights_phong_fragment>
#include <lights_fragment_begin>
#include <lights_fragment_maps>
#include <lights_fragment_end>
// modulation
#include <aomap_fragment>
vec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;
#include <envmap_fragment>
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
#include <tonemapping_fragment>
#include <encodings_fragment>
#include <fog_fragment>
#include <premultiplied_alpha_fragment>
#include <dithering_fragment>
}
`,mG=`
#define PHONG
varying vec3 vViewPosition;
#ifndef FLAT_SHADED
varying vec3 vNormal;
#endif
#include <common>
#include <uv_pars_vertex>
#include <uv2_pars_vertex>
#include <displacementmap_pars_vertex>
#include <envmap_pars_vertex>
#include <color_pars_vertex>
#include <fog_pars_vertex>
#include <morphtarget_pars_vertex>
#include <skinning_pars_vertex>
#include <shadowmap_pars_vertex>
#include <logdepthbuf_pars_vertex>
#include <clipping_planes_pars_vertex>
void main() {
#include <uv_vertex>
#include <uv2_vertex>
#include <color_vertex>
#include <beginnormal_vertex>
#include <morphnormal_vertex>
#include <skinbase_vertex>
#include <skinnormal_vertex>
#include <defaultnormal_vertex>
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
vNormal = normalize( transformedNormal );
#endif
#include <begin_vertex>
#include <morphtarget_vertex>
#include <skinning_vertex>
#include <displacementmap_vertex>
#include <project_vertex>
#include <logdepthbuf_vertex>
#include <clipping_planes_vertex>
vViewPosition = - mvPosition.xyz;
#include <worldpos_vertex>
#include <envmap_vertex>
#include <shadowmap_vertex>
#include <fog_vertex>
}
`,_G=`
#define PHYSICAL
uniform vec3 diffuse;
uniform vec3 emissive;
uniform float roughness;
uniform float metalness;
uniform float opacity;
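// clear-coat parameters are compiled in only when the STANDARD define is absent, i.e. for the physical variant of this shared shader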
#ifndef STANDARD
uniform float clearCoat;
uniform float clearCoatRoughness;
#endif
varying vec3 vViewPosition;
#ifndef FLAT_SHADED
varying vec3 vNormal;
#endif
#include <common>
#include <packing>
#include <dithering_pars_fragment>
#include <color_pars_fragment>
#include <uv_pars_fragment>
#include <uv2_pars_fragment>
#include <map_pars_fragment>
#include <alphamap_pars_fragment>
#include <aomap_pars_fragment>
#include <lightmap_pars_fragment>
#include <emissivemap_pars_fragment>
#include <bsdfs>
#include <cube_uv_reflection_fragment>
#include <envmap_pars_fragment>
#include <envmap_physical_pars_fragment>
#include <fog_pars_fragment>
#include <lights_pars_begin>
#include <lights_physical_pars_fragment>
#include <shadowmap_pars_fragment>
#include <bumpmap_pars_fragment>
#include <normalmap_pars_fragment>
#include <roughnessmap_pars_fragment>
#include <metalnessmap_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <clipping_planes_pars_fragment>
void main() {
#include <clipping_planes_fragment>
vec4 diffuseColor = vec4( diffuse, opacity );
ReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );
vec3 totalEmissiveRadiance = emissive;
#include <logdepthbuf_fragment>
#include <map_fragment>
#include <color_fragment>
#include <alphamap_fragment>
#include <alphatest_fragment>
#include <roughnessmap_fragment>
#include <metalnessmap_fragment>
#include <normal_fragment_begin>
#include <normal_fragment_maps>
#include <emissivemap_fragment>
// accumulation
#include <lights_physical_fragment>
#include <lights_fragment_begin>
#include <lights_fragment_maps>
#include <lights_fragment_end>
// modulation
#include <aomap_fragment>
vec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
#include <tonemapping_fragment>
#include <encodings_fragment>
#include <fog_fragment>
#include <premultiplied_alpha_fragment>
#include <dithering_fragment>
}
`,FG=`
#define PHYSICAL
varying vec3 vViewPosition;
#ifndef FLAT_SHADED
varying vec3 vNormal;
#endif
#include <common>
#include <uv_pars_vertex>
#include <uv2_pars_vertex>
#include <displacementmap_pars_vertex>
#include <color_pars_vertex>
#include <fog_pars_vertex>
#include <morphtarget_pars_vertex>
#include <skinning_pars_vertex>
#include <shadowmap_pars_vertex>
#include <logdepthbuf_pars_vertex>
#include <clipping_planes_pars_vertex>
void main() {
#include <uv_vertex>
#include <uv2_vertex>
#include <color_vertex>
#include <beginnormal_vertex>
#include <morphnormal_vertex>
#include <skinbase_vertex>
#include <skinnormal_vertex>
#include <defaultnormal_vertex>
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
vNormal = normalize( transformedNormal );
#endif
#include <begin_vertex>
#include <morphtarget_vertex>
#include <skinning_vertex>
#include <displacementmap_vertex>
#include <project_vertex>
#include <logdepthbuf_vertex>
#include <clipping_planes_vertex>
vViewPosition = - mvPosition.xyz;
#include <worldpos_vertex>
#include <shadowmap_vertex>
#include <fog_vertex>
}
`,bG=`
#define NORMAL
uniform float opacity;
#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || ( defined( USE_NORMALMAP ) && ! defined( OBJECTSPACE_NORMALMAP ) )
varying vec3 vViewPosition;
#endif
#ifndef FLAT_SHADED
varying vec3 vNormal;
#endif
#include <packing>
#include <uv_pars_fragment>
#include <bumpmap_pars_fragment>
#include <normalmap_pars_fragment>
#include <logdepthbuf_pars_fragment>
void main() {
#include <logdepthbuf_fragment>
#include <normal_fragment_begin>
#include <normal_fragment_maps>
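// remap the view-space normal from [-1,1] into [0,1] RGB for normal-buffer output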
gl_FragColor = vec4( packNormalToRGB( normal ), opacity );
}
`,vG=`
#define NORMAL
#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || ( defined( USE_NORMALMAP ) && ! defined( OBJECTSPACE_NORMALMAP ) )
varying vec3 vViewPosition;
#endif
#ifndef FLAT_SHADED
varying vec3 vNormal;
#endif
#include <uv_pars_vertex>
#include <displacementmap_pars_vertex>
#include <morphtarget_pars_vertex>
#include <skinning_pars_vertex>
#include <logdepthbuf_pars_vertex>
void main() {
#include <uv_vertex>
#include <beginnormal_vertex>
#include <morphnormal_vertex>
#include <skinbase_vertex>
#include <skinnormal_vertex>
#include <defaultnormal_vertex>
#ifndef FLAT_SHADED // Normal computed with derivatives when FLAT_SHADED
vNormal = normalize( transformedNormal );
#endif
#include <begin_vertex>
#include <morphtarget_vertex>
#include <skinning_vertex>
#include <displacementmap_vertex>
#include <project_vertex>
#include <logdepthbuf_vertex>
#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || ( defined( USE_NORMALMAP ) && ! defined( OBJECTSPACE_NORMALMAP ) )
vViewPosition = - mvPosition.xyz;
#endif
}
`,yG=`
uniform vec3 diffuse;
uniform float opacity;
#include <common>
#include <color_pars_fragment>
#include <map_particle_pars_fragment>
#include <fog_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <clipping_planes_pars_fragment>
void main() {
#include <clipping_planes_fragment>
vec3 outgoingLight = vec3( 0.0 );
vec4 diffuseColor = vec4( diffuse, opacity );
#include <logdepthbuf_fragment>
#include <map_particle_fragment>
#include <color_fragment>
#include <alphatest_fragment>
outgoingLight = diffuseColor.rgb;
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
#include <premultiplied_alpha_fragment>
#include <tonemapping_fragment>
#include <encodings_fragment>
#include <fog_fragment>
}
`,wG=`
uniform float size;
uniform float scale;
#include <common>
#include <color_pars_vertex>
#include <fog_pars_vertex>
#include <morphtarget_pars_vertex>
#include <logdepthbuf_pars_vertex>
#include <clipping_planes_pars_vertex>
void main() {
#include <color_vertex>
#include <begin_vertex>
#include <morphtarget_vertex>
#include <project_vertex>
gl_PointSize = size;
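// size attenuation: for perspective projections (projectionMatrix[2][3] == -1.0) points shrink with view-space depth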
#ifdef USE_SIZEATTENUATION
bool isPerspective = ( projectionMatrix[ 2 ][ 3 ] == - 1.0 );
if ( isPerspective ) gl_PointSize *= ( scale / - mvPosition.z );
#endif
#include <logdepthbuf_vertex>
#include <clipping_planes_vertex>
#include <worldpos_vertex>
#include <fog_vertex>
}
`,CG=`
uniform vec3 color;
uniform float opacity;
#include <common>
#include <packing>
#include <fog_pars_fragment>
#include <bsdfs>
#include <lights_pars_begin>
#include <shadowmap_pars_fragment>
#include <shadowmask_pars_fragment>
void main() {
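// ShadowMaterial output: the shadow color, with opacity scaled by how strongly the fragment is shadowed (getShadowMask() is 1.0 when fully lit)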
gl_FragColor = vec4( color, opacity * ( 1.0 - getShadowMask() ) );
#include <fog_fragment>
}
`,EG=`
#include <fog_pars_vertex>
#include <shadowmap_pars_vertex>
void main() {
#include <begin_vertex>
#include <project_vertex>
#include <worldpos_vertex>
#include <shadowmap_vertex>
#include <fog_vertex>
}
`,kG=`
uniform vec3 diffuse;
uniform float opacity;
#include <common>
#include <uv_pars_fragment>
#include <map_pars_fragment>
#include <fog_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <clipping_planes_pars_fragment>
void main() {
#include <clipping_planes_fragment>
vec3 outgoingLight = vec3( 0.0 );
vec4 diffuseColor = vec4( diffuse, opacity );
#include <logdepthbuf_fragment>
#include <map_fragment>
#include <alphatest_fragment>
outgoingLight = diffuseColor.rgb;
gl_FragColor = vec4( outgoingLight, diffuseColor.a );
#include <tonemapping_fragment>
#include <encodings_fragment>
#include <fog_fragment>
}
`,BG=`
uniform float rotation;
uniform vec2 center;
#include <common>
#include <uv_pars_vertex>
#include <fog_pars_vertex>
#include <logdepthbuf_pars_vertex>
#include <clipping_planes_pars_vertex>
void main() {
#include <uv_vertex>
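// billboard: transform only the sprite origin, take per-axis scale from the model matrix columns, then rotate and offset the corner in view space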
vec4 mvPosition = modelViewMatrix * vec4( 0.0, 0.0, 0.0, 1.0 );
vec2 scale;
scale.x = length( vec3( modelMatrix[ 0 ].x, modelMatrix[ 0 ].y, modelMatrix[ 0 ].z ) );
scale.y = length( vec3( modelMatrix[ 1 ].x, modelMatrix[ 1 ].y, modelMatrix[ 1 ].z ) );
#ifndef USE_SIZEATTENUATION
bool isPerspective = ( projectionMatrix[ 2 ][ 3 ] == - 1.0 );
if ( isPerspective ) scale *= - mvPosition.z;
#endif
vec2 alignedPosition = ( position.xy - ( center - vec2( 0.5 ) ) ) * scale;
vec2 rotatedPosition;
rotatedPosition.x = cos( rotation ) * alignedPosition.x - sin( rotation ) * alignedPosition.y;
rotatedPosition.y = sin( rotation ) * alignedPosition.x + cos( rotation ) * alignedPosition.y;
mvPosition.xy += rotatedPosition;
gl_Position = projectionMatrix * mvPosition;
#include <logdepthbuf_vertex>
#include <clipping_planes_vertex>
#include <fog_vertex>
}
`;var Qt={alphamap_fragment:mQ,alphamap_pars_fragment:_Q,alphatest_fragment:FQ,aomap_fragment:bQ,aomap_pars_fragment:vQ,begin_vertex:yQ,beginnormal_vertex:wQ,bsdfs:CQ,bumpmap_pars_fragment:EQ,clipping_planes_fragment:kQ,clipping_planes_pars_fragment:BQ,clipping_planes_pars_vertex:SQ,clipping_planes_vertex:DQ,color_fragment:xQ,color_pars_fragment:TQ,color_pars_vertex:IQ,color_vertex:PQ,common:MQ,cube_uv_reflection_fragment:LQ,defaultnormal_vertex:RQ,displacementmap_pars_vertex:UQ,displacementmap_vertex:OQ,emissivemap_fragment:NQ,emissivemap_pars_fragment:HQ,encodings_fragment:QQ,encodings_pars_fragment:jQ,envmap_fragment:GQ,envmap_pars_fragment:zQ,envmap_pars_vertex:qQ,envmap_physical_pars_fragment:ij,envmap_vertex:VQ,fog_vertex:WQ,fog_pars_vertex:KQ,fog_fragment:YQ,fog_pars_fragment:XQ,gradientmap_pars_fragment:JQ,lightmap_fragment:ZQ,lightmap_pars_fragment:$Q,lights_lambert_vertex:ej,lights_pars_begin:tj,lights_phong_fragment:nj,lights_phong_pars_fragment:aj,lights_physical_fragment:sj,lights_physical_pars_fragment:rj,lights_fragment_begin:oj,lights_fragment_maps:lj,lights_fragment_end:uj,logdepthbuf_fragment:cj,logdepthbuf_pars_fragment:dj,logdepthbuf_pars_vertex:fj,logdepthbuf_vertex:Aj,map_fragment:hj,map_pars_fragment:gj,map_particle_fragment:pj,map_particle_pars_fragment:mj,metalnessmap_fragment:_j,metalnessmap_pars_fragment:Fj,morphnormal_vertex:bj,morphtarget_pars_vertex:vj,morphtarget_vertex:yj,normal_fragment_begin:wj,normal_fragment_maps:Cj,normalmap_pars_fragment:Ej,packing:kj,premultiplied_alpha_fragment:Bj,project_vertex:Sj,dithering_fragment:Dj,dithering_pars_fragment:xj,roughnessmap_fragment:Tj,roughnessmap_pars_fragment:Ij,shadowmap_pars_fragment:Pj,shadowmap_pars_vertex:Mj,shadowmap_vertex:Lj,shadowmask_pars_fragment:Rj,skinbase_vertex:Uj,skinning_pars_vertex:Oj,skinning_vertex:Nj,skinnormal_vertex:Hj,specularmap_fragment:Qj,specularmap_pars_fragment:jj,tonemapping_fragment:Gj,tonemapping_pars_fragment:zj,uv_pars_fragment:qj,uv_pars_vertex:Vj,uv_vertex:Wj,uv2_pars_fragment:Kj,uv2_pars_vertex:Yj,uv2_vertex:Xj,worldpos_vertex:Jj,background_frag:Zj,background_vert:$j,cube_frag:eG,cube_vert:tG,depth_frag:iG,depth_vert:nG,distanceRGBA_frag:aG,distanceRGBA_vert:sG,equirect_frag:rG,equirect_vert:oG,linedashed_frag:lG,linedashed_vert:uG,meshbasic_frag:cG,meshbasic_vert:dG,meshlambert_frag:fG,meshlambert_vert:AG,meshmatcap_frag:hG,meshmatcap_vert:gG,meshphong_frag:pG,meshphong_vert:mG,meshphysical_frag:_G,meshphysical_vert:FG,normal_frag:bG,normal_vert:vG,points_frag:yG,points_vert:wG,shadow_frag:CG,shadow_vert:EG,sprite_frag:kG,sprite_vert:BG};function uc(e){var t={};for(var i in e){t[i]={};for(var n in e[i]){var a=e[i][n];a&&(a.isColor||a.isMatrix3||a.isMatrix4||a.isVector2||a.isVector3||a.isVector4||a.isTexture)?t[i][n]=a.clone():Array.isArray(a)?t[i][n]=a.slice():t[i][n]=a}}return t}function Hn(e){for(var t={},i=0;i<e.length;i++){var n=uc(e[i]);for(var a in n)t[a]=n[a]}return t}var 
SG={aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,this.fragmentShader=`void main() {
gl_FragColor = vec4( 1.0, 0.0, 0.0, 1.0 );
}`,this.linewidth=1,this.wireframe=!1,this.wireframeLinewidth=1,this.fog=!1,this.lights=!1,this.clipping=!1,this.skinning=!1,this.morphTargets=!1,this.morphNormals=!1,this.extensions={derivatives:!1,fragDepth:!1,drawBuffers:!1,shaderTextureLOD:!1},this.defaultAttributeValues={color:[1,1,1],uv:[0,0],uv2:[0,0]},this.index0AttributeName=void 0,this.uniformsNeedUpdate=!1,e!==void 0&&(e.attributes!==void 0&&console.error("THREE.ShaderMaterial: attributes should now be defined in THREE.BufferGeometry instead."),this.setValues(e))}Kn.prototype=Object.create(St.prototype);Kn.prototype.constructor=Kn;Kn.prototype.isShaderMaterial=!0;Kn.prototype.copy=function(e){return St.prototype.copy.call(this,e),this.fragmentShader=e.fragmentShader,this.vertexShader=e.vertexShader,this.uniforms=uc(e.uniforms),this.defines=Object.assign({},e.defines),this.wireframe=e.wireframe,this.wireframeLinewidth=e.wireframeLinewidth,this.lights=e.lights,this.clipping=e.clipping,this.skinning=e.skinning,this.morphTargets=e.morphTargets,this.morphNormals=e.morphNormals,this.extensions=e.extensions,this};Kn.prototype.toJSON=function(e){var t=St.prototype.toJSON.call(this,e);t.uniforms={};for(var i in this.uniforms){var n=this.uniforms[i],a=n.value;a&&a.isTexture?t.uniforms[i]={type:"t",value:a.toJSON(e).uuid}:a&&a.isColor?t.uniforms[i]={type:"c",value:a.getHex()}:a&&a.isVector2?t.uniforms[i]={type:"v2",value:a.toArray()}:a&&a.isVector3?t.uniforms[i]={type:"v3",value:a.toArray()}:a&&a.isVector4?t.uniforms[i]={type:"v4",value:a.toArray()}:a&&a.isMatrix3?t.uniforms[i]={type:"m3",value:a.toArray()}:a&&a.isMatrix4?t.uniforms[i]={type:"m4",value:a.toArray()}:t.uniforms[i]={value:a}}Object.keys(this.defines).length>0&&(t.defines=this.defines),t.vertexShader=this.vertexShader,t.fragmentShader=this.fragmentShader;var s={};for(var r in this.extensions)this.extensions[r]===!0&&(s[r]=!0);return Object.keys(s).length>0&&(t.extensions=s),t};function jl(e,t){this.origin=e!==void 0?e:new ee,this.direction=t!==void 0?t:new ee}Object.assign(jl.prototype,{set:function(e,t){return this.origin.copy(e),this.direction.copy(t),this},clone:function(){return new this.constructor().copy(this)},copy:function(e){return this.origin.copy(e.origin),this.direction.copy(e.direction),this},at:function(e,t){return t===void 0&&(console.warn("THREE.Ray: .at() target is now required"),t=new ee),t.copy(this.direction).multiplyScalar(e).add(this.origin)},lookAt:function(e){return this.direction.copy(e).sub(this.origin).normalize(),this},recast:function(){var e=new ee;return function(i){return this.origin.copy(this.at(i,e)),this}}(),closestPointToPoint:function(e,t){t===void 0&&(console.warn("THREE.Ray: .closestPointToPoint() target is now required"),t=new ee),t.subVectors(e,this.origin);var i=t.dot(this.direction);return i<0?t.copy(this.origin):t.copy(this.direction).multiplyScalar(i).add(this.origin)},distanceToPoint:function(e){return Math.sqrt(this.distanceSqToPoint(e))},distanceSqToPoint:function(){var e=new ee;return function(i){var n=e.subVectors(i,this.origin).dot(this.direction);return n<0?this.origin.distanceToSquared(i):(e.copy(this.direction).multiplyScalar(n).add(this.origin),e.distanceToSquared(i))}}(),distanceSqToSegment:function(){var e=new ee,t=new ee,i=new ee;return function(a,s,r,o){e.copy(a).add(s).multiplyScalar(.5),t.copy(s).sub(a).normalize(),i.copy(this.origin).sub(e);var 
l=a.distanceTo(s)*.5,u=-this.direction.dot(t),f=i.dot(this.direction),A=-i.dot(t),h=i.lengthSq(),p=Math.abs(1-u*u),F,y,E,w;if(p>0)if(F=u*A-f,y=u*f-A,w=l*p,F>=0)if(y>=-w)if(y<=w){var C=1/p;F*=C,y*=C,E=F*(F+u*y+2*f)+y*(u*F+y+2*A)+h}else y=l,F=Math.max(0,-(u*y+f)),E=-F*F+y*(y+2*A)+h;else y=-l,F=Math.max(0,-(u*y+f)),E=-F*F+y*(y+2*A)+h;else y<=-w?(F=Math.max(0,-(-u*l+f)),y=F>0?-l:Math.min(Math.max(-l,-A),l),E=-F*F+y*(y+2*A)+h):y<=w?(F=0,y=Math.min(Math.max(-l,-A),l),E=y*(y+2*A)+h):(F=Math.max(0,-(u*l+f)),y=F>0?l:Math.min(Math.max(-l,-A),l),E=-F*F+y*(y+2*A)+h);else y=u>0?-l:l,F=Math.max(0,-(u*y+f)),E=-F*F+y*(y+2*A)+h;return r&&r.copy(this.direction).multiplyScalar(F).add(this.origin),o&&o.copy(t).multiplyScalar(y
`),i=0;i<t.length;i++)t[i]=i+1+": "+t[i];return t.join(`
`)}function Gb(e,t,i){var n=e.createShader(t);return e.shaderSource(n,i),e.compileShader(n),e.getShaderParameter(n,e.COMPILE_STATUS)===!1&&console.error("THREE.WebGLShader: Shader couldn't compile."),e.getShaderInfoLog(n)!==""&&console.warn("THREE.WebGLShader: gl.getShaderInfoLog()",t===e.VERTEX_SHADER?"vertex":"fragment",e.getShaderInfoLog(n),mz(i)),n}var _z=0;function d4(e){switch(e){case Jf:return["Linear","( value )"];case lQ:return["sRGB","( value )"];case uQ:return["RGBE","( value )"];case cQ:return["RGBM","( value, 7.0 )"];case dQ:return["RGBM","( value, 16.0 )"];case fQ:return["RGBD","( value, 256.0 )"];case Z3:return["Gamma","( value, float( GAMMA_FACTOR ) )"];default:throw new Error("unsupported encoding: "+e)}}function Ld(e,t){var i=d4(t);return"vec4 "+e+"( vec4 value ) { return "+i[0]+"ToLinear"+i[1]+"; }"}function Fz(e,t){var i=d4(t);return"vec4 "+e+"( vec4 value ) { return LinearTo"+i[0]+i[1]+"; }"}function bz(e,t){var i;switch(t){case V3:i="Linear";break;case SH:i="Reinhard";break;case DH:i="Uncharted2";break;case xH:i="OptimizedCineon";break;case TH:i="ACESFilmic";break;default:throw new Error("unsupported toneMapping: "+t)}return"vec3 "+e+"( vec3 color ) { return "+i+"ToneMapping( color ); }"}function vz(e,t,i){e=e||{};var n=[e.derivatives||t.envMapCubeUV||t.bumpMap||t.normalMap&&!t.objectSpaceNormalMap||t.flatShading?"#extension GL_OES_standard_derivatives : enable":"",(e.fragDepth||t.logarithmicDepthBuffer)&&i.get("EXT_frag_depth")?"#extension GL_EXT_frag_depth : enable":"",e.drawBuffers&&i.get("WEBGL_draw_buffers")?"#extension GL_EXT_draw_buffers : require":"",(e.shaderTextureLOD||t.envMap)&&i.get("EXT_shader_texture_lod")?"#extension GL_EXT_shader_texture_lod : enable":""];return n.filter(_u).join(`
`)}function yz(e){var t=[];for(var i in e){var n=e[i];n!==!1&&t.push("#define "+i+" "+n)}return t.join(`
`)}function wz(e,t){for(var i={},n=e.getProgramParameter(t,e.ACTIVE_ATTRIBUTES),a=0;a<n;a++){var s=e.getActiveAttrib(t,a),r=s.name;i[r]=e.getAttribLocation(t,r)}return i}function _u(e){return e!==""}function zb(e,t){return e.replace(/NUM_DIR_LIGHTS/g,t.numDirLights).replace(/NUM_SPOT_LIGHTS/g,t.numSpotLights).replace(/NUM_RECT_AREA_LIGHTS/g,t.numRectAreaLights).replace(/NUM_POINT_LIGHTS/g,t.numPointLights).replace(/NUM_HEMI_LIGHTS/g,t.numHemiLights)}function qb(e,t){return e.replace(/NUM_CLIPPING_PLANES/g,t.numClippingPlanes).replace(/UNION_CLIPPING_PLANES/g,t.numClippingPlanes-t.numClipIntersection)}function Em(e){var t=/^[ \t]*#include +<([\w\d./]+)>/gm;function i(n,a){var s=Qt[a];if(s===void 0)throw new Error("Can not resolve #include <"+a+">");return Em(s)}return e.replace(t,i)}function Vb(e){var t=/#pragma unroll_loop[\s]+?for \( int i \= (\d+)\; i < (\d+)\; i \+\+ \) \{([\s\S]+?)(?=\})\}/g;function i(n,a,s,r){for(var o="",l=parseInt(a);l<parseInt(s);l++)o+=r.replace(/\[ i \]/g,"[ "+l+" ]");return o}return e.replace(t,i)}function Cz(e,t,i,n,a,s,r){var o=e.context,l=n.defines,u=a.vertexShader,f=a.fragmentShader,A="SHADOWMAP_TYPE_BASIC";s.shadowMapType===N3?A="SHADOWMAP_TYPE_PCF":s.shadowMapType===oH&&(A="SHADOWMAP_TYPE_PCF_SOFT");var h="ENVMAP_TYPE_CUBE",p="ENVMAP_MODE_REFLECTION",F="ENVMAP_BLENDING_MULTIPLY";if(s.envMap){switch(n.envMap.mapping){case v1:case mm:h="ENVMAP_TYPE_CUBE";break;case y1:case w1:h="ENVMAP_TYPE_CUBE_UV";break;case W3:case _m:h="ENVMAP_TYPE_EQUIREC";break;case K3:h="ENVMAP_TYPE_SPHERE";break}switch(n.envMap.mapping){case mm:case _m:p="ENVMAP_MODE_REFRACTION";break}switch(n.combine){case Eh:F="ENVMAP_BLENDING_MULTIPLY";break;case kH:F="ENVMAP_BLENDING_MIX";break;case BH:F="ENVMAP_BLENDING_ADD";break}}var y=e.gammaFactor>0?e.gammaFactor:1,E=r.isWebGL2?"":vz(n.extensions,s,t),w=yz(l),C=o.createProgram(),B,S;if(n.isRawShaderMaterial?(B=[w].filter(_u).join(`
`),B.length>0&&(B+=`
`),S=[E,w].filter(_u).join(`
`),S.length>0&&(S+=`
`)):(B=["precision "+s.precision+" float;","precision "+s.precision+" int;","#define SHADER_NAME "+a.name,w,s.supportsVertexTextures?"#define VERTEX_TEXTURES":"","#define GAMMA_FACTOR "+y,"#define MAX_BONES "+s.maxBones,s.useFog&&s.fog?"#define USE_FOG":"",s.useFog&&s.fogExp?"#define FOG_EXP2":"",s.map?"#define USE_MAP":"",s.envMap?"#define USE_ENVMAP":"",s.envMap?"#define "+p:"",s.lightMap?"#define USE_LIGHTMAP":"",s.aoMap?"#define USE_AOMAP":"",s.emissiveMap?"#define USE_EMISSIVEMAP":"",s.bumpMap?"#define USE_BUMPMAP":"",s.normalMap?"#define USE_NORMALMAP":"",s.normalMap&&s.objectSpaceNormalMap?"#define OBJECTSPACE_NORMALMAP":"",s.displacementMap&&s.supportsVertexTextures?"#define USE_DISPLACEMENTMAP":"",s.specularMap?"#define USE_SPECULARMAP":"",s.roughnessMap?"#define USE_ROUGHNESSMAP":"",s.metalnessMap?"#define USE_METALNESSMAP":"",s.alphaMap?"#define USE_ALPHAMAP":"",s.vertexColors?"#define USE_COLOR":"",s.flatShading?"#define FLAT_SHADED":"",s.skinning?"#define USE_SKINNING":"",s.useVertexTexture?"#define BONE_TEXTURE":"",s.morphTargets?"#define USE_MORPHTARGETS":"",s.morphNormals&&s.flatShading===!1?"#define USE_MORPHNORMALS":"",s.doubleSided?"#define DOUBLE_SIDED":"",s.flipSided?"#define FLIP_SIDED":"",s.shadowMapEnabled?"#define USE_SHADOWMAP":"",s.shadowMapEnabled?"#define "+A:"",s.sizeAttenuation?"#define USE_SIZEATTENUATION":"",s.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"",s.logarithmicDepthBuffer&&(r.isWebGL2||t.get("EXT_frag_depth"))?"#define USE_LOGDEPTHBUF_EXT":"","uniform mat4 modelMatrix;","uniform mat4 modelViewMatrix;","uniform mat4 projectionMatrix;","uniform mat4 viewMatrix;","uniform mat3 normalMatrix;","uniform vec3 cameraPosition;","attribute vec3 position;","attribute vec3 normal;","attribute vec2 uv;","#ifdef USE_COLOR"," attribute vec3 color;","#endif","#ifdef USE_MORPHTARGETS"," attribute vec3 morphTarget0;"," attribute vec3 morphTarget1;"," attribute vec3 morphTarget2;"," attribute vec3 morphTarget3;"," #ifdef USE_MORPHNORMALS"," attribute vec3 morphNormal0;"," attribute vec3 morphNormal1;"," attribute vec3 morphNormal2;"," attribute vec3 morphNormal3;"," #else"," attribute vec3 morphTarget4;"," attribute vec3 morphTarget5;"," attribute vec3 morphTarget6;"," attribute vec3 morphTarget7;"," #endif","#endif","#ifdef USE_SKINNING"," attribute vec4 skinIndex;"," attribute vec4 skinWeight;","#endif",`
`].filter(_u).join(`
`),S=[E,"precision "+s.precision+" float;","precision "+s.precision+" int;","#define SHADER_NAME "+a.name,w,s.alphaTest?"#define ALPHATEST "+s.alphaTest+(s.alphaTest%1?"":".0"):"","#define GAMMA_FACTOR "+y,s.useFog&&s.fog?"#define USE_FOG":"",s.useFog&&s.fogExp?"#define FOG_EXP2":"",s.map?"#define USE_MAP":"",s.matcap?"#define USE_MATCAP":"",s.envMap?"#define USE_ENVMAP":"",s.envMap?"#define "+h:"",s.envMap?"#define "+p:"",s.envMap?"#define "+F:"",s.lightMap?"#define USE_LIGHTMAP":"",s.aoMap?"#define USE_AOMAP":"",s.emissiveMap?"#define USE_EMISSIVEMAP":"",s.bumpMap?"#define USE_BUMPMAP":"",s.normalMap?"#define USE_NORMALMAP":"",s.normalMap&&s.objectSpaceNormalMap?"#define OBJECTSPACE_NORMALMAP":"",s.specularMap?"#define USE_SPECULARMAP":"",s.roughnessMap?"#define USE_ROUGHNESSMAP":"",s.metalnessMap?"#define USE_METALNESSMAP":"",s.alphaMap?"#define USE_ALPHAMAP":"",s.vertexColors?"#define USE_COLOR":"",s.gradientMap?"#define USE_GRADIENTMAP":"",s.flatShading?"#define FLAT_SHADED":"",s.doubleSided?"#define DOUBLE_SIDED":"",s.flipSided?"#define FLIP_SIDED":"",s.shadowMapEnabled?"#define USE_SHADOWMAP":"",s.shadowMapEnabled?"#define "+A:"",s.premultipliedAlpha?"#define PREMULTIPLIED_ALPHA":"",s.physicallyCorrectLights?"#define PHYSICALLY_CORRECT_LIGHTS":"",s.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"",s.logarithmicDepthBuffer&&(r.isWebGL2||t.get("EXT_frag_depth"))?"#define USE_LOGDEPTHBUF_EXT":"",s.envMap&&(r.isWebGL2||t.get("EXT_shader_texture_lod"))?"#define TEXTURE_LOD_EXT":"","uniform mat4 viewMatrix;","uniform vec3 cameraPosition;",s.toneMapping!==Jg?"#define TONE_MAPPING":"",s.toneMapping!==Jg?Qt.tonemapping_pars_fragment:"",s.toneMapping!==Jg?bz("toneMapping",s.toneMapping):"",s.dithering?"#define DITHERING":"",s.outputEncoding||s.mapEncoding||s.matcapEncoding||s.envMapEncoding||s.emissiveMapEncoding?Qt.encodings_pars_fragment:"",s.mapEncoding?Ld("mapTexelToLinear",s.mapEncoding):"",s.matcapEncoding?Ld("matcapTexelToLinear",s.matcapEncoding):"",s.envMapEncoding?Ld("envMapTexelToLinear",s.envMapEncoding):"",s.emissiveMapEncoding?Ld("emissiveMapTexelToLinear",s.emissiveMapEncoding):"",s.outputEncoding?Fz("linearToOutputTexel",s.outputEncoding):"",s.depthPacking?"#define DEPTH_PACKING "+n.depthPacking:"",`
`].filter(_u).join(`
`)),u=Em(u),u=zb(u,s),u=qb(u,s),f=Em(f),f=zb(f,s),f=qb(f,s),u=Vb(u),f=Vb(f),r.isWebGL2&&!n.isRawShaderMaterial){var U=!1,N=/^\s*#version\s+300\s+es\s*\n/;n.isShaderMaterial&&u.match(N)!==null&&f.match(N)!==null&&(U=!0,u=u.replace(N,""),f=f.replace(N,"")),B=[`#version 300 es
`,"#define attribute in","#define varying out","#define texture2D texture"].join(`
`)+`
`+B,S=[`#version 300 es
`,"#define varying in",U?"":"out highp vec4 pc_fragColor;",U?"":"#define gl_FragColor pc_fragColor","#define gl_FragDepthEXT gl_FragDepth","#define texture2D texture","#define textureCube texture","#define texture2DProj textureProj","#define texture2DLodEXT textureLod","#define texture2DProjLodEXT textureProjLod","#define textureCubeLodEXT textureLod","#define texture2DGradEXT textureGrad","#define texture2DProjGradEXT textureProjGrad","#define textureCubeGradEXT textureGrad"].join(`
`)+`
`+S}var z=B+u,Q=S+f,R=Gb(o,o.VERTEX_SHADER,z),W=Gb(o,o.FRAGMENT_SHADER,Q);o.attachShader(C,R),o.attachShader(C,W),n.index0AttributeName!==void 0?o.bindAttribLocation(C,0,n.index0AttributeName):s.morphTargets===!0&&o.bindAttribLocation(C,0,"position"),o.linkProgram(C);var Z=o.getProgramInfoLog(C).trim(),ce=o.getShaderInfoLog(R).trim(),Fe=o.getShaderInfoLog(W).trim(),ge=!0,le=!0;o.getProgramParameter(C,o.LINK_STATUS)===!1?(ge=!1,console.error("THREE.WebGLProgram: shader error: ",o.getError(),"gl.VALIDATE_STATUS",o.getProgramParameter(C,o.VALIDATE_STATUS),"gl.getProgramInfoLog",Z,ce,Fe)):Z!==""?console.warn("THREE.WebGLProgram: gl.getProgramInfoLog()",Z):(ce===""||Fe==="")&&(le=!1),le&&(this.diagnostics={runnable:ge,material:n,programLog:Z,vertexShader:{log:ce,prefix:B},fragmentShader:{log:Fe,prefix:S}}),o.deleteShader(R),o.deleteShader(W);var se;this.getUniforms=function(){return se===void 0&&(se=new cr(o,C,e)),se};var me;return this.getAttributes=function(){return me===void 0&&(me=wz(o,C)),me},this.destroy=function(){o.deleteProgram(C),this.program=void 0},Object.defineProperties(this,{uniforms:{get:function(){return console.warn("THREE.WebGLProgram: .uniforms is now .getUniforms()."),this.getUniforms()}},attributes:{get:function(){return console.warn("THREE.WebGLProgram: .attributes is now .getAttributes()."),this.getAttributes()}}}),this.name=a.name,this.id=_z++,this.code=i,this.usedTimes=1,this.program=C,this.vertexShader=R,this.fragmentShader=W,this}function Ez(e,t,i){var n=[],a={MeshDepthMaterial:"depth",MeshDistanceMaterial:"distanceRGBA",MeshNormalMaterial:"normal",MeshBasicMaterial:"basic",MeshLambertMaterial:"lambert",MeshPhongMaterial:"phong",MeshToonMaterial:"phong",MeshStandardMaterial:"physical",MeshPhysicalMaterial:"physical",MeshMatcapMaterial:"matcap",LineBasicMaterial:"basic",LineDashedMaterial:"dashed",PointsMaterial:"points",ShadowMaterial:"shadow",SpriteMaterial:"sprite"},s=["precision","supportsVertexTextures","map","mapEncoding","matcap","matcapEncoding","envMap","envMapMode","envMapEncoding","lightMap","aoMap","emissiveMap","emissiveMapEncoding","bumpMap","normalMap","objectSpaceNormalMap","displacementMap","specularMap","roughnessMap","metalnessMap","gradientMap","alphaMap","combine","vertexColors","fog","useFog","fogExp","flatShading","sizeAttenuation","logarithmicDepthBuffer","skinning","maxBones","useVertexTexture","morphTargets","morphNormals","maxMorphTargets","maxMorphNormals","premultipliedAlpha","numDirLights","numPointLights","numSpotLights","numHemiLights","numRectAreaLights","shadowMapEnabled","shadowMapType","toneMapping","physicallyCorrectLights","alphaTest","doubleSided","flipSided","numClippingPlanes","numClipIntersection","depthPacking","dithering"];function r(l){var u=l.skeleton,f=u.bones;if(i.floatVertexTextures)return 1024;var A=i.maxVertexUniforms,h=Math.floor((A-20)/4),p=Math.min(h,f.length);return p<f.length?(console.warn("THREE.WebGLRenderer: Skeleton has "+f.length+" bones. This GPU supports "+p+"."),0):p}function o(l,u){var f;return l?l.isTexture?f=l.encoding:l.isWebGLRenderTarget&&(console.warn("THREE.WebGLPrograms.getTextureEncodingFromMap: don't use render targets as textures. 
Use their .texture property instead."),f=l.texture.encoding):f=Jf,f===Jf&&u&&(f=Z3),f}this.getParameters=function(l,u,f,A,h,p,F){var y=a[l.type],E=F.isSkinnedMesh?r(F):0,w=i.precision;l.precision!==null&&(w=i.getMaxPrecision(l.precision),w!==l.precision&&console.warn("THREE.WebGLProgram.getParameters:",l.precision,"not supported, using",w,"instead."));var C=e.getRenderTarget(),B={shaderID:y,precision:w,supportsVertexTextures:i.vertexTextures,outputEncoding:o(C?C.texture:null,e.gammaOutput),map:!!l.map,mapEncoding:o(l.map,e.gammaInput),matcap:!!l.matcap,matcapEncoding:o(l.matcap,e.gammaInput),envMap:!!l.envMap,envMapMode:l.envMap&&l.envMap.mapping,envMapEncoding:o(l.envMap,e.gammaInput),envMapCubeUV:!!l.envMap&&(l.envMap.mapping===y1||l.envMap.mapping===w1),lightMap:!!l.lightMap,aoMap:!!l.aoMap,emissiveMap:!!l.emissiveMap,emissiveMapEncoding:o(l.emissiveMap,e.gammaInput),bumpMap:!!l.bumpMap,normalM
Object.assign(Aa.prototype,{beforeStart_:Aa.prototype.copySampleValue_,afterEnd_:Aa.prototype.copySampleValue_});function Im(e,t,i,n){Aa.call(this,e,t,i,n),this._weightPrev=-0,this._offsetPrev=-0,this._weightNext=-0,this._offsetNext=-0}Im.prototype=Object.assign(Object.create(Aa.prototype),{constructor:Im,DefaultSettings_:{endingStart:pl,endingEnd:pl},intervalChanged_:function(e,t,i){var n=this.parameterPositions,a=e-2,s=e+1,r=n[a],o=n[s];if(r===void 0)switch(this.getSettings_().endingStart){case Zo:a=e,r=2*t-i;break;case Xf:a=n.length-2,r=t+n[a]-n[a+1];break;default:a=e,r=i}if(o===void 0)switch(this.getSettings_().endingEnd){case Zo:s=e,o=2*i-t;break;case Xf:s=1,o=i+n[1]-n[0];break;default:s=e-1,o=t}var l=(i-t)*.5,u=this.valueSize;this._weightPrev=l/(t-r),this._weightNext=l/(o-i),this._offsetPrev=a*u,this._offsetNext=s*u},interpolate_:function(e,t,i,n){for(var a=this.resultBuffer,s=this.sampleValues,r=this.valueSize,o=e*r,l=o-r,u=this._offsetPrev,f=this._offsetNext,A=this._weightPrev,h=this._weightNext,p=(i-t)/(n-t),F=p*p,y=F*p,E=-A*y+2*A*F-A*p,w=(1+A)*y+(-1.5-2*A)*F+(-.5+A)*p+1,C=(-1-h)*y+(1.5+h)*F+.5*p,B=h*y-h*F,S=0;S!==r;++S)a[S]=E*s[u+S]+w*s[l+S]+C*s[o+S]+B*s[f+S];return a}});function BA(e,t,i,n){Aa.call(this,e,t,i,n)}BA.prototype=Object.assign(Object.create(Aa.prototype),{constructor:BA,interpolate_:function(e,t,i,n){for(var a=this.resultBuffer,s=this.sampleValues,r=this.valueSize,o=e*r,l=o-r,u=(i-t)/(n-t),f=1-u,A=0;A!==r;++A)a[A]=s[l+A]*f+s[o+A]*u;return a}});function Pm(e,t,i,n){Aa.call(this,e,t,i,n)}Pm.prototype=Object.assign(Object.create(Aa.prototype),{constructor:Pm,interpolate_:function(e){return this.copySampleValue_(e-1)}});function mn(e,t,i,n){if(e===void 0)throw new Error("THREE.KeyframeTrack: track name is undefined");if(t===void 0||t.length===0)throw new Error("THREE.KeyframeTrack: no keyframes in track named "+e);this.name=e,this.times=Sn.convertArray(t,this.TimeBufferType),this.values=Sn.convertArray(i,this.ValueBufferType),this.setInterpolation(n||this.DefaultInterpolation)}Object.assign(mn,{toJSON:function(e){var t=e.constructor,i;if(t.toJSON!==void 0)i=t.toJSON(e);else{i={name:e.name,times:Sn.convertArray(e.times,Array),values:Sn.convertArray(e.values,Array)};var n=e.getInterpolation();n!==e.DefaultInterpolation&&(i.interpolation=n)}return i.type=e.ValueTypeName,i}});Object.assign(mn.prototype,{constructor:mn,TimeBufferType:Float32Array,ValueBufferType:Float32Array,DefaultInterpolation:Cf,InterpolantFactoryMethodDiscrete:function(e){return new Pm(this.times,this.values,this.getValueSize(),e)},InterpolantFactoryMethodLinear:function(e){return new BA(this.times,this.values,this.getValueSize(),e)},InterpolantFactoryMethodSmooth:function(e){return new Im(this.times,this.values,this.getValueSize(),e)},setInterpolation:function(e){var t;switch(e){case Yf:t=this.InterpolantFactoryMethodDiscrete;break;case Cf:t=this.InterpolantFactoryMethodLinear;break;case Zg:t=this.InterpolantFactoryMethodSmooth;break}if(t===void 0){var i="unsupported interpolation for "+this.ValueTypeName+" keyframe track named "+this.name;if(this.createInterpolant===void 0)if(e!==this.DefaultInterpolation)this.setInterpolation(this.DefaultInterpolation);else throw new Error(i);return console.warn("THREE.KeyframeTrack:",i),this}return this.createInterpolant=t,this},getInterpolation:function(){switch(this.createInterpolant){case this.InterpolantFactoryMethodDiscrete:return Yf;case this.InterpolantFactoryMethodLinear:return Cf;case this.InterpolantFactoryMethodSmooth:return 
Zg}},getValueSize:function(){return this.values.length/this.times.length},shift:function(e){if(e!==0)for(var t=this.times,i=0,n=t.length;i!==n;++i)t[i]+=e;return this},scale:function(e){if(e!==1)for(var t=this.times,i=0,n=t.length;i!==n;++i)t[i]*=e;return this},trim:function(e,t){for(var i=this.times,n=i.length,a=0,s=n-1;a!==n&&i[a]<e;)++a;for(;s!==-1&&i[s]>t;)--s;if(++s,a!==0||s!==n){a>=s&&(s=Math.max(s,1),a=s-1);var r=this.getValueSize();this.times=Sn.arraySlice(i,a,s),this.values=Sn.arraySlice(this.values,a*r,s*r)}return this},validate:function(){var e=!0,t=this.g
`)o=0,l-=s;else{var A=pq(f,a,o,l,i);o+=A.offsetX,r.push(A.path)}}return r}function pq(e,t,i,n,a){var s=a.glyphs[e]||a.glyphs["?"];if(s){var r=new k4,o,l,u,f,A,h,p,F;if(s.o)for(var y=s._cachedOutline||(s._cachedOutline=s.o.split(" ")),E=0,w=y.length;E<w;){var C=y[E++];switch(C){case"m":o=y[E++]*t+i,l=y[E++]*t+n,r.moveTo(o,l);break;case"l":o=y[E++]*t+i,l=y[E++]*t+n,r.lineTo(o,l);break;case"q":u=y[E++]*t+i,f=y[E++]*t+n,A=y[E++]*t+i,h=y[E++]*t+n,r.quadraticCurveTo(A,h,u,f);break;case"b":u=y[E++]*t+i,f=y[E++]*t+n,A=y[E++]*t+i,h=y[E++]*t+n,p=y[E++]*t+i,F=y[E++]*t+n,r.bezierCurveTo(A,h,p,F,u,f);break}}return{offsetX:s.ha*t,path:r}}}function mq(e){this.manager=e!==void 0?e:Zn}Object.assign(mq.prototype,{load:function(e,t,i,n){var a=this,s=new Us(this.manager);s.setPath(this.path),s.load(e,function(r){var o;try{o=JSON.parse(r)}catch{console.warn("THREE.FontLoader: typeface.js support is being deprecated. Use typeface.json instead."),o=JSON.parse(r.substring(65,r.length-2))}var l=a.parse(o);t&&t(l)},i,n)},parse:function(e){return new B4(e)},setPath:function(e){return this.path=e,this}});function DA(){}DA.Handlers={handlers:[],add:function(e,t){this.handlers.push(e,t)},get:function(e){for(var t=this.handlers,i=0,n=t.length;i<n;i+=2){var a=t[i],s=t[i+1];if(a.test(e))return s}return null}};Object.assign(DA.prototype,{crossOrigin:"anonymous",onLoadStart:function(){},onLoadProgress:function(){},onLoadComplete:function(){},initMaterials:function(e,t,i){for(var n=[],a=0;a<e.length;++a)n[a]=this.createMaterial(e[a],t,i);return n},createMaterial:function(){var e={NoBlending:Uu,NormalBlending:ll,AdditiveBlending:Am,SubtractiveBlending:hm,MultiplyBlending:gm,CustomBlending:G3},t=new Ft,i=new x1,n=new I1;return function(s,r,o){var l={};function u(p,F,y,E,w){var C=r+p,B=DA.Handlers.get(C),S;B!==null?S=B.load(C):(i.setCrossOrigin(o),S=i.load(C)),F!==void 0&&(S.repeat.fromArray(F),F[0]!==1&&(S.wrapS=ir),F[1]!==1&&(S.wrapT=ir)),y!==void 0&&S.offset.fromArray(y),E!==void 0&&(E[0]==="repeat"&&(S.wrapS=ir),E[0]==="mirror"&&(S.wrapS=gl),E[1]==="repeat"&&(S.wrapT=ir),E[1]==="mirror"&&(S.wrapT=gl)),w!==void 0&&(S.anisotropy=w);var U=It.generateUUID();return l[U]=S,U}var f={uuid:It.generateUUID(),type:"MeshLambertMaterial"};for(var A in s){var h=s[A];switch(A){case"DbgColor":case"DbgIndex":case"opticalDensity":case"illumination":break;case"DbgName":f.name=h;break;case"blending":f.blending=e[h];break;case"colorAmbient":case"mapAmbient":console.warn("THREE.Loader.createMaterial:",A,"is no longer 
supported.");break;case"colorDiffuse":f.color=t.fromArray(h).getHex();break;case"colorSpecular":f.specular=t.fromArray(h).getHex();break;case"colorEmissive":f.emissive=t.fromArray(h).getHex();break;case"specularCoef":f.shininess=h;break;case"shading":h.toLowerCase()==="basic"&&(f.type="MeshBasicMaterial"),h.toLowerCase()==="phong"&&(f.type="MeshPhongMaterial"),h.toLowerCase()==="standard"&&(f.type="MeshStandardMaterial");break;case"mapDiffuse":f.map=u(h,s.mapDiffuseRepeat,s.mapDiffuseOffset,s.mapDiffuseWrap,s.mapDiffuseAnisotropy);break;case"mapDiffuseRepeat":case"mapDiffuseOffset":case"mapDiffuseWrap":case"mapDiffuseAnisotropy":break;case"mapEmissive":f.emissiveMap=u(h,s.mapEmissiveRepeat,s.mapEmissiveOffset,s.mapEmissiveWrap,s.mapEmissiveAnisotropy);break;case"mapEmissiveRepeat":case"mapEmissiveOffset":case"mapEmissiveWrap":case"mapEmissiveAnisotropy":break;case"mapLight":f.lightMap=u(h,s.mapLightRepeat,s.mapLightOffset,s.mapLightWrap,s.mapLightAnisotropy);break;case"mapLightRepeat":case"mapLightOffset":case"mapLightWrap":case"mapLightAnisotropy":break;case"mapAO":f.aoMap=u(h,s.mapAORepeat,s.mapAOOffset,s.mapAOWrap,s.mapAOAnisotropy);break;case"mapAORepeat":case"mapAOOffset":case"mapAOWrap":case"mapAOAnisotropy":break;case"mapBump":f.bumpMap=u(h,s.mapBumpRepeat,s.mapBumpOffset,s.mapBumpWrap,s.mapBumpAnisotropy);break;case"mapBumpScale":f.bumpScale=h;break;case"mapBumpRepeat":case"mapBumpOffset":case"mapBumpWrap":case"mapBumpAnisotropy":break;case"mapNormal":f.normalMap=u(h,s.mapNormalRepeat,s.mapNormalOffset,s.mapNormalWrap,s.mapNormalAnisotropy);break;case"map
Object.assign(Dn.prototype,{_getValue_unbound:Dn.prototype.getValue,_setValue_unbound:Dn.prototype.setValue});function Fq(){this.uuid=It.generateUUID(),this._objects=Array.prototype.slice.call(arguments),this.nCachedObjects_=0;var e={};this._indicesByUUID=e;for(var t=0,i=arguments.length;t!==i;++t)e[arguments[t].uuid]=t;this._paths=[],this._parsedPaths=[],this._bindings=[],this._bindingsIndicesByPath={};var n=this;this.stats={objects:{get total(){return n._objects.length},get inUse(){return this.total-n.nCachedObjects_}},get bindingsPerObject(){return n._bindings.length}}}Object.assign(Fq.prototype,{isAnimationObjectGroup:!0,add:function(){for(var e=this._objects,t=e.length,i=this.nCachedObjects_,n=this._indicesByUUID,a=this._paths,s=this._parsedPaths,r=this._bindings,o=r.length,l=void 0,u=0,f=arguments.length;u!==f;++u){var A=arguments[u],h=A.uuid,p=n[h];if(p===void 0){p=t++,n[h]=p,e.push(A);for(var F=0,y=o;F!==y;++F)r[F].push(new Dn(A,a[F],s[F]))}else if(p<i){l=e[p];var E=--i,w=e[E];n[w.uuid]=p,e[p]=w,n[h]=E,e[E]=A;for(var F=0,y=o;F!==y;++F){var C=r[F],B=C[E],S=C[p];C[p]=B,S===void 0&&(S=new Dn(A,a[F],s[F])),C[E]=S}}else e[p]!==l&&console.error("THREE.AnimationObjectGroup: Different objects with the same UUID detected. Clean the caches or recreate your infrastructure when reloading scenes.")}this.nCachedObjects_=i},remove:function(){for(var e=this._objects,t=this.nCachedObjects_,i=this._indicesByUUID,n=this._bindings,a=n.length,s=0,r=arguments.length;s!==r;++s){var o=arguments[s],l=o.uuid,u=i[l];if(u!==void 0&&u>=t){var f=t++,A=e[f];i[A.uuid]=u,e[u]=A,i[l]=f,e[f]=o;for(var h=0,p=a;h!==p;++h){var F=n[h],y=F[f],E=F[u];F[u]=y,F[f]=E}}}this.nCachedObjects_=t},uncache:function(){for(var e=this._objects,t=e.length,i=this.nCachedObjects_,n=this._indicesByUUID,a=this._bindings,s=a.length,r=0,o=arguments.length;r!==o;++r){var l=arguments[r],u=l.uuid,f=n[u];if(f!==void 0)if(delete n[u],f<i){var A=--i,h=e[A],p=--t,F=e[p];n[h.uuid]=f,e[f]=h,n[F.uuid]=A,e[A]=F,e.pop();for(var y=0,E=s;y!==E;++y){var w=a[y],C=w[A],B=w[p];w[f]=C,w[A]=B,w.pop()}}else{var p=--t,F=e[p];n[F.uuid]=f,e[f]=F,e.pop();for(var y=0,E=s;y!==E;++y){var w=a[y];w[f]=w[p],w.pop()}}}this.nCachedObjects_=i},subscribe_:function(e,t){var i=this._bindingsIndicesByPath,n=i[e],a=this._bindings;if(n!==void 0)return a[n];var s=this._paths,r=this._parsedPaths,o=this._objects,l=o.length,u=this.nCachedObjects_,f=new Array(l);n=a.length,i[e]=n,s.push(e),r.push(t),a.push(f);for(var A=u,h=o.length;A!==h;++A){var p=o[A];f[A]=new Dn(p,e,t)}return f},unsubscribe_:function(e){var t=this._bindingsIndicesByPath,i=t[e];if(i!==void 0){var n=this._paths,a=this._parsedPaths,s=this._bindings,r=s.length-1,o=s[r],l=e[r];t[l]=i,s[i]=o,s.pop(),a[i]=a[r],a.pop(),n[i]=n[r],n.pop()}}});function M4(e,t,i){this._mixer=e,this._clip=t,this._localRoot=i||null;for(var n=t.tracks,a=n.length,s=new Array(a),r={endingStart:pl,endingEnd:pl},o=0;o!==a;++o){var l=n[o].createInterpolant(null);s[o]=l,l.settings=r}this._interpolantSettings=r,this._interpolants=s,this._propertyBindings=new Array(a),this._cacheIndex=null,this._byClipCacheIndex=null,this._timeScaleInterpolant=null,this._weightInterpolant=null,this.loop=aQ,this._loopCount=-1,this._startTime=null,this.time=0,this.timeScale=1,this._effectiveTimeScale=1,this.weight=1,this._effectiveWeight=1,this.repetitions=1/0,this.paused=!1,this.enabled=!0,this.clampWhenFinished=!1,this.zeroSlopeAtStart=!0,this.zeroSlopeAtEnd=!0}Object.assign(M4.prototype,{play:function(){return 
this._mixer._activateAction(this),this},stop:function(){return this._mixer._deactivateAction(this),this.reset()},reset:function(){return this.paused=!1,this.enabled=!0,this.time=0,this._loopCount=-1,this._startTime=null,this.stopFading().stopWarping()},isRunning:function(){return this.enabled&&!this.paused&&this.timeScale!==0&&this._startTime===null&&this._mixer._isActiveAction(this)},isScheduled:function(){return this._mixer._isActiveAction(this)},startAt:function(e){return this._startTime=e,this},setLoop:function(e,t){return this.loop=e,this.repetitions=t,this},setEffectiveWeight:function(e){
const wq="",Cq="";let Nd=!1,hv,Eq=0;function kq(e){const[t,i]=e.split(","),n=t.match(/:(.*?);/)[1],a=atob(i),s=new ArrayBuffer(a.length),r=new Uint8Array(s);for(let o=0;o<a.length;o++)r[o]=a.charCodeAt(o);return new Blob([s],{type:n})}class Bq{constructor(){window.addEventListener("resize",this.resize.bind(this)),this.renderer=new D1,this.renderer.autoClear=!1,this.renderer.setSize(window.innerWidth,window.innerHeight),this.rtTexture=new Ya(window.innerWidth,window.innerHeight,{minFilter:gn,magFilter:sn,format:ks,type:Bh}),this.gameTexture=new bl,this.gameTexture.needsUpdate=!0,this.material=this.createShaderMaterial(),this.sceneRTT=this.createScene(),this.cameraRTT=this.createCamera(),this.appendRendererToDOM(),this.animate=this.animate.bind(this),requestAnimationFrame(this.animate)}createCamera(){const t=new Rc(window.innerWidth/-2,window.innerWidth/2,window.innerHeight/2,window.innerHeight/-2,-1e4,1e4);return t.setViewOffset(window.innerWidth,window.innerHeight,0,0,window.innerWidth,window.innerHeight),t}createScene(){const t=new iA,i=new Ao(window.innerWidth,window.innerHeight),n=new xn(i,this.material);return n.position.z=-100,t.add(n),t}createShaderMaterial(){return new Kn({uniforms:{tDiffuse:{value:this.gameTexture}},vertexShader:`
varying vec2 vUv;
void main() {
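// pass UVs with V inverted (vertical flip of the sampled game texture)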
vUv = vec2(uv.x, 1.0 - uv.y);
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}`,fragmentShader:`
varying vec2 vUv;
uniform sampler2D tDiffuse;
void main() {
gl_FragColor = texture2D(tDiffuse, vUv);
}`})}appendRendererToDOM(){const t=document.createElement("div");t.id="three-game-render",t.style.display="none",t.appendChild(this.renderer.domElement),document.body.appendChild(t)}resize(){this.cameraRTT=this.createCamera(),this.sceneRTT=this.createScene(),this.rtTexture.setSize(window.innerWidth,window.innerHeight),this.renderer.setSize(window.innerWidth,window.innerHeight)}animate(){if(requestAnimationFrame(this.animate),Nd){this.renderer.clear(),this.renderer.render(this.sceneRTT,this.cameraRTT,this.rtTexture,!0);const t=new Uint8Array(window.innerWidth*window.innerHeight*4);this.renderer.readRenderTargetPixels(this.rtTexture,0,0,window.innerWidth,window.innerHeight,t),this.updateCanvas(t)}}updateCanvas(t){this.canvas||this.createTempCanvas(),this.canvas.style.display="inline",this.canvas.width=window.innerWidth,this.canvas.height=window.innerHeight;const i=this.canvas.getContext("2d"),n=new ImageData(new Uint8ClampedArray(t.buffer),window.innerWidth,window.innerHeight);i.putImageData(n,0,0)}createTempCanvas(){this.canvas=document.createElement("canvas"),this.canvas.style.display="none",document.body.appendChild(this.canvas)}renderToTarget(t){this.canvas=t,Nd=!0}async requestScreenshot(t=wq,i=Cq,n={}){if(!t||!i)return console.warn("URL or field is not defined."),null;this.canvas||this.createTempCanvas(),Nd=!0,await new Promise(r=>setTimeout(r,10));const a=this.canvas.toDataURL("image/png"),s=new FormData;s.append(i,kq(a),"screenshot.png");try{const o=await(await fetch(t,{method:"POST",mode:"cors",headers:n,body:s})).json();return Eq++,this.canvas.style.display="none",o}catch(r){return console.error("Screenshot-Upload error:",r),null}finally{}}stop(){Nd=!1,this.canvas&&(this.canvas.style.display="none")}}setTimeout(()=>{hv=new Bq,window.MainRender=hv},1e3);var sp={};/*!
* howler.js v2.2.4
* howlerjs.com
*
* (c) 2013-2020, James Simpson of GoldFire Studios
* goldfirestudios.com
*
* MIT License
*/var gv;function Sq(){return gv||(gv=1,function(e){(function(){var t=function(){this.init()};t.prototype={init:function(){var A=this||i;return A._counter=1e3,A._html5AudioPool=[],A.html5PoolSize=10,A._codecs={},A._howls=[],A._muted=!1,A._volume=1,A._canPlayEvent="canplaythrough",A._navigator=typeof window<"u"&&window.navigator?window.navigator:null,A.masterGain=null,A.noAudio=!1,A.usingWebAudio=!0,A.autoSuspend=!0,A.ctx=null,A.autoUnlock=!0,A._setup(),A},volume:function(A){var h=this||i;if(A=parseFloat(A),h.ctx||f(),typeof A<"u"&&A>=0&&A<=1){if(h._volume=A,h._muted)return h;h.usingWebAudio&&h.masterGain.gain.setValueAtTime(A,i.ctx.currentTime);for(var p=0;p<h._howls.length;p++)if(!h._howls[p]._webAudio)for(var F=h._howls[p]._getSoundIds(),y=0;y<F.length;y++){var E=h._howls[p]._soundById(F[y]);E&&E._node&&(E._node.volume=E._volume*A)}return h}return h._volume},mute:function(A){var h=this||i;h.ctx||f(),h._muted=A,h.usingWebAudio&&h.masterGain.gain.setValueAtTime(A?0:h._volume,i.ctx.currentTime);for(var p=0;p<h._howls.length;p++)if(!h._howls[p]._webAudio)for(var F=h._howls[p]._getSoundIds(),y=0;y<F.length;y++){var E=h._howls[p]._soundById(F[y]);E&&E._node&&(E._node.muted=A?!0:E._muted)}return h},stop:function(){for(var A=this||i,h=0;h<A._howls.length;h++)A._howls[h].stop();return A},unload:function(){for(var A=this||i,h=A._howls.length-1;h>=0;h--)A._howls[h].unload();return A.usingWebAudio&&A.ctx&&typeof A.ctx.close<"u"&&(A.ctx.close(),A.ctx=null,f()),A},codecs:function(A){return(this||i)._codecs[A.replace(/^x-/,"")]},_setup:function(){var A=this||i;if(A.state=A.ctx&&A.ctx.state||"suspended",A._autoSuspend(),!A.usingWebAudio)if(typeof Audio<"u")try{var h=new Audio;typeof h.oncanplaythrough>"u"&&(A._canPlayEvent="canplay")}catch{A.noAudio=!0}else A.noAudio=!0;try{var h=new Audio;h.muted&&(A.noAudio=!0)}catch{}return A.noAudio||A._setupCodecs(),A},_setupCodecs:function(){var A=this||i,h=null;try{h=typeof Audio<"u"?new Audio:null}catch{return A}if(!h||typeof h.canPlayType!="function")return A;var p=h.canPlayType("audio/mpeg;").replace(/^no$/,""),F=A._navigator?A._navigator.userAgent:"",y=F.match(/OPR\/(\d+)/g),E=y&&parseInt(y[0].split("/")[1],10)<33,w=F.indexOf("Safari")!==-1&&F.indexOf("Chrome")===-1,C=F.match(/Version\/(.*?) 
/),B=w&&C&&parseInt(C[1],10)<15;return A._codecs={mp3:!!(!E&&(p||h.canPlayType("audio/mp3;").replace(/^no$/,""))),mpeg:!!p,opus:!!h.canPlayType('audio/ogg; codecs="opus"').replace(/^no$/,""),ogg:!!h.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,""),oga:!!h.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,""),wav:!!(h.canPlayType('audio/wav; codecs="1"')||h.canPlayType("audio/wav")).replace(/^no$/,""),aac:!!h.canPlayType("audio/aac;").replace(/^no$/,""),caf:!!h.canPlayType("audio/x-caf;").replace(/^no$/,""),m4a:!!(h.canPlayType("audio/x-m4a;")||h.canPlayType("audio/m4a;")||h.canPlayType("audio/aac;")).replace(/^no$/,""),m4b:!!(h.canPlayType("audio/x-m4b;")||h.canPlayType("audio/m4b;")||h.canPlayType("audio/aac;")).replace(/^no$/,""),mp4:!!(h.canPlayType("audio/x-mp4;")||h.canPlayType("audio/mp4;")||h.canPlayType("audio/aac;")).replace(/^no$/,""),weba:!!(!B&&h.canPlayType('audio/webm; codecs="vorbis"').replace(/^no$/,"")),webm:!!(!B&&h.canPlayType('audio/webm; codecs="vorbis"').replace(/^no$/,"")),dolby:!!h.canPlayType('audio/mp4; codecs="ec-3"').replace(/^no$/,""),flac:!!(h.canPlayType("audio/x-flac;")||h.canPlayType("audio/flac;")).replace(/^no$/,"")},A},_unlockAudio:function(){var A=this||i;if(!(A._audioUnlocked||!A.ctx)){A._audioUnlocked=!1,A.autoUnlock=!1,!A._mobileUnloaded&&A.ctx.sampleRate!==44100&&(A._mobileUnloaded=!0,A.unload()),A._scratchBuffer=A.ctx.createBuffer(1,1,22050);var h=function(p){for(;A._html5AudioPool.length<A.html5PoolSize;)try{var F=new Audio;F._unlocked=!0,A._releaseHtml5Audio(F)}catch{A.noAudio=!0;break}for(var y=0;y<A._howls.length;y++)if(!A._howls[y]._webAudio)for(var E=A._howls[y]._getSoundIds(),w=0;w<E.length;w++){var C=A._howls[y]._soundById(E[w]);C&&C._node&&!C._node._unlocked&&(C._node._unlocked=!0,C._node.load())}A._autoResume();var B=A.ctx.createBu
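/*
* A minimal usage sketch of the Howler global mixer implemented above (volume, mute, codecs)
* together with a standard Howl instance as documented by howler.js; the audio path is a
* placeholder, not an asset shipped in this bundle:
*
*   Howler.volume(0.5);                 // master volume, 0.0 - 1.0
*   Howler.mute(true);                  // mute / unmute every Howl at once
*   console.log(Howler.codecs('mp3'));  // feature-detect a codec before loading
*
*   const ring = new Howl({ src: ['sounds/ring.ogg'], loop: true, volume: 0.8 });
*   const id = ring.play();
*   // ...later
*   ring.stop(id);
*/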
* Spatial Plugin - Adds support for stereo and 3D audio where Web Audio is supported.
*
* howler.js v2.2.4
* howlerjs.com
*
* (c) 2013-2020, James Simpson of GoldFire Studios
* goldfirestudios.com
*
* MIT License
*/(function(){HowlerGlobal.prototype._pos=[0,0,0],HowlerGlobal.prototype._orientation=[0,0,-1,0,1,0],HowlerGlobal.prototype.stereo=function(i){var n=this;if(!n.ctx||!n.ctx.listener)return n;for(var a=n._howls.length-1;a>=0;a--)n._howls[a].stereo(i);return n},HowlerGlobal.prototype.pos=function(i,n,a){var s=this;if(!s.ctx||!s.ctx.listener)return s;if(n=typeof n!="number"?s._pos[1]:n,a=typeof a!="number"?s._pos[2]:a,typeof i=="number")s._pos=[i,n,a],typeof s.ctx.listener.positionX<"u"?(s.ctx.listener.positionX.setTargetAtTime(s._pos[0],Howler.ctx.currentTime,.1),s.ctx.listener.positionY.setTargetAtTime(s._pos[1],Howler.ctx.currentTime,.1),s.ctx.listener.positionZ.setTargetAtTime(s._pos[2],Howler.ctx.currentTime,.1)):s.ctx.listener.setPosition(s._pos[0],s._pos[1],s._pos[2]);else return s._pos;return s},HowlerGlobal.prototype.orientation=function(i,n,a,s,r,o){var l=this;if(!l.ctx||!l.ctx.listener)return l;var u=l._orientation;if(n=typeof n!="number"?u[1]:n,a=typeof a!="number"?u[2]:a,s=typeof s!="number"?u[3]:s,r=typeof r!="number"?u[4]:r,o=typeof o!="number"?u[5]:o,typeof i=="number")l._orientation=[i,n,a,s,r,o],typeof l.ctx.listener.forwardX<"u"?(l.ctx.listener.forwardX.setTargetAtTime(i,Howler.ctx.currentTime,.1),l.ctx.listener.forwardY.setTargetAtTime(n,Howler.ctx.currentTime,.1),l.ctx.listener.forwardZ.setTargetAtTime(a,Howler.ctx.currentTime,.1),l.ctx.listener.upX.setTargetAtTime(s,Howler.ctx.currentTime,.1),l.ctx.listener.upY.setTargetAtTime(r,Howler.ctx.currentTime,.1),l.ctx.listener.upZ.setTargetAtTime(o,Howler.ctx.currentTime,.1)):l.ctx.listener.setOrientation(i,n,a,s,r,o);else return u;return l},Howl.prototype.init=function(i){return function(n){var a=this;return a._orientation=n.orientation||[1,0,0],a._stereo=n.stereo||null,a._pos=n.pos||null,a._pannerAttr={coneInnerAngle:typeof n.coneInnerAngle<"u"?n.coneInnerAngle:360,coneOuterAngle:typeof n.coneOuterAngle<"u"?n.coneOuterAngle:360,coneOuterGain:typeof n.coneOuterGain<"u"?n.coneOuterGain:0,distanceModel:typeof n.distanceModel<"u"?n.distanceModel:"inverse",maxDistance:typeof n.maxDistance<"u"?n.maxDistance:1e4,panningModel:typeof n.panningModel<"u"?n.panningModel:"HRTF",refDistance:typeof n.refDistance<"u"?n.refDistance:1,rolloffFactor:typeof n.rolloffFactor<"u"?n.rolloffFactor:1},a._onstereo=n.onstereo?[{fn:n.onstereo}]:[],a._onpos=n.onpos?[{fn:n.onpos}]:[],a._onorientation=n.onorientation?[{fn:n.onorientation}]:[],i.call(this,n)}}(Howl.prototype.init),Howl.prototype.stereo=function(i,n){var a=this;if(!a._webAudio)return a;if(a._state!=="loaded")return a._queue.push({event:"stereo",action:function(){a.stereo(i,n)}}),a;var s=typeof Howler.ctx.createStereoPanner>"u"?"spatial":"stereo";if(typeof n>"u")if(typeof i=="number")a._stereo=i,a._pos=[i,0,0];else return a._stereo;for(var r=a._getSoundIds(n),o=0;o<r.length;o++){var l=a._soundById(r[o]);if(l)if(typeof i=="number")l._stereo=i,l._pos=[i,0,0],l._node&&(l._pannerAttr.panningModel="equalpower",(!l._panner||!l._panner.pan)&&t(l,s),s==="spatial"?typeof l._panner.positionX<"u"?(l._panner.positionX.setValueAtTime(i,Howler.ctx.currentTime),l._panner.positionY.setValueAtTime(0,Howler.ctx.currentTime),l._panner.positionZ.setValueAtTime(0,Howler.ctx.currentTime)):l._panner.setPosition(i,0,0):l._panner.pan.setValueAtTime(i,Howler.ctx.currentTime)),a._emit("stereo",l._id);else return l._stereo}return a},Howl.prototype.pos=function(i,n,a,s){var r=this;if(!r._webAudio)return r;if(r._state!=="loaded")return r._queue.push({event:"pos",action:function(){r.pos(i,n,a,s)}}),r;if(n=typeof 
n!="number"?0:n,a=typeof a!="number"?-.5:a,typeof s>"u")if(typeof i=="number")r._pos=[i,n,a];else return r._pos;for(var o=r._getSoundIds(s),l=0;l<o.length;l++){var u=r._soundById(o[l]);if(u)if(typeof i=="number")u._pos=[i,n,a],u._node&&((!u._panner||u._panner.pan)&&t(u,"spatial"),typeof u._panner.positionX<"u"?(u._panner.positionX.setValueAtTime(i,Howler.ctx.currentTime),u._panner.positionY.setValueAtTime(n,Howler.ctx.currentTime),u._panner.positionZ.setValueAtTime(a,Howler.ctx.currentTime)):u._panner.setPosition(i,n,a)),r._emit("pos",u._id);else
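/*
* A minimal sketch of the spatial plugin above: position the global listener, then place an
* individual sound in 3D space or pan it left/right. The asset path and coordinates are
* illustrative placeholders:
*
*   Howler.pos(0, 0, 0);                   // listener position
*   Howler.orientation(0, 0, -1, 0, 1, 0); // listener forward / up vectors
*
*   const engine = new Howl({
*     src: ['sounds/engine.ogg'],
*     panningModel: 'HRTF',   // spatial options are read as flat keys by the init override above
*     refDistance: 1,
*     rolloffFactor: 1
*   });
*   const id = engine.play();
*   engine.pos(3, 0, -2, id);  // place this playback in 3D space
*   engine.stereo(-0.5, id);   // or use plain left/right panning instead
*/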
* {@link https://github.com/muaz-khan/RecordRTC|RecordRTC} is a WebRTC JavaScript library for audio/video as well as screen activity recording. It supports Chrome, Firefox, Opera, Android, and Microsoft Edge. Platforms: Linux, Mac and Windows.
* @summary Record audio, video or screen inside the browser.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef RecordRTC
* @class
* @example
* var recorder = RecordRTC(mediaStream or [arrayOfMediaStream], {
*     type: 'video', // audio or video or gif or canvas
*     recorderType: MediaStreamRecorder || CanvasRecorder || StereoAudioRecorder || Etc
* });
* recorder.startRecording();
* @see For further information:
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc.
* @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, desiredSampRate: 16000, video: HTMLVideoElement, etc.}
*/function t(I,D){if(!I)throw"First parameter is required.";D=D||{type:"video"},D=new i(I,D);var O=this;function Ae(de){return D.disableLogs||console.log("RecordRTC version: ",O.version),de&&(D=new i(I,de)),D.disableLogs||console.log("started recording "+D.type+" stream."),re?(re.clearRecordedData(),re.record(),ke("recording"),O.recordingDuration&&ue(),O):(te(function(){O.recordingDuration&&ue()}),O)}function te(de){de&&(D.initCallback=function(){de(),de=D.initCallback=null});var De=new n(I,D);re=new De(I,D),re.record(),ke("recording"),D.disableLogs||console.log("Initialized recorderType:",re.constructor.name,"for output-type:",D.type)}function ae(de){if(de=de||function(){},!re){K();return}if(O.state==="paused"){O.resumeRecording(),setTimeout(function(){ae(de)},1);return}O.state!=="recording"&&!D.disableLogs&&console.warn('Recording state should be: "recording", however current state is: ',O.state),D.disableLogs||console.log("Stopped recording "+D.type+" stream."),D.type!=="gif"?re.stop(De):(re.stop(),De()),ke("stopped");function De(Ie){if(!re){typeof de.call=="function"?de.call(O,""):de("");return}Object.keys(re).forEach(function(Ne){typeof re[Ne]!="function"&&(O[Ne]=re[Ne])});var fe=re.blob;if(!fe)if(Ie)re.blob=fe=Ie;else throw"Recording failed.";if(fe&&!D.disableLogs&&console.log(fe.type,"->",w(fe.size)),de){var be;try{be=f.createObjectURL(fe)}catch{}typeof de.call=="function"?de.call(O,be):de(be)}D.autoWriteToDisk&&we(function(Ne){var Ue={};Ue[D.type+"Blob"]=Ne,ge.Store(Ue)})}}function he(){if(!re){K();return}if(O.state!=="recording"){D.disableLogs||console.warn("Unable to pause the recording. Recording state: ",O.state);return}ke("paused"),re.pause(),D.disableLogs||console.log("Paused recording.")}function X(){if(!re){K();return}if(O.state!=="paused"){D.disableLogs||console.warn("Unable to resume the recording. 
Recording state: ",O.state);return}ke("recording"),re.resume(),D.disableLogs||console.log("Resumed recording.")}function ve(de){postMessage(new FileReaderSync().readAsDataURL(de))}function we(de,De){if(!de)throw"Pass a callback function over getDataURL.";var Ie=De?De.blob:(re||{}).blob;if(!Ie){D.disableLogs||console.warn("Blob encoder did not finish its job yet."),setTimeout(function(){we(de,De)},1e3);return}if(typeof Worker<"u"&&!navigator.mozGetUserMedia){var fe=Ne(ve);fe.onmessage=function(Ue){de(Ue.data)},fe.postMessage(Ie)}else{var be=new FileReader;be.readAsDataURL(Ie),be.onload=function(Ue){de(Ue.target.result)}}function Ne(Ue){try{var je=f.createObjectURL(new Blob([Ue.toString(),"this.onmessage = function (eee) {"+Ue.name+"(eee.data);}"],{type:"application/javascript"})),Ge=new Worker(je);return f.revokeObjectURL(je),Ge}catch{}}}function ue(de){if(de=de||0,O.state==="paused"){setTimeout(function(){ue(de)},1e3);return}if(O.state!=="stopped"){if(de>=O.recordingDuration){ae(O.onRecordingStopped);return}de+=1e3,setTimeout(function(){ue(de)},1e3)}}function ke(de){O&&(O.state=de,typeof O.onStateChanged.call=="function"?O.onStateChanged.call(O,de):O.onStateChanged(de))}var G='It seems that recorder is destroyed or "startRecording" is not invoked for '+D.type+" recorder.";function K(){D.disableLogs!==!0&&console.warn(G)}var re,Ce={startRecording:Ae,stopRecording:ae,pauseRecording:he,resumeRecording:X,initRecorder:te,setRecordingDuration:function(de,De){if(typeof de>"u")throw"recordingDuration is required.";if(typeof de!="number")throw"recordingDuration must be a number.";return O.recordingDuration=de,O.onRecordingStopped=De||function(){},{onRecordingStopped:function(Ie){O.onRecordingStopped=Ie}}},clearRecordedData:function(){if(!re){K();return}re.clearRecordedData(),D.disableLogs||console.log("Cleared old recorded data.")},getBlob:function(){if(!re){K();return}return re.blob},getDataURL:we,toURL:function(){if(!re){K();return}return f.createObjectURL(re.blob)},getInternalRecorder:function(){return re},save:function(de){if(!re){K();return}C(re.blob,de)},getFromDisk:function(de){if(!re){K();return}t.getFromDisk(D.type,de)},setAdvertisementArray:function(de){D.advertisement=[];for(var De=de.length,Ie=0;Ie<De;I
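/*
* The RecordRTC factory above also exposes stopRecording/getBlob/save, which the JSDoc example
* does not show. A minimal end-to-end sketch, assuming a getUserMedia stream and an arbitrary
* 5-second recording window (both are assumptions, not values from this bundle):
*
*   navigator.mediaDevices.getUserMedia({ audio: true, video: true }).then(function(stream) {
*     const recorder = RecordRTC(stream, { type: 'video' });
*     recorder.startRecording();
*     setTimeout(function() {
*       recorder.stopRecording(function(objectURL) {   // callback receives an object URL
*         const blob = recorder.getBlob();             // the recorded Blob itself
*         console.log(blob.type, objectURL);
*         recorder.save('my-recording.webm');          // optional download helper
*       });
*     }, 5000);
*   });
*/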
* {@link RecordRTCConfiguration} is an inner/private helper for {@link RecordRTC}.
* @summary It configures the 2nd parameter passed to {@link RecordRTC} and returns a valid "config" object.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef RecordRTCConfiguration
* @class
* @example
* var options = RecordRTCConfiguration(mediaStream, options);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, getNativeBlob:true, etc.}
*/function i(I,D){return!D.recorderType&&!D.type&&(D.audio&&D.video?D.type="video":D.audio&&!D.video&&(D.type="audio")),D.recorderType&&!D.type&&(D.recorderType===ce||D.recorderType===Z||typeof q<"u"&&D.recorderType===q?D.type="video":D.recorderType===le?D.type="gif":D.recorderType===W?D.type="audio":D.recorderType===R&&(S(I,"audio").length&&S(I,"video").length||!S(I,"audio").length&&S(I,"video").length?D.type="video":S(I,"audio").length&&!S(I,"video").length&&(D.type="audio"))),typeof R<"u"&&typeof MediaRecorder<"u"&&"requestData"in MediaRecorder.prototype&&(D.mimeType||(D.mimeType="video/webm"),D.type||(D.type=D.mimeType.split("/")[0]),D.bitsPerSecond),D.type||(D.mimeType&&(D.type=D.mimeType.split("/")[0]),D.type||(D.type="audio")),D}/**
* {@link GetRecorderType} is an inner/private helper for {@link RecordRTC}.
* @summary It returns the best recorder-type available for your browser.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef GetRecorderType
* @class
* @example
* var RecorderType = GetRecorderType(options);
* var recorder = new RecorderType(options);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {type:"video", disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}
*/function n(I,D){var O;return(F||A||h)&&(O=W),typeof MediaRecorder<"u"&&"requestData"in MediaRecorder.prototype&&!F&&(O=R),D.type==="video"&&(F||h)&&(O=ce,typeof q<"u"&&typeof ReadableStream<"u"&&(O=q)),D.type==="gif"&&(O=le),D.type==="canvas"&&(O=Z),Q()&&O!==Z&&O!==le&&typeof MediaRecorder<"u"&&"requestData"in MediaRecorder.prototype&&(S(I,"video").length||S(I,"audio").length)&&(D.type==="audio"?typeof MediaRecorder.isTypeSupported=="function"&&MediaRecorder.isTypeSupported("audio/webm")&&(O=R):typeof MediaRecorder.isTypeSupported=="function"&&MediaRecorder.isTypeSupported("video/webm")&&(O=R)),I instanceof Array&&I.length&&(O=me),D.recorderType&&(O=D.recorderType),!D.disableLogs&&O&&O.name&&console.log("Using recorderType:",O.name||O.constructor.name),!O&&y&&(O=R),O}/**
* MRecordRTC runs on top of {@link RecordRTC} to bring multiple recordings into a single place, by providing a simple API.
* @summary MRecordRTC stands for "Multiple-RecordRTC".
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef MRecordRTC
* @class
* @example
* var recorder = new MRecordRTC();
* recorder.addStream(MediaStream);
* recorder.mediaType = {
*     audio: true, // or StereoAudioRecorder or MediaStreamRecorder
*     video: true, // or WhammyRecorder or MediaStreamRecorder or WebAssemblyRecorder or CanvasRecorder
*     gif: true // or GifRecorder
* };
* // mimeType is optional and should be set only in advanced cases.
* recorder.mimeType = {
*     audio: 'audio/wav',
*     video: 'video/webm',
*     gif: 'image/gif'
* };
* recorder.startRecording();
* @see For further information:
* @see {@link https://github.com/muaz-khan/RecordRTC/tree/master/MRecordRTC|MRecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @requires {@link RecordRTC}
*/function a(I){this.addStream=function(D){D&&(I=D)},this.mediaType={audio:!0,video:!0},this.startRecording=function(){var D=this.mediaType,O,Ae=this.mimeType||{audio:null,video:null,gif:null};if(typeof D.audio!="function"&&Q()&&!S(I,"audio").length&&(D.audio=!1),typeof D.video!="function"&&Q()&&!S(I,"video").length&&(D.video=!1),typeof D.gif!="function"&&Q()&&!S(I,"video").length&&(D.gif=!1),!D.audio&&!D.video&&!D.gif)throw"MediaStream must have either audio or video tracks.";if(D.audio&&(O=null,typeof D.audio=="function"&&(O=D.audio),this.audioRecorder=new t(I,{type:"audio",bufferSize:this.bufferSize,sampleRate:this.sampleRate,numberOfAudioChannels:this.numberOfAudioChannels||2,disableLogs:this.disableLogs,recorderType:O,mimeType:Ae.audio,timeSlice:this.timeSlice,onTimeStamp:this.onTimeStamp}),D.video||this.audioRecorder.startRecording()),D.video){O=null,typeof D.video=="function"&&(O=D.video);var te=I;if(Q()&&D.audio&&typeof D.audio=="function"){var ae=S(I,"video")[0];p?(te=new E,te.addTrack(ae),O&&O===ce&&(O=R)):(te=new E,te.addTrack(ae))}this.videoRecorder=new t(te,{type:"video",video:this.video,canvas:this.canvas,frameInterval:this.frameInterval||10,disableLogs:this.disableLogs,recorderType:O,mimeType:Ae.video,timeSlice:this.timeSlice,onTimeStamp:this.onTimeStamp,workerPath:this.workerPath,webAssemblyPath:this.webAssemblyPath,frameRate:this.frameRate,bitrate:this.bitrate}),D.audio||this.videoRecorder.startRecording()}if(D.audio&&D.video){var he=this,X=Q()===!0;(D.audio instanceof W&&D.video||D.audio!==!0&&D.video!==!0&&D.audio!==D.video)&&(X=!1),X===!0?(he.audioRecorder=null,he.videoRecorder.startRecording()):he.videoRecorder.initRecorder(function(){he.audioRecorder.initRecorder(function(){he.videoRecorder.startRecording(),he.audioRecorder.startRecording()})})}D.gif&&(O=null,typeof D.gif=="function"&&(O=D.gif),this.gifRecorder=new t(I,{type:"gif",frameRate:this.frameRate||200,quality:this.quality||10,disableLogs:this.disableLogs,recorderType:O,mimeType:Ae.gif}),this.gifRecorder.startRecording())},this.stopRecording=function(D){D=D||function(){},this.audioRecorder&&this.audioRecorder.stopRecording(function(O){D(O,"audio")}),this.videoRecorder&&this.videoRecorder.stopRecording(function(O){D(O,"video")}),this.gifRecorder&&this.gifRecorder.stopRecording(function(O){D(O,"gif")})},this.pauseRecording=function(){this.audioRecorder&&this.audioRecorder.pauseRecording(),this.videoRecorder&&this.videoRecorder.pauseRecording(),this.gifRecorder&&this.gifRecorder.pauseRecording()},this.resumeRecording=function(){this.audioRecorder&&this.audioRecorder.resumeRecording(),this.videoRecorder&&this.videoRecorder.resumeRecording(),this.gifRecorder&&this.gifRecorder.resumeRecording()},this.getBlob=function(D){var O={};return this.audioRecorder&&(O.audio=this.audioRecorder.getBlob()),this.videoRecorder&&(O.video=this.videoRecorder.getBlob()),this.gifRecorder&&(O.gif=this.gifRecorder.getBlob()),D&&D(O),O},this.destroy=function(){this.audioRecorder&&(this.audioRecorder.destroy(),this.audioRecorder=null),this.videoRecorder&&(this.videoRecorder.destroy(),this.videoRecorder=null),this.gifRecorder&&(this.gifRecorder.destroy(),this.gifRecorder=null)},this.getDataURL=function(D){this.getBlob(function(te){te.audio&&te.video?O(te.audio,function(ae){O(te.video,function(he){D({audio:ae,video:he})})}):te.audio?O(te.audio,function(ae){D({audio:ae})}):te.video&&O(te.video,function(ae){D({video:ae})})});function O(te,ae){if(typeof Worker<"u"){var he=Ae(function(we){postMessage(new 
FileReaderSync().readAsDataURL(we))});he.onmessage=function(ve){ae(ve.data)},he.postMessage(te)}else{var X=new FileReader;X.readAsDataURL(te),X.onload=function(ve){ae(ve.target.result)}}}function Ae(te){var ae=f.createObjectURL(new Blob([te.toString(),"this.onmessage = function (eee) {"+te.name+"(eee.data);}"],{type:"application/javascript"})),he=new Worker(ae),X;if(typeof f<"u")X=f;else if(typeof webkitURL<"u")X=webkitURL;else throw"Neither URL nor webkitURL detected.";return X.revokeObjectURL(ae),he}},this.writeToDisk=function(){t.writeToDisk({audio:this.audioRecorder,vid
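/*
* A short sketch of how the MRecordRTC recorder above reports results: stopRecording fires its
* callback once per recorded type and passes an object URL. The stream variable is a placeholder
* for a getUserMedia result:
*
*   const recorder = new MRecordRTC();
*   recorder.addStream(stream);
*   recorder.mediaType = { audio: true, video: true };
*   recorder.startRecording();
*   // ...later
*   recorder.stopRecording(function(url, type) {
*     console.log(type, url);   // fires once per type: 'audio', 'video' or 'gif'
*   });
*   // recorder.getBlob(callback) later collects { audio, video, gif } blobs in one object
*/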
* Storage is a standalone object used by {@link RecordRTC} to store reusable objects e.g. "new AudioContext".
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @example
* Storage.AudioContext === webkitAudioContext
* @property {webkitAudioContext} AudioContext - Keeps a reference to AudioContext object.
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
*/var z={};typeof u<"u"?z.AudioContext=u:typeof webkitAudioContext<"u"&&(z.AudioContext=webkitAudioContext),typeof t<"u"&&(t.Storage=z);function Q(){if(p||y||A)return!0;var I=navigator.userAgent,D=""+parseFloat(navigator.appVersion),O=parseInt(navigator.appVersion,10),Ae,te;return(F||h)&&(Ae=I.indexOf("Chrome"),D=I.substring(Ae+7)),(te=D.indexOf(";"))!==-1&&(D=D.substring(0,te)),(te=D.indexOf(" "))!==-1&&(D=D.substring(0,te)),O=parseInt(""+D,10),isNaN(O)&&(D=""+parseFloat(navigator.appVersion),O=parseInt(navigator.appVersion,10)),O>=49}/**
* MediaStreamRecorder is an abstraction layer for {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}. It is used by {@link RecordRTC} to record MediaStream(s) in both Chrome and Firefox.
* @summary Runs on top of {@link https://w3c.github.io/mediacapture-record/MediaRecorder.html|MediaRecorder API}.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://github.com/muaz-khan|Muaz Khan}
* @typedef MediaStreamRecorder
* @class
* @example
* var config = {
*     mimeType: 'video/webm', // vp8, vp9, h264, mkv, opus/vorbis
*     audioBitsPerSecond: 256 * 8 * 1024,
*     videoBitsPerSecond: 256 * 8 * 1024,
*     bitsPerSecond: 256 * 8 * 1024, // if provided, the above two are ignored
*     checkForInactiveTracks: true,
*     timeSlice: 1000, // produce interval-based blobs
*     ondataavailable: function() {} // receives the interval-based blobs
* }
* var recorder = new MediaStreamRecorder(mediaStream, config);
* recorder.record();
* recorder.stop(function(blob) {
*     video.src = URL.createObjectURL(blob);
*
*     // or
*     var blob = recorder.blob;
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {disableLogs:true, initCallback: function, mimeType: "video/webm", timeSlice: 1000}
* @throws Will throw an error if first argument "MediaStream" is missing. Also throws error if "MediaRecorder API" are not supported by the browser.
*/function R(I,D){var ue=this;if(typeof I>"u")throw'First argument "MediaStream" is required.';if(typeof MediaRecorder>"u")throw"Your browser does not support the Media Recorder API. Please try other modules e.g. WhammyRecorder or StereoAudioRecorder.";if(D=D||{mimeType:"video/webm"},D.type==="audio"){if(S(I,"video").length&&S(I,"audio").length){var O;navigator.mozGetUserMedia?(O=new E,O.addTrack(S(I,"audio")[0])):O=new E(S(I,"audio")),I=O}(!D.mimeType||D.mimeType.toString().toLowerCase().indexOf("audio")===-1)&&(D.mimeType=F?"audio/webm":"audio/ogg"),D.mimeType&&D.mimeType.toString().toLowerCase()!=="audio/ogg"&&navigator.mozGetUserMedia&&(D.mimeType="audio/ogg")}var Ae=[];this.getArrayOfBlobs=function(){return Ae},this.record=function(){ue.blob=null,ue.clearRecordedData(),ue.timestamps=[],we=[],Ae=[];var ke=D;D.disableLogs||console.log("Passing following config over MediaRecorder API.",ke),X&&(X=null),F&&!Q()&&(ke="video/vp8"),typeof MediaRecorder.isTypeSupported=="function"&&ke.mimeType&&(MediaRecorder.isTypeSupported(ke.mimeType)||(D.disableLogs||console.warn("MediaRecorder API seems unable to record mimeType:",ke.mimeType),ke.mimeType=D.type==="audio"?"audio/webm":"video/webm"));try{X=new MediaRecorder(I,ke),D.mimeType=ke.mimeType}catch{X=new MediaRecorder(I)}ke.mimeType&&!MediaRecorder.isTypeSupported&&"canRecordMimeType"in X&&X.canRecordMimeType(ke.mimeType)===!1&&(D.disableLogs||console.warn("MediaRecorder API seems unable to record mimeType:",ke.mimeType)),X.ondataavailable=function(G){if(G.data&&we.push("ondataavailable: "+w(G.data.size)),typeof D.timeSlice=="number"){if(G.data&&G.data.size&&(Ae.push(G.data),te(),typeof D.ondataavailable=="function")){var K=D.getNativeBlob?G.data:new Blob([G.data],{type:ae(ke)});D.ondataavailable(K)}return}if(!G.data||!G.data.size||G.data.size<100||ue.blob){ue.recordingCallback&&(ue.recordingCallback(new Blob([],{type:ae(ke)})),ue.recordingCallback=null);return}ue.blob=D.getNativeBlob?G.data:new Blob([G.data],{type:ae(ke)}),ue.recordingCallback&&(ue.recordingCallback(ue.blob),ue.recordingCallback=null)},X.onstart=function(){we.push("started")},X.onpause=function(){we.push("paused")},X.onresume=function(){we.push("resumed")},X.onstop=function(){we.push("stopped")},X.onerror=function(G){G&&(G.name||(G.name="UnknownError"),we.push("error: "+G),D.disableLogs||(G.name.toString().toLowerCase().indexOf("invalidstate")!==-1?console.error("The MediaRecorder is not in a state in which the proposed operation is allowed to be executed.",G):G.name.toString().toLowerCase().indexOf("notsupported")!==-1?console.error("MIME type (",ke.mimeType,") is not supported.",G):G.name.toString().toLowerCase().indexOf("security")!==-1?console.error("MediaRecorder security error",G):G.name==="OutOfMemory"?console.error("The UA has exhaused the available memory. User agents SHOULD provide as much additional information as possible in the message attribute.",G):G.name==="IllegalStreamModification"?console.error("A modification to the stream has occurred that makes it impossible to continue recording. An example would be the addition of a Track while recording is occurring. User agents SHOULD provide as much additional information as possible in the message attribute.",G):G.name==="OtherRecordingError"?console.error("Used for an fatal error other than those listed above. 
User agents SHOULD provide as much additional information as possible in the message attribute.",G):G.name==="GenericError"?console.error("The UA cannot provide the codec or recording option that has been requested.",G):console.error("MediaRecorder Error",G)),function(K){if(!ue.manuallyStopped&&X&&X.state==="inactive"){delete D.timeslice,X.start(10*60*1e3);return}setTimeout(K,1e3)}(),X.state!=="inactive"&&X.state!=="stopped"&&X.stop())},typeof D.timeSlice=="number"?(te(),X.start(D.timeSlice)):X.start(36e5),D.initCallback&&D.initCallback()},this.timestamps=[];function te(){ue.timestamps.push(new Date().getTime()),typeof D.onTimeStamp=="function"&&D.onTimeStamp(ue.timestamps[ue.timestamps.length-1],ue.timestamps)}function ae(ke){return X&&X.m
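/*
* The config comment above mentions timeSlice/ondataavailable; a small sketch of that chunked
* mode, assuming a getUserMedia stream (the way the chunks are consumed is only an illustration):
*
*   const recorder = new MediaStreamRecorder(stream, {
*     mimeType: 'video/webm',
*     timeSlice: 1000,                          // one blob roughly every second
*     ondataavailable: function(chunk) {
*       console.log('chunk', chunk.size);       // e.g. queue the chunk for upload
*     }
*   });
*   recorder.record();
*   // ...later
*   recorder.stop(function() {
*     const parts = recorder.getArrayOfBlobs(); // every chunk captured so far
*     const full = new Blob(parts, { type: 'video/webm' });
*     console.log('assembled recording:', full.size);
*   });
*/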
* StereoAudioRecorder is a standalone class used by {@link RecordRTC} to bring "stereo" audio-recording in Chrome.
* @summary JavaScript standalone object for stereo audio recording.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef StereoAudioRecorder
* @class
* @example
* var recorder = new StereoAudioRecorder(MediaStream, {
*     sampleRate: 44100,
*     bufferSize: 4096
* });
* recorder.record();
* recorder.stop(function(blob) {
*     video.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {sampleRate: 44100, bufferSize: 4096, numberOfAudioChannels: 1, etc.}
*/function W(I,D){if(!S(I,"audio").length)throw"Your stream has no audio tracks.";D=D||{};var O=this,Ae=[],te=[],ae=!1,he=0,X,ve=2,we=D.desiredSampRate;D.leftChannel===!0&&(ve=1),D.numberOfAudioChannels===1&&(ve=1),(!ve||ve<1)&&(ve=2),D.disableLogs||console.log("StereoAudioRecorder is set to record number of channels: "+ve),typeof D.checkForInactiveTracks>"u"&&(D.checkForInactiveTracks=!0);function ue(){if(D.checkForInactiveTracks===!1)return!0;if("active"in I){if(!I.active)return!1}else if("ended"in I&&I.ended)return!1;return!0}this.record=function(){if(ue()===!1)throw"Please make sure MediaStream is active.";Ie(),be=De=!1,ae=!0,typeof D.timeSlice<"u"&&je()};function ke(Ge,Ye){function et(ze,Je){var At=ze.numberOfAudioChannels,Ze=ze.leftBuffers.slice(0),bt=ze.rightBuffers.slice(0),kt=ze.sampleRate,_t=ze.internalInterleavedLength,Lt=ze.desiredSampRate;At===2&&(Ze=gi(Ze,_t),bt=gi(bt,_t),Lt&&(Ze=Dt(Ze,Lt,kt),bt=Dt(bt,Lt,kt))),At===1&&(Ze=gi(Ze,_t),Lt&&(Ze=Dt(Ze,Lt,kt))),Lt&&(kt=Lt);function Dt(Di,In,un){var pi=Math.round(Di.length*(In/un)),nt=[],V=Number((Di.length-1)/(pi-1));nt[0]=Di[0];for(var ie=1;ie<pi-1;ie++){var _e=ie*V,qe=Number(Math.floor(_e)).toFixed(),st=Number(Math.ceil(_e)).toFixed(),Rt=_e-qe;nt[ie]=en(Di[qe],Di[st],Rt)}return nt[pi-1]=Di[Di.length-1],nt}function en(Di,In,un){return Di+(In-Di)*un}function gi(Di,In){for(var un=new Float64Array(In),pi=0,nt=Di.length,V=0;V<nt;V++){var ie=Di[V];un.set(ie,pi),pi+=ie.length}return un}function as(Di,In){for(var un=Di.length+In.length,pi=new Float64Array(un),nt=0,V=0;V<un;)pi[V++]=Di[nt],pi[V++]=In[nt],nt++;return pi}function ea(Di,In,un){for(var pi=un.length,nt=0;nt<pi;nt++)Di.setUint8(In+nt,un.charCodeAt(nt))}var _a;At===2&&(_a=as(Ze,bt)),At===1&&(_a=Ze);var Ia=_a.length,Ns=44+Ia*2,Pa=new ArrayBuffer(Ns),ci=new DataView(Pa);ea(ci,0,"RIFF"),ci.setUint32(4,36+Ia*2,!0),ea(ci,8,"WAVE"),ea(ci,12,"fmt "),ci.setUint32(16,16,!0),ci.setUint16(20,1,!0),ci.setUint16(22,At,!0),ci.setUint32(24,kt,!0),ci.setUint32(28,kt*At*2,!0),ci.setUint16(32,At*2,!0),ci.setUint16(34,16,!0),ea(ci,36,"data"),ci.setUint32(40,Ia*2,!0);for(var Wl=Ia,xr=44,Tr=1,Ir=0;Ir<Wl;Ir++)ci.setInt16(xr,_a[Ir]*(32767*Tr),!0),xr+=2;if(Je)return Je({buffer:Pa,view:ci});postMessage({buffer:Pa,view:ci})}if(Ge.noWorker){et(Ge,function(ze){Ye(ze.buffer,ze.view)});return}var ye=G(et);ye.onmessage=function(ze){Ye(ze.data.buffer,ze.data.view),f.revokeObjectURL(ye.workerURL),ye.terminate()},ye.postMessage(Ge)}function G(Ge){var Ye=f.createObjectURL(new Blob([Ge.toString(),";this.onmessage = function (eee) {"+Ge.name+"(eee.data);}"],{type:"application/javascript"})),et=new Worker(Ye);return et.workerURL=Ye,et}this.stop=function(Ge){Ge=Ge||function(){},ae=!1,ke({desiredSampRate:we,sampleRate:de,numberOfAudioChannels:ve,internalInterleavedLength:he,leftBuffers:Ae,rightBuffers:ve===1?[]:te,noWorker:D.noWorker},function(Ye,et){O.blob=new Blob([et],{type:"audio/wav"}),O.buffer=new ArrayBuffer(et.buffer.byteLength),O.view=et,O.sampleRate=we||de,O.bufferSize=Be,O.length=he,be=!1,Ge&&Ge(O.blob)})},typeof t.Storage>"u"&&(t.Storage={AudioContextConstructor:null,AudioContext:window.AudioContext||window.webkitAudioContext}),(!t.Storage.AudioContextConstructor||t.Storage.AudioContextConstructor.state==="closed")&&(t.Storage.AudioContextConstructor=new t.Storage.AudioContext);var K=t.Storage.AudioContextConstructor,re=K.createMediaStreamSource(I),Ce=[0,256,512,1024,2048,4096,8192,16384],Be=typeof D.bufferSize>"u"?4096:D.bufferSize;if(Ce.indexOf(Be)===-1&&(D.disableLogs||console.log("Legal values for 
buffer-size are "+JSON.stringify(Ce,null," "))),K.createJavaScriptNode)X=K.createJavaScriptNode(Be,ve,ve);else if(K.createScriptProcessor)X=K.createScriptProcessor(Be,ve,ve);else throw"WebAudio API has no support on this browser.";re.connect(X),D.bufferSize||(Be=X.bufferSize);var de=typeof D.sampleRate<"u"?D.sampleRate:K.sampleRate||44100;(de<22050||de>96e3)&&(D.disableLogs||console.log("sample-rate must be under range 22050 and 96000.")),D.disableLogs||D.desiredSampRate&&console.log("Desired sample-rate: "+D.desiredSampRate);var De=!1;th
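/*
* A sketch of the mono / resampled options handled by StereoAudioRecorder above
* (numberOfAudioChannels, desiredSampRate, bufferSize); stream is a placeholder for a
* getUserMedia result that contains an audio track:
*
*   const recorder = new StereoAudioRecorder(stream, {
*     numberOfAudioChannels: 1,   // record a single channel
*     desiredSampRate: 16000,     // resample the resulting WAV to 16 kHz
*     bufferSize: 4096            // must be one of the legal sizes listed in the code above
*   });
*   recorder.record();
*   // ...later
*   recorder.stop(function(blob) {
*     console.log(blob.type, recorder.sampleRate, recorder.length); // 'audio/wav', 16000, sample count
*   });
*/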
* CanvasRecorder is a standalone class used by {@link RecordRTC} to bring HTML5-Canvas recording into video WebM. It uses the HTML2Canvas library and runs on top of {@link Whammy}.
* @summary HTML2Canvas recording into video WebM.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef CanvasRecorder
* @class
* @example
* var recorder = new CanvasRecorder(htmlElement, { disableLogs: true, useWhammyRecorder: true });
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {HTMLElement} htmlElement - querySelector/getElementById/getElementsByTagName[0]/etc.
* @param {object} config - {disableLogs:true, initCallback: function}
*/function Z(I,D){if(typeof html2canvas>"u")throw"Please link: https://www.webrtc-experiment.com/screenshot.js";D=D||{},D.frameInterval||(D.frameInterval=10);var O=!1;["captureStream","mozCaptureStream","webkitCaptureStream"].forEach(function(Ce){Ce in document.createElement("canvas")&&(O=!0)});var Ae=(!!window.webkitRTCPeerConnection||!!window.webkitGetUserMedia)&&!!window.chrome,te=50,ae=navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./);Ae&&ae&&ae[2]&&(te=parseInt(ae[2],10)),Ae&&te<52&&(O=!1),D.useWhammyRecorder&&(O=!1);var he,X;if(O)if(D.disableLogs||console.log("Your browser supports both MediRecorder API and canvas.captureStream!"),I instanceof HTMLCanvasElement)he=I;else if(I instanceof CanvasRenderingContext2D)he=I.canvas;else throw"Please pass either HTMLCanvasElement or CanvasRenderingContext2D.";else navigator.mozGetUserMedia&&(D.disableLogs||console.error("Canvas recording is NOT supported in Firefox."));var ve;this.record=function(){if(ve=!0,O&&!D.useWhammyRecorder){var Ce;"captureStream"in he?Ce=he.captureStream(25):"mozCaptureStream"in he?Ce=he.mozCaptureStream(25):"webkitCaptureStream"in he&&(Ce=he.webkitCaptureStream(25));try{var Be=new E;Be.addTrack(S(Ce,"video")[0]),Ce=Be}catch{}if(!Ce)throw"captureStream API are NOT available.";X=new R(Ce,{mimeType:D.mimeType||"video/webm"}),X.record()}else re.frames=[],K=new Date().getTime(),G();D.initCallback&&D.initCallback()},this.getWebPImages=function(Ce){if(I.nodeName.toLowerCase()!=="canvas"){Ce();return}var Be=re.frames.length;re.frames.forEach(function(de,De){var Ie=Be-De;D.disableLogs||console.log(Ie+"/"+Be+" frames remaining"),D.onEncodingCallback&&D.onEncodingCallback(Ie,Be);var fe=de.image.toDataURL("image/webp",1);re.frames[De].image=fe}),D.disableLogs||console.log("Generating WebM"),Ce()},this.stop=function(Ce){ve=!1;var Be=this;if(O&&X){X.stop(Ce);return}this.getWebPImages(function(){re.compile(function(de){D.disableLogs||console.log("Recording finished!"),Be.blob=de,Be.blob.forEach&&(Be.blob=new Blob([],{type:"video/webm"})),Ce&&Ce(Be.blob),re.frames=[]})})};var we=!1;this.pause=function(){if(we=!0,X instanceof R){X.pause();return}},this.resume=function(){if(we=!1,X instanceof R){X.resume();return}ve||this.record()},this.clearRecordedData=function(){ve&&this.stop(ue),ue()};function ue(){re.frames=[],ve=!1,we=!1}this.name="CanvasRecorder",this.toString=function(){return this.name};function ke(){var Ce=document.createElement("canvas"),Be=Ce.getContext("2d");return Ce.width=I.width,Ce.height=I.height,Be.drawImage(I,0,0),Ce}function G(){if(we)return K=new Date().getTime(),setTimeout(G,500);if(I.nodeName.toLowerCase()==="canvas"){var Ce=new Date().getTime()-K;K=new Date().getTime(),re.frames.push({image:ke(),duration:Ce}),ve&&setTimeout(G,D.frameInterval);return}html2canvas(I,{grabMouse:typeof D.showMousePointer>"u"||D.showMousePointer,onrendered:function(Be){var de=new Date().getTime()-K;if(!de)return setTimeout(G,D.frameInterval);K=new Date().getTime(),re.frames.push({image:Be.toDataURL("image/webp",1),duration:de}),ve&&setTimeout(G,D.frameInterval)}})}var K=new Date().getTime(),re=new Fe.Video(100)}typeof t<"u"&&(t.CanvasRecorder=Z);/**
* WhammyRecorder is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It runs on top of {@link Whammy}.
* @summary Video recording feature in Chrome.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef WhammyRecorder
* @class
* @example
* var recorder = new WhammyRecorder(mediaStream);
* recorder.record();
* recorder.stop(function(blob) {
*     video.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {disableLogs: true, initCallback: function, video: HTMLVideoElement, etc.}
*/function ce(I,D){D=D||{},D.frameInterval||(D.frameInterval=10),D.disableLogs||console.log("Using frames-interval:",D.frameInterval),this.record=function(){D.width||(D.width=320),D.height||(D.height=240),D.video||(D.video={width:D.width,height:D.height}),D.canvas||(D.canvas={width:D.width,height:D.height}),ve.width=D.canvas.width||320,ve.height=D.canvas.height||240,we=ve.getContext("2d"),D.video&&D.video instanceof HTMLVideoElement?(ue=D.video.cloneNode(),D.initCallback&&D.initCallback()):(ue=document.createElement("video"),U(I,ue),ue.onloadedmetadata=function(){D.initCallback&&D.initCallback()},ue.width=D.video.width,ue.height=D.video.height),ue.muted=!0,ue.play(),ke=new Date().getTime(),G=new Fe.Video,D.disableLogs||(console.log("canvas resolutions",ve.width,"*",ve.height),console.log("video width/height",ue.width||ve.width,"*",ue.height||ve.height)),O(D.frameInterval)};function O(K){K=typeof K<"u"?K:10;var re=new Date().getTime()-ke;if(!re)return setTimeout(O,K,K);if(he)return ke=new Date().getTime(),setTimeout(O,100);ke=new Date().getTime(),ue.paused&&ue.play(),we.drawImage(ue,0,0,ve.width,ve.height),G.frames.push({duration:re,image:ve.toDataURL("image/webp")}),ae||setTimeout(O,K,K)}function Ae(K){var re=-1,Ce=K.length;(function Be(){if(re++,re===Ce){K.callback();return}setTimeout(function(){K.functionToLoop(Be,re)},1)})()}function te(K,re,Ce,Be,de){var De=document.createElement("canvas");De.width=ve.width,De.height=ve.height;var Ie=De.getContext("2d"),fe=[],be=K.length,Ne={r:0,g:0,b:0},Ue=Math.sqrt(Math.pow(255,2)+Math.pow(255,2)+Math.pow(255,2)),je=0,Ge=0,Ye=!1;Ae({length:be,functionToLoop:function(et,ye){var ze,Je,At,Ze=function(){!Ye&&At-ze<=At*Ge||(Ye=!0,fe.push(K[ye])),et()};if(Ye)Ze();else{var bt=new Image;bt.onload=function(){Ie.drawImage(bt,0,0,ve.width,ve.height);var kt=Ie.getImageData(0,0,ve.width,ve.height);ze=0,Je=kt.data.length,At=kt.data.length/4;for(var _t=0;_t<Je;_t+=4){var Lt={r:kt.data[_t],g:kt.data[_t+1],b:kt.data[_t+2]},Dt=Math.sqrt(Math.pow(Lt.r-Ne.r,2)+Math.pow(Lt.g-Ne.g,2)+Math.pow(Lt.b-Ne.b,2));Dt<=Ue*je&&ze++}Ze()},bt.src=K[ye].image}},callback:function(){fe=fe.concat(K.slice(be)),fe.length<=0&&fe.push(K[K.length-1]),de(fe)}})}var ae=!1;this.stop=function(K){K=K||function(){},ae=!0;var re=this;setTimeout(function(){te(G.frames,-1,null,null,function(Ce){G.frames=Ce,D.advertisement&&D.advertisement.length&&(G.frames=D.advertisement.concat(G.frames)),G.compile(function(Be){re.blob=Be,re.blob.forEach&&(re.blob=new Blob([],{type:"video/webm"})),K&&K(re.blob)})})},10)};var he=!1;this.pause=function(){he=!0},this.resume=function(){he=!1,ae&&this.record()},this.clearRecordedData=function(){ae||this.stop(X),X()};function X(){G.frames=[],ae=!0,he=!1}this.name="WhammyRecorder",this.toString=function(){return this.name};var ve=document.createElement("canvas"),we=ve.getContext("2d"),ue,ke,G}typeof t<"u"&&(t.WhammyRecorder=ce);/**
* Whammy is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It is written by {@link https://github.com/antimatter15|antimatter15}
* @summary A real-time JavaScript WebM encoder based on a canvas hack.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef Whammy
* @class
* @example
* var recorder = new Whammy().Video(15);
* recorder.add(context || canvas || dataURL);
* var output = recorder.compile();
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
*/var Fe=function(){function I(Ae){this.frames=[],this.duration=Ae||1,this.quality=.8}I.prototype.add=function(Ae,te){if("canvas"in Ae&&(Ae=Ae.canvas),"toDataURL"in Ae&&(Ae=Ae.toDataURL("image/webp",this.quality)),!/^data:image\/webp;base64,/ig.test(Ae))throw"Input must be formatted properly as a base64 encoded DataURI of type image/webp";this.frames.push({image:Ae,duration:te||this.duration})};function D(Ae){var te=f.createObjectURL(new Blob([Ae.toString(),"this.onmessage = function (eee) {"+Ae.name+"(eee.data);}"],{type:"application/javascript"})),ae=new Worker(te);return f.revokeObjectURL(te),ae}function O(Ae){function te(de){var De=he(de);if(!De)return[];for(var Ie=3e4,fe=[{id:440786851,data:[{data:1,id:17030},{data:1,id:17143},{data:4,id:17138},{data:8,id:17139},{data:"webm",id:17026},{data:2,id:17031},{data:2,id:17029}]},{id:408125543,data:[{id:357149030,data:[{data:1e6,id:2807729},{data:"whammy",id:19840},{data:"whammy",id:22337},{data:Ce(De.duration),id:17545}]},{id:374648427,data:[{id:174,data:[{data:1,id:215},{data:1,id:29637},{data:0,id:156},{data:"und",id:2274716},{data:"V_VP8",id:134},{data:"VP8",id:2459272},{data:1,id:131},{id:224,data:[{data:De.width,id:176},{data:De.height,id:186}]}]}]}]}],be=0,Ne=0;be<de.length;){var Ue=[],je=0;do Ue.push(de[be]),je+=de[be].duration,be++;while(be<de.length&&je<Ie);var Ge=0,Ye={id:524531317,data:ae(Ne,Ge,Ue)};fe[1].data.push(Ye),Ne+=je}return ue(fe)}function ae(de,De,Ie){return[{data:de,id:231}].concat(Ie.map(function(fe){var be=ke({frame:fe.data.slice(4),trackNum:1,timecode:Math.round(De)});return De+=fe.duration,{data:be,id:163}}))}function he(de){if(!de[0]){postMessage({error:"Something went wrong. Maybe WebP format is not supported in the current browser."});return}for(var De=de[0].width,Ie=de[0].height,fe=de[0].duration,be=1;be<de.length;be++)fe+=de[be].duration;return{duration:fe,width:De,height:Ie}}function X(de){for(var De=[];de>0;)De.push(de&255),de=de>>8;return new Uint8Array(De.reverse())}function ve(de){return new Uint8Array(de.split("").map(function(De){return De.charCodeAt(0)}))}function we(de){var De=[],Ie=de.length%8?new Array(9-de.length%8).join("0"):"";de=Ie+de;for(var fe=0;fe<de.length;fe+=8)De.push(parseInt(de.substr(fe,8),2));return new Uint8Array(De)}function ue(de){for(var De=[],Ie=0;Ie<de.length;Ie++){var fe=de[Ie].data;typeof fe=="object"&&(fe=ue(fe)),typeof fe=="number"&&(fe=we(fe.toString(2))),typeof fe=="string"&&(fe=ve(fe));var be=fe.size||fe.byteLength||fe.length,Ne=Math.ceil(Math.ceil(Math.log(be)/Math.log(2))/8),Ue=be.toString(2),je=new Array(Ne*7+7+1-Ue.length).join("0")+Ue,Ge=new Array(Ne).join("0")+"1"+je;De.push(X(de[Ie].id)),De.push(we(Ge)),De.push(fe)}return new Blob(De,{type:"video/webm"})}function ke(de){var De=0;De|=128;var Ie=[de.trackNum|128,de.timecode>>8,de.timecode&255,De].map(function(fe){return String.fromCharCode(fe)}).join("")+de.frame;return Ie}function G(de){for(var De=de.RIFF[0].WEBP[0],Ie=De.indexOf("*"),fe=0,be=[];fe<4;fe++)be[fe]=De.charCodeAt(Ie+3+fe);var Ne,Ue,je;return je=be[1]<<8|be[0],Ne=je&16383,je=be[3]<<8|be[2],Ue=je&16383,{width:Ne,height:Ue,data:De,riff:de}}function K(de,De){return parseInt(de.substr(De+4,4).split("").map(function(Ie){var fe=Ie.charCodeAt(0).toString(2);return new Array(8-fe.length+1).join("0")+fe}).join(""),2)}function re(de){for(var De=0,Ie={};De<de.length;){var fe=de.substr(De,4),be=K(de,De),Ne=de.substr(De+4+4,be);De+=8+be,Ie[fe]=Ie[fe]||[],fe==="RIFF"||fe==="LIST"?Ie[fe].push(re(Ne)):Ie[fe].push(Ne)}return Ie}function Ce(de){return[].slice.call(new 
Uint8Array(new Float64Array([de]).buffer),0).map(function(De){return String.fromCharCode(De)}).reverse().join("")}var Be=new te(Ae.map(function(de){var De=G(re(atob(de.image.slice(23))));return De.duration=de.duration,De}));postMessage(Be)}return I.prototype.compile=function(Ae){var te=D(O);te.onmessage=function(ae){if(ae.data.error){console.error(ae.data.error);return}Ae(ae.data)},te.postMessage(this.frames)},{Video:I}}();typeof t<"u"&&(t.Whammy=Fe);/**
* DiskStorage is a standalone object used by {@link RecordRTC} to store recorded blobs in IndexedDB storage.
* @summary Writing blobs into IndexedDB.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @example
* DiskStorage.Store({
*     audioBlob: yourAudioBlob,
*     videoBlob: yourVideoBlob,
*     gifBlob: yourGifBlob
* });
* DiskStorage.Fetch(function(dataURL, type) {
*     if(type === 'audioBlob') { }
*     if(type === 'videoBlob') { }
*     if(type === 'gifBlob') { }
* });
* // DiskStorage.dataStoreName = 'recordRTC';
* // DiskStorage.onError = function(error) { };
* @property {function} init - This method must be called once to initialize the IndexedDB ObjectStore, though it is invoked automatically for internal use.
* @property {function} Fetch - This method fetches stored blobs from IndexedDB.
* @property {function} Store - This method stores blobs in IndexedDB.
* @property {function} onError - This function is invoked for any known/unknown error.
* @property {string} dataStoreName - Name of the ObjectStore created in IndexedDB storage.
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
*/var ge={init:function(){var I=this;if(typeof indexedDB>"u"||typeof indexedDB.open>"u"){console.error("IndexedDB API are not available in this browser.");return}var D=1,O=this.dbName||location.href.replace(/\/|:|#|%|\.|\[|\]/g,""),Ae,te=indexedDB.open(O,D);function ae(X){X.createObjectStore(I.dataStoreName)}function he(){var X=Ae.transaction([I.dataStoreName],"readwrite");I.videoBlob&&X.objectStore(I.dataStoreName).put(I.videoBlob,"videoBlob"),I.gifBlob&&X.objectStore(I.dataStoreName).put(I.gifBlob,"gifBlob"),I.audioBlob&&X.objectStore(I.dataStoreName).put(I.audioBlob,"audioBlob");function ve(we){X.objectStore(I.dataStoreName).get(we).onsuccess=function(ue){I.callback&&I.callback(ue.target.result,we)}}ve("audioBlob"),ve("videoBlob"),ve("gifBlob")}te.onerror=I.onError,te.onsuccess=function(){if(Ae=te.result,Ae.onerror=I.onError,Ae.setVersion)if(Ae.version!==D){var X=Ae.setVersion(D);X.onsuccess=function(){ae(Ae),he()}}else he();else he()},te.onupgradeneeded=function(X){ae(X.target.result)}},Fetch:function(I){return this.callback=I,this.init(),this},Store:function(I){return this.audioBlob=I.audioBlob,this.videoBlob=I.videoBlob,this.gifBlob=I.gifBlob,this.init(),this},onError:function(I){console.error(JSON.stringify(I,null," "))},dataStoreName:"recordRTC",dbName:null};typeof t<"u"&&(t.DiskStorage=ge);/**
* GifRecorder is a standalone class used by {@link RecordRTC} to record video or canvas into an animated gif.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef GifRecorder
* @class
* @example
* var recorder = new GifRecorder(mediaStream || canvas || context, { onGifPreview: function, onGifRecordingStarted: function, width: 1280, height: 720, frameRate: 200, quality: 10 });
* recorder.record();
* recorder.stop(function(blob) {
*     img.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object or HTMLCanvasElement or CanvasRenderingContext2D.
* @param {object} config - {disableLogs:true, initCallback: function, width: 320, height: 240, frameRate: 200, quality: 10}
*/function le(I,D){if(typeof GIFEncoder>"u"){var O=document.createElement("script");O.src="https://www.webrtc-experiment.com/gif-recorder.js",(document.body||document.documentElement).appendChild(O)}D=D||{};var Ae=I instanceof CanvasRenderingContext2D||I instanceof HTMLCanvasElement;this.record=function(){if(typeof GIFEncoder>"u"){setTimeout(K.record,1e3);return}if(!ve){setTimeout(K.record,1e3);return}Ae||(D.width||(D.width=we.offsetWidth||320),D.height||(D.height=we.offsetHeight||240),D.video||(D.video={width:D.width,height:D.height}),D.canvas||(D.canvas={width:D.width,height:D.height}),he.width=D.canvas.width||320,he.height=D.canvas.height||240,we.width=D.video.width||320,we.height=D.video.height||240),G=new GIFEncoder,G.setRepeat(0),G.setDelay(D.frameRate||200),G.setQuality(D.quality||10),G.start(),typeof D.onGifRecordingStarted=="function"&&D.onGifRecordingStarted();function re(Ce){if(K.clearedRecordedData!==!0){if(te)return setTimeout(function(){re(Ce)},100);ue=r(re),typeof ke===void 0&&(ke=Ce),!(Ce-ke<90)&&(!Ae&&we.paused&&we.play(),Ae||X.drawImage(we,0,0,he.width,he.height),D.onGifPreview&&D.onGifPreview(he.toDataURL("image/png")),G.addFrame(X),ke=Ce)}}ue=r(re),D.initCallback&&D.initCallback()},this.stop=function(re){re=re||function(){},ue&&l(ue),this.blob=new Blob([new Uint8Array(G.stream().bin)],{type:"image/gif"}),re(this.blob),G.stream().bin=[]};var te=!1;this.pause=function(){te=!0},this.resume=function(){te=!1},this.clearRecordedData=function(){K.clearedRecordedData=!0,ae()};function ae(){G&&(G.stream().bin=[])}this.name="GifRecorder",this.toString=function(){return this.name};var he=document.createElement("canvas"),X=he.getContext("2d");Ae&&(I instanceof CanvasRenderingContext2D?(X=I,he=X.canvas):I instanceof HTMLCanvasElement&&(X=I.getContext("2d"),he=I));var ve=!0;if(!Ae){var we=document.createElement("video");we.muted=!0,we.autoplay=!0,we.playsInline=!0,ve=!1,we.onloadedmetadata=function(){ve=!0},U(I,we),we.play()}var ue=null,ke,G,K=this}typeof t<"u"&&(t.GifRecorder=le);function se(I,D){var O="Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45";(function(fe){typeof t<"u"||fe&&(typeof window<"u"||typeof ii>"u"||(ii.navigator={userAgent:O,getUserMedia:function(){}},ii.console||(ii.console={}),(typeof ii.console.log>"u"||typeof ii.console.error>"u")&&(ii.console.error=ii.console.log=ii.console.log||function(){console.log(arguments)}),typeof document>"u"&&(fe.document={documentElement:{appendChild:function(){return""}}},document.createElement=document.captureStream=document.mozCaptureStream=function(){var be={getContext:function(){return be},play:function(){},pause:function(){},drawImage:function(){},toDataURL:function(){return""},style:{}};return be},fe.HTMLVideoElement=function(){}),typeof location>"u"&&(fe.location={protocol:"file:",href:"",hash:""}),typeof screen>"u"&&(fe.screen={width:0,height:0}),typeof we>"u"&&(fe.URL={createObjectURL:function(){return""},revokeObjectURL:function(){return""}}),fe.window=ii))})(typeof ii<"u"?ii:null),D=D||"multi-streams-mixer";var Ae=[],te=!1,ae=document.createElement("canvas"),he=ae.getContext("2d");ae.style.opacity=0,ae.style.position="absolute",ae.style.zIndex=-1,ae.style.top="-1000em",ae.style.left="-1000em",ae.className=D,(document.body||document.documentElement).appendChild(ae),this.disableLogs=!1,this.frameInterval=10,this.width=360,this.height=240,this.useGainNode=!0;var X=this,ve=window.AudioContext;typeof ve>"u"&&(typeof webkitAudioContext<"u"&&(ve=webkitAudioContext),typeof 
mozAudioContext<"u"&&(ve=mozAudioContext));var we=window.URL;typeof we>"u"&&typeof webkitURL<"u"&&(we=webkitURL),typeof navigator<"u"&&typeof navigator.getUserMedia>"u"&&(typeof navigator.webkitGetUserMedia<"u"&&(navigator.getUserMedia=navigator.webkitGetUserMedia),typeof navigator.mozGetUserMedia<"u"&&(navigator.getUserMedia=navigator.mozGetUserMedia));var ue=window.MediaStream;typeof ue>"u"&&typeof webkitMediaStream<"u"&&(ue=webkitMediaStream),typeof ue<"u"&&typeof ue.prototype.stop>"u"&&(ue.prototype.stop=function(){this.getTracks().forEach(function(f
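/*
* The GifRecorder above accepts preview callbacks that its JSDoc example only names; a short
* sketch wiring them up. stream, previewImg and finalImg are placeholders for a getUserMedia
* stream and two <img> elements:
*
*   const recorder = new GifRecorder(stream, {
*     width: 320, height: 240, frameRate: 200, quality: 10,
*     onGifRecordingStarted: function() { console.log('encoder ready'); },
*     onGifPreview: function(dataURL) { previewImg.src = dataURL; }
*   });
*   recorder.record();
*   // ...later
*   recorder.stop(function(blob) {
*     finalImg.src = URL.createObjectURL(blob); // blob type is 'image/gif'
*   });
*/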
* MultiStreamRecorder can record multiple videos in a single container.
* @summary Multi-video recorder.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef MultiStreamRecorder
* @class
* @example
* var options = {
*     mimeType: 'video/webm'
* }
* var recorder = new MultiStreamRecorder(ArrayOfMediaStreams, options);
* recorder.record();
* recorder.stop(function(blob) {
*     video.src = URL.createObjectURL(blob);
*
*     // or
*     var blob = recorder.blob;
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStreams} mediaStreams - Array of MediaStreams.
* @param {object} config - {disableLogs:true, frameInterval: 1, mimeType: "video/webm"}
*/function me(I,D){I=I||[];var O=this,Ae,te;D=D||{elementClass:"multi-streams-mixer",mimeType:"video/webm",video:{width:360,height:240}},D.frameInterval||(D.frameInterval=10),D.video||(D.video={}),D.video.width||(D.video.width=360),D.video.height||(D.video.height=240),this.record=function(){Ae=new se(I,D.elementClass||"multi-streams-mixer"),ae().length&&(Ae.frameInterval=D.frameInterval||10,Ae.width=D.video.width||360,Ae.height=D.video.height||240,Ae.startDrawingFrames()),D.previewStream&&typeof D.previewStream=="function"&&D.previewStream(Ae.getMixedStream()),te=new R(Ae.getMixedStream(),D),te.record()};function ae(){var he=[];return I.forEach(function(X){S(X,"video").forEach(function(ve){he.push(ve)})}),he}this.stop=function(he){te&&te.stop(function(X){O.blob=X,he(X),O.clearRecordedData()})},this.pause=function(){te&&te.pause()},this.resume=function(){te&&te.resume()},this.clearRecordedData=function(){te&&(te.clearRecordedData(),te=null),Ae&&(Ae.releaseStreams(),Ae=null)},this.addStreams=function(he){if(!he)throw"First parameter is required.";he instanceof Array||(he=[he]),I.concat(he),!(!te||!Ae)&&(Ae.appendStreams(he),D.previewStream&&typeof D.previewStream=="function"&&D.previewStream(Ae.getMixedStream()))},this.resetVideoStreams=function(he){Ae&&(he&&!(he instanceof Array)&&(he=[he]),Ae.resetVideoStreams(he))},this.getMixer=function(){return Ae},this.name="MultiStreamRecorder",this.toString=function(){return this.name}}typeof t<"u"&&(t.MultiStreamRecorder=me);/**
* RecordRTCPromisesHandler adds Promise support to {@link RecordRTC}. Try a {@link https://github.com/muaz-khan/RecordRTC/blob/master/simple-demos/RecordRTCPromisesHandler.html|demo here}
* @summary Promises for {@link RecordRTC}
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef RecordRTCPromisesHandler
* @class
* @example
* var recorder = new RecordRTCPromisesHandler(mediaStream, options);
* recorder.startRecording()
*     .then(successCB)
*     .catch(errorCB);
* // Note: You can access all RecordRTC API using "recorder.recordRTC" e.g.
* recorder.recordRTC.onStateChanged = function(state) {};
* recorder.recordRTC.setRecordingDuration(5000);
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - Single media-stream object, array of media-streams, html-canvas-element, etc.
* @param {object} config - {type:"video", recorderType: MediaStreamRecorder, disableLogs: true, numberOfAudioChannels: 1, bufferSize: 0, sampleRate: 0, video: HTMLVideoElement, etc.}
 * @throws Will throw an error if the "new" keyword is not used to instantiate "RecordRTCPromisesHandler". Also throws an error if the first argument "MediaStream" is missing.
* @requires {@link RecordRTC}
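 * @example
 * // Editor's addition: a hedged async/await sketch, not part of the upstream docs. Every method
 * // defined below returns a Promise, so the callback-free form also works; "mediaStream" and
 * // "options" are assumed to be provided by the caller.
 * async function recordFiveSeconds(mediaStream, options) {
 *     var recorder = new RecordRTCPromisesHandler(mediaStream, options);
 *     await recorder.startRecording();
 *     await new Promise(function(resolve) { setTimeout(resolve, 5000); });
 *     await recorder.stopRecording();
 *     return recorder.getBlob(); // resolves with the recorded Blob
 * }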
*/function $(I,D){if(!this)throw'Use "new RecordRTCPromisesHandler()"';if(typeof I>"u")throw'First argument "MediaStream" is required.';var O=this;O.recordRTC=new t(I,D),this.startRecording=function(){return new Promise(function(Ae,te){try{O.recordRTC.startRecording(),Ae()}catch(ae){te(ae)}})},this.stopRecording=function(){return new Promise(function(Ae,te){try{O.recordRTC.stopRecording(function(ae){if(O.blob=O.recordRTC.getBlob(),!O.blob||!O.blob.size){te("Empty blob.",O.blob);return}Ae(ae)})}catch(ae){te(ae)}})},this.pauseRecording=function(){return new Promise(function(Ae,te){try{O.recordRTC.pauseRecording(),Ae()}catch(ae){te(ae)}})},this.resumeRecording=function(){return new Promise(function(Ae,te){try{O.recordRTC.resumeRecording(),Ae()}catch(ae){te(ae)}})},this.getDataURL=function(Ae){return new Promise(function(te,ae){try{O.recordRTC.getDataURL(function(he){te(he)})}catch(he){ae(he)}})},this.getBlob=function(){return new Promise(function(Ae,te){try{Ae(O.recordRTC.getBlob())}catch(ae){te(ae)}})},this.getInternalRecorder=function(){return new Promise(function(Ae,te){try{Ae(O.recordRTC.getInternalRecorder())}catch(ae){te(ae)}})},this.reset=function(){return new Promise(function(Ae,te){try{Ae(O.recordRTC.reset())}catch(ae){te(ae)}})},this.destroy=function(){return new Promise(function(Ae,te){try{Ae(O.recordRTC.destroy())}catch(ae){te(ae)}})},this.getState=function(){return new Promise(function(Ae,te){try{Ae(O.recordRTC.getState())}catch(ae){te(ae)}})},this.blob=null,this.version="5.6.2"}typeof t<"u"&&(t.RecordRTCPromisesHandler=$);/**
* WebAssemblyRecorder lets you create webm videos in JavaScript via WebAssembly. The library consumes raw RGBA32 buffers (4 bytes per pixel) and turns them into a webm video with the given framerate and quality. This makes it compatible out-of-the-box with ImageData from a CANVAS. With realtime mode you can also use webm-wasm for streaming webm videos.
* @summary Video recording feature in Chrome, Firefox and maybe Edge.
* @license {@link https://github.com/muaz-khan/RecordRTC/blob/master/LICENSE|MIT}
* @author {@link https://MuazKhan.com|Muaz Khan}
* @typedef WebAssemblyRecorder
* @class
* @example
* var recorder = new WebAssemblyRecorder(mediaStream);
* recorder.record();
* recorder.stop(function(blob) {
* video.src = URL.createObjectURL(blob);
* });
* @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code}
* @param {MediaStream} mediaStream - MediaStream object fetched using getUserMedia API or generated using captureStreamUntilEnded or WebAudio API.
* @param {object} config - {webAssemblyPath:'webm-wasm.wasm',workerPath: 'webm-worker.js', frameRate: 30, width: 1920, height: 1080, bitrate: 1024, realtime: true}
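 * @example
 * // Editor's addition: a hedged sketch, not part of the upstream docs, passing the config keys
 * // listed above. The local "webm-worker.js" / "webm-wasm.wasm" paths are assumptions; when they
 * // are omitted, the implementation below falls back to fetching both from unpkg.
 * var recorder = new WebAssemblyRecorder(mediaStream, {
 *     workerPath: 'webm-worker.js',
 *     webAssemblyPath: 'webm-wasm.wasm',
 *     width: 1920, height: 1080, frameRate: 30, bitrate: 1024, realtime: true
 * });
 * recorder.record();
 * recorder.stop(function(blob) {
 *     video.src = URL.createObjectURL(blob);
 * });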
*/function q(I,D){(typeof ReadableStream>"u"||typeof WritableStream>"u")&&console.error("Following polyfill is strongly recommended: https://unpkg.com/@mattiasbuelens/web-streams-polyfill/dist/polyfill.min.js"),D=D||{},D.width=D.width||640,D.height=D.height||480,D.frameRate=D.frameRate||30,D.bitrate=D.bitrate||1200,D.realtime=D.realtime||!0;var O;function Ae(){return new ReadableStream({start:function(we){var ue=document.createElement("canvas"),ke=document.createElement("video"),G=!0;ke.srcObject=I,ke.muted=!0,ke.height=D.height,ke.width=D.width,ke.volume=0,ke.onplaying=function(){ue.width=D.width,ue.height=D.height;var K=ue.getContext("2d"),re=1e3/D.frameRate,Ce=setInterval(function(){if(O&&(clearInterval(Ce),we.close()),G&&(G=!1,D.onVideoProcessStarted&&D.onVideoProcessStarted()),K.drawImage(ke,0,0),we._controlledReadableStream.state!=="closed")try{we.enqueue(K.getImageData(0,0,D.width,D.height))}catch{}},re)},ke.play()}})}var te;function ae(we,ue){if(!D.workerPath&&!ue){O=!1,fetch("https://unpkg.com/webm-wasm@latest/dist/webm-worker.js").then(function(G){G.arrayBuffer().then(function(K){ae(we,K)})});return}if(!D.workerPath&&ue instanceof ArrayBuffer){var ke=new Blob([ue],{type:"text/javascript"});D.workerPath=f.createObjectURL(ke)}D.workerPath||console.error("workerPath parameter is missing."),te=new Worker(D.workerPath),te.postMessage(D.webAssemblyPath||"https://unpkg.com/webm-wasm@latest/dist/webm-wasm.wasm"),te.addEventListener("message",function(G){G.data==="READY"?(te.postMessage({width:D.width,height:D.height,bitrate:D.bitrate||1200,timebaseDen:D.frameRate||30,realtime:D.realtime}),Ae().pipeTo(new WritableStream({write:function(K){if(O){console.error("Got image, but recorder is finished!");return}te.postMessage(K.data.buffer,[K.data.buffer])}}))):G.data&&(he||ve.push(G.data))})}this.record=function(){ve=[],he=!1,this.blob=null,ae(I),typeof D.initCallback=="function"&&D.initCallback()};var he;this.pause=function(){he=!0},this.resume=function(){he=!1};function X(we){if(!te){we&&we();return}te.addEventListener("message",function(ue){ue.data===null&&(te.terminate(),te=null,we&&we())}),te.postMessage(null)}var ve=[];this.stop=function(we){O=!0;var ue=this;X(function(){ue.blob=new Blob(ve,{type:"video/webm"}),we(ue.blob)})},this.name="WebAssemblyRecorder",this.toString=function(){return this.name},this.clearRecordedData=function(){ve=[],he=!1,this.blob=null},this.blob=null}typeof t<"u"&&(t.WebAssemblyRecorder=q)}(rp)),rp.exports}var qW=zW();const 
P1=p1(qW),VW={name:"MessageContactInfo",components:{ContactsAddContact:wh,Icon:He},computed:{...J(Kt,["contacts"]),...J(oe,["config","IntlString","blockedNumbers"])},data(){return{phoneNumber:null,contact:null}},methods:{...Te(Ei,["setBrowsePicture"]),blockContact(){if(this.blockCooldown){M.onshow_notification({data:{img:"/public/img/Apps/message.png",apptitle:"APP_MESSAGES_NAME",title:"CONTACT_APP_CONTACT_VIEW_BLOCK_COOLDOWN",message:""}});return}if(this.blockCooldown=!0,setTimeout(()=>{this.blockCooldown=!1},5e3),this.blockedNumbers.includes(this.phoneNumber)){M.post("removeBlock",{number:this.phoneNumber});return}M.post("blockContact",{number:this.phoneNumber})},startVideoCall(){M.startVideoCall(this.phoneNumber)},closeContactInfo(){document.getElementById("contact-addcontact-box").classList.add("addcontact-out-class"),setTimeout(()=>{this.phoneNumber=null},180),Xe().emit("message_brightness",{brightness:!1})},addContact(){Xe().emit("addContact",{number:this.phoneNumber})},sendGPSLocation(){M.sendMessage(this.phoneNumber,"%pos%"),this.closeContactInfo()},startCall(){M.startCall(this.phoneNumber)},openMail(e){xe.push("/mail/list/"+e)}},mounted(){this.emitter.on("openMessageContactInfo",e=>{if(!e.number){xe.push("/messages/list");return}this.phoneNumber=e.number;let t=this.contacts.filter(i=>i.number==e.number);t.length>0&&(this.contact=t[0]),Xe().emit("message_brightness",{brightness:!0})})},beforeUnmount(){this.emitter.off("openMessageContactInfo")}},WW={key:0,class:"contact-addcontact-box",id:"contact-addcontact-box"},KW=["src"],YW={class:"message-group-edit-name"},XW={
* Leaflet 1.9.4, a JS library for interactive maps. https://leafletjs.com
* (c) 2010-2023 Vladimir Agafonkin, (c) 2010-2011 CloudMade
*/var fme=Fu.exports,Fv;function Ame(){return Fv||(Fv=1,function(e,t){(function(i,n){n(t)})(fme,function(i){var n="1.9.4";function a(c){var g,v,k,P;for(v=1,k=arguments.length;v<k;v++){P=arguments[v];for(g in P)c[g]=P[g]}return c}var s=Object.create||function(){function c(){}return function(g){return c.prototype=g,new c}}();function r(c,g){var v=Array.prototype.slice;if(c.bind)return c.bind.apply(c,v.call(arguments,1));var k=v.call(arguments,2);return function(){return c.apply(g,k.length?k.concat(v.call(arguments)):arguments)}}var o=0;function l(c){return"_leaflet_id"in c||(c._leaflet_id=++o),c._leaflet_id}function u(c,g,v){var k,P,j,pe;return pe=function(){k=!1,P&&(j.apply(v,P),P=!1)},j=function(){k?P=arguments:(c.apply(v,arguments),setTimeout(pe,g),k=!0)},j}function f(c,g,v){var k=g[1],P=g[0],j=k-P;return c===k&&v?c:((c-P)%j+j)%j+P}function A(){return!1}function h(c,g){if(g===!1)return c;var v=Math.pow(10,g===void 0?6:g);return Math.round(c*v)/v}function p(c){return c.trim?c.trim():c.replace(/^\s+|\s+$/g,"")}function F(c){return p(c).split(/\s+/)}function y(c,g){Object.prototype.hasOwnProperty.call(c,"options")||(c.options=c.options?s(c.options):{});for(var v in g)c.options[v]=g[v];return c.options}function E(c,g,v){var k=[];for(var P in c)k.push(encodeURIComponent(v?P.toUpperCase():P)+"="+encodeURIComponent(c[P]));return(!g||g.indexOf("?")===-1?"?":"&")+k.join("&")}var w=/\{ *([\w_ -]+) *\}/g;function C(c,g){return c.replace(w,function(v,k){var P=g[k];if(P===void 0)throw new Error("No value provided for variable "+v);return typeof P=="function"&&(P=P(g)),P})}var B=Array.isArray||function(c){return Object.prototype.toString.call(c)==="[object Array]"};function S(c,g){for(var v=0;v<c.length;v++)if(c[v]===g)return v;return-1}var U="data:image/gif;base64,R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs=";function N(c){return window["webkit"+c]||window["moz"+c]||window["ms"+c]}var z=0;function Q(c){var g=+new Date,v=Math.max(0,16-(g-z));return z=g+v,window.setTimeout(c,v)}var R=window.requestAnimationFrame||N("RequestAnimationFrame")||Q,W=window.cancelAnimationFrame||N("CancelAnimationFrame")||N("CancelRequestAnimationFrame")||function(c){window.clearTimeout(c)};function Z(c,g,v){if(v&&R===Q)c.call(g);else return R.call(window,r(c,g))}function ce(c){c&&W.call(window,c)}var Fe={__proto__:null,extend:a,create:s,bind:r,get lastId(){return o},stamp:l,throttle:u,wrapNum:f,falseFn:A,formatNum:h,trim:p,splitWords:F,setOptions:y,getParamString:E,template:C,isArray:B,indexOf:S,emptyImageUrl:U,requestFn:R,cancelFn:W,requestAnimFrame:Z,cancelAnimFrame:ce};function ge(){}ge.extend=function(c){var g=function(){y(this),this.initialize&&this.initialize.apply(this,arguments),this.callInitHooks()},v=g.__super__=this.prototype,k=s(v);k.constructor=g,g.prototype=k;for(var P in this)Object.prototype.hasOwnProperty.call(this,P)&&P!=="prototype"&&P!=="__super__"&&(g[P]=this[P]);return c.statics&&a(g,c.statics),c.includes&&(le(c.includes),a.apply(null,[k].concat(c.includes))),a(k,c),delete k.statics,delete k.includes,k.options&&(k.options=v.options?s(v.options):{},a(k.options,c.options)),k._initHooks=[],k.callInitHooks=function(){if(!this._initHooksCalled){v.callInitHooks&&v.callInitHooks.call(this),this._initHooksCalled=!0;for(var j=0,pe=k._initHooks.length;j<pe;j++)k._initHooks[j].call(this)}},g},ge.include=function(c){var g=this.prototype.options;return a(this.prototype,c),c.options&&(this.prototype.options=g,this.mergeOptions(c.options)),this},ge.mergeOptions=function(c){return 
a(this.prototype.options,c),this},ge.addInitHook=function(c){var g=Array.prototype.slice.call(arguments,1),v=typeof c=="function"?c:function(){this[c].apply(this,g)};return this.prototype._initHooks=this.prototype._initHooks||[],this.prototype._initHooks.push(v),this};function le(c){if(!(typeof L>"u"||!L||!L.Mixin)){c=B(c)?c:[c];for(var g=0;g<c.length;g++)c[g]===L.Mixin.Events&&console.warn("Deprecated include of L.Mixin.Events: this property will be removed in future releases, please inherit from L.Evented instead.",new Error().stack)}}var se={on:function(c,g,v){if(typeof c=="ob
`,isFavourite:1,isDeleted:0,time:new Date},{id:4,identifier:"hdewhew",picture:"https://wallpapers.com/images/hd/fivem-9z6vdna3pkg05t7w.jpg",isFavourite:0,isDeleted:1,time:new Date}]}testYellowPage(){const t=Wr();t.posts=[{id:1,title:"Tes434343rr43r43rrrrrrrrrr7843874r3867r436784r3687543876t",text:"Te 43243 43r43r434r3r43r43 4r3434354535345433545435433454r3r43r43434343rst",number:122121},{id:2,title:"Auto zu verkaufen 223e32 32 32r323 r332",text:"Hey ich biete hier meinen Lamborghini an zu verkaufen Preis: 500 euro schnapper gönnt euch.",image:"https://www.lamborghini.com/sites/it-en/files/DAM/lamborghini/facelift_2019/models_gw/2023/03_29_revuelto/gate_models_s_02_m.jpg",number:211}]}testNewsPosts(){const t=Kr();t.posts=[{id:1,title:"Eröffnungs Cafe",text:"Hey wir eröffnen heute unser cafe in der blumenstraße!",number:122121,image:"https://cdn.discordapp.com/attachments/880552660447658007/1228016373020753930/screenshot.png?ex=662a825e&is=66180d5e&hm=bece92667d5374dbd805499697a43a0abfeb8617089cd9a3d73698ccb1f15438&"},{id:2,title:"Eröffnungs Cafe",text:"Hey wir eröffnen heute unser cafe in der blumenstraße!",number:122121,image:"https://www.swr.de/wissen/1000-antworten/1676037642957%2Cblumenwiese-118~_v-16x9@2dM_-ad6791ade5eb8b5c935dd377130b903c4b5781d8.jpg"}]}camera_open(){return this.post("camera_open")}onTakePhoto(){Xe().emit("takePhoto")}twitter_login(t,i){return this.post("twitter_login",{username:t,password:i})}twitter_postComment(t,i,n,a){return this.post("twitter_postComment",{username:t,password:i,post:n,comment:a})}ontwitter_addComment(t){Xe().emit("addTwitterComment",t.comment)}twitter_changePassword(t,i,n){return this.post("twitter_changePassword",{username:t,password:i,newPassword:n})}twitter_createAccount(t,i,n){return this.post("twitter_createAccount",{username:t,password:i,avatarUrl:n})}twitter_postTweet(t,i,n,a){this.post("twitter_postTweet",{username:t,password:i,message:n,image:a}).then(()=>{xe.push("/twitter/home")})}twitter_postTweetImg(t,i,n){return this.post("twitter_postTweetImg",{username:t,password:i,message:n})}twitter_toggleLikeTweet(t,i,n){return this.post("twitter_toggleLikeTweet",{username:t,password:i,tweetId:n})}twitter_setAvatar(t,i,n){return this.post("twitter_setAvatarUrl",{username:t,password:i,avatarUrl:n})}twitter_getTweets(t,i){return this.post("twitter_getTweets",{username:t,password:i})}twitter_getUserTweets(t,i){return this.post("twitter_getUserTweets",{username:t,password:i})}twitter_deleteTweet(t,i,n){return this.post("twitter_userssDeleteTweet",{username:t,password:i,tweetId:n})}twitter_logout(){return yi().twitterLogout(),this.post("twitter_logout")}async ontwitter_tweets(t){let i=[],n=[];t.tweets.forEach(s=>{n.push(new Promise((r,o)=>{let l=new Image;l.onerror=function(){s.authorIcon="/public/img/user.png",i.push(s),r()},l.onload=function(){i.push(s),r()},l.src=s.authorIcon}))}),await Promise.all(n);const a=yi();a.tweets=i}ontwitter_newTweet(t){const i=yi(),n=i.tweets;i.tweets=[t.tweet,...n]}ontwitter_newpost(t){const i=yi(),n=oe();i.twitterNotification===1&&i.twitterUsername!==t.post.author&&i.twitterUsername!=null&&localStorage.roadphone_app_twitter_app==="1"&&(this.onshow_notification({data:{apptitle:n.IntlString("APP_TWITTER_NOTIF_NEW_POST_TITLE"),message:"",title:t.post.author+" "+n.IntlString("APP_TWITTER_NOTIF_NEW_POST"),img:"/public/img/Apps/tweetwave.png"}}),this.onsetLockscreenNotify({apptitle:n.IntlString("APP_TWITTER_NOTIF_NEW_POST_TITLE"),title:t.post.author+" 
"+n.IntlString("APP_TWITTER_NOTIF_NEW_POST"),message:"",img:"/public/img/Apps/tweetwave.png",app:"twitter"}))}ontwitter_setAccount(t){yi().setAccount(t)}ontwitter_updateTweetLikes(t){const i=yi(),n=i.tweets.findIndex(s=>s.id===t.tweetId);n!==-1&&(i.tweets[n].likes=t.likes);const a=i.userTweets.findIndex(s=>s.id===t.tweetId);a!==-1&&(i.userTweets[a].likes=t.likes)}ontwitter_setTweetLikes(t){const i=yi(),n=i.tweets.findIndex(s=>s.id===t.tweetId);n!==-1&&(i.tweets[n].isLiked=t.isLiked);const a=i.userTweets.findIndex(s=>s.id===t.tweetId);a!==-1&&(i.userTweets[a].isLiked=t.isLiked)}ontwitter_UserTweets(
* html2canvas 1.4.1 <https://html2canvas.hertzen.com>
* Copyright (c) 2022 Niklas von Hertzen <https://hertzen.com>
* Released under MIT License
*//*! *****************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */var n_=function(e,t){return n_=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(i,n){i.__proto__=n}||function(i,n){for(var a in n)Object.prototype.hasOwnProperty.call(n,a)&&(i[a]=n[a])},n_(e,t)};function Ta(e,t){if(typeof t!="function"&&t!==null)throw new TypeError("Class extends value "+String(t)+" is not a constructor or null");n_(e,t);function i(){this.constructor=e}e.prototype=t===null?Object.create(t):(i.prototype=t.prototype,new i)}var a_=function(){return a_=Object.assign||function(t){for(var i,n=1,a=arguments.length;n<a;n++){i=arguments[n];for(var s in i)Object.prototype.hasOwnProperty.call(i,s)&&(t[s]=i[s])}return t},a_.apply(this,arguments)};function vn(e,t,i,n){function a(s){return s instanceof i?s:new i(function(r){r(s)})}return new(i||(i=Promise))(function(s,r){function o(f){try{u(n.next(f))}catch(A){r(A)}}function l(f){try{u(n.throw(f))}catch(A){r(A)}}function u(f){f.done?s(f.value):a(f.value).then(o,l)}u((n=n.apply(e,[])).next())})}function cn(e,t){var i={label:0,sent:function(){if(s[0]&1)throw s[1];return s[1]},trys:[],ops:[]},n,a,s,r;return r={next:o(0),throw:o(1),return:o(2)},typeof Symbol=="function"&&(r[Symbol.iterator]=function(){return this}),r;function o(u){return function(f){return l([u,f])}}function l(u){if(n)throw new TypeError("Generator is already executing.");for(;i;)try{if(n=1,a&&(s=u[0]&2?a.return:u[0]?a.throw||((s=a.return)&&s.call(a),0):a.next)&&!(s=s.call(a,u[1])).done)return s;switch(a=0,s&&(u=[u[0]&2,s.value]),u[0]){case 0:case 1:s=u;break;case 4:return i.label++,{value:u[1],done:!1};case 5:i.label++,a=u[1],u=[0];continue;case 7:u=i.ops.pop(),i.trys.pop();continue;default:if(s=i.trys,!(s=s.length>0&&s[s.length-1])&&(u[0]===6||u[0]===2)){i=0;continue}if(u[0]===3&&(!s||u[1]>s[0]&&u[1]<s[3])){i.label=u[1];break}if(u[0]===6&&i.label<s[1]){i.label=s[1],s=u;break}if(s&&i.label<s[2]){i.label=s[2],i.ops.push(u);break}s[2]&&i.ops.pop(),i.trys.pop();continue}u=t.call(e,i)}catch(f){u=[6,f],a=0}finally{n=s=0}if(u[0]&5)throw u[1];return{value:u[0]?u[1]:void 0,done:!0}}}function Hd(e,t,i){if(arguments.length===2)for(var n=0,a=t.length,s;n<a;n++)(s||!(n in t))&&(s||(s=Array.prototype.slice.call(t,0,n)),s[n]=t[n]);return e.concat(s||t)}var Is=function(){function e(t,i,n,a){this.left=t,this.top=i,this.width=n,this.height=a}return e.prototype.add=function(t,i,n,a){return new e(this.left+t,this.top+i,this.width+n,this.height+a)},e.fromClientRect=function(t,i){return new e(i.left+t.windowBounds.left,i.top+t.windowBounds.top,i.width,i.height)},e.fromDOMRectList=function(t,i){var n=Array.from(i).find(function(a){return a.width!==0});return n?new e(n.left+t.windowBounds.left,n.top+t.windowBounds.top,n.width,n.height):e.EMPTY},e.EMPTY=new e(0,0,0,0),e}(),Ph=function(e,t){return Is.fromClientRect(e,t.getBoundingClientRect())},r3e=function(e){var t=e.body,i=e.documentElement;if(!t||!i)throw new Error("Unable to get document size");var n=Math.max(Math.max(t.scrollWidth,i.scrollWidth),Math.max(t.offsetWidth,i.offsetWidth),Math.max(t.clientWidth,i.clientWidth)),a=Math.max(Math.max(t.scrollHeight,i.scrollHeight),Math.max(t.offsetHeight,i.offsetHeight),Math.max(t.clientHeight,i.clientHeight));return new Is(0,0,n,a)},Mh=function(e){for(var t=[],i=0,n=e.length;i<n;){var a=e.charCodeAt(i++);if(a>=55296&&a<=56319&&i<n){var s=e.charCodeAt(i++);(s&64512)===56320?t.push(((a&1023)<<10)+(s&1023)+65536):(t.push(a),i--)}else t.push(a)}return t},Li=function(){for(var 
e=[],t=0;t<arguments.length;t++)e[t]=arguments[t];if(String.fromCodePoint)return String.fromCodePoint.apply(String,e);var i=e.length;if(!i)return"";for(var n=[],a=-1,s="";++a<i;){var r=e[a];r<=65535?n.push(r):(r-=65536,n.push((r>>10)+55296,r%1024+56320)),(a+1===i||n.length>16384)&&(s+=String.fromCharCode.apply(String,n),n.length=0)}return s},bv="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",o3e=typeof Uint8Array>"u"?[]:new Uint8Array(256);for(var Qd=0;Qd<bv.length;Qd++)o3e[bv.charCodeAt(Qd)]=Qd;var vv="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvw
content: "" !important;
display: none !important;
}`,qke=function(e){Vke(e,"."+w_+Gke+g2+`
.`+C_+zke+g2)},Vke=function(e,t){var i=e.ownerDocument;if(i){var n=i.createElement("style");n.textContent=t,e.appendChild(n)}},eE=function(){function e(){}return e.getOrigin=function(t){var i=e._link;return i?(i.href=t,i.href=i.href,i.protocol+i.hostname+i.port):"about:blank"},e.isSameOrigin=function(t){return e.getOrigin(t)===e._origin},e.setContext=function(t){e._link=t.document.createElement("a"),e._origin=e.getOrigin(t.location.href)},e._origin="about:blank",e}(),Wke=function(){function e(t,i){this.context=t,this._options=i,this._cache={}}return e.prototype.addImage=function(t){var i=Promise.resolve();return this.has(t)||(Sp(t)||Jke(t))&&(this._cache[t]=this.loadImage(t)).catch(function(){}),i},e.prototype.match=function(t){return this._cache[t]},e.prototype.loadImage=function(t){return vn(this,void 0,void 0,function(){var i,n,a,s,r=this;return cn(this,function(o){switch(o.label){case 0:return i=eE.isSameOrigin(t),n=!Bp(t)&&this._options.useCORS===!0&&an.SUPPORT_CORS_IMAGES&&!i,a=!Bp(t)&&!i&&!Sp(t)&&typeof this._options.proxy=="string"&&an.SUPPORT_CORS_XHR&&!n,!i&&this._options.allowTaint===!1&&!Bp(t)&&!Sp(t)&&!a&&!n?[2]:(s=t,a?[4,this.proxy(s)]:[3,2]);case 1:s=o.sent(),o.label=2;case 2:return this.context.logger.debug("Added image "+t.substring(0,256)),[4,new Promise(function(l,u){var f=new Image;f.onload=function(){return l(f)},f.onerror=u,(Zke(s)||n)&&(f.crossOrigin="anonymous"),f.src=s,f.complete===!0&&setTimeout(function(){return l(f)},500),r._options.imageTimeout>0&&setTimeout(function(){return u("Timed out ("+r._options.imageTimeout+"ms) loading image")},r._options.imageTimeout)})];case 3:return[2,o.sent()]}})})},e.prototype.has=function(t){return typeof this._cache[t]<"u"},e.prototype.keys=function(){return Promise.resolve(Object.keys(this._cache))},e.prototype.proxy=function(t){var i=this,n=this._options.proxy;if(!n)throw new Error("No proxy defined");var a=t.substring(0,256);return new Promise(function(s,r){var o=an.SUPPORT_RESPONSE_TYPE?"blob":"text",l=new XMLHttpRequest;l.onload=function(){if(l.status===200)if(o==="text")s(l.response);else{var A=new FileReader;A.addEventListener("load",function(){return s(A.result)},!1),A.addEventListener("error",function(h){return r(h)},!1),A.readAsDataURL(l.response)}else r("Failed to proxy resource "+a+" with status code "+l.status)},l.onerror=r;var u=n.indexOf("?")>-1?"&":"?";if(l.open("GET",""+n+u+"url="+encodeURIComponent(t)+"&responseType="+o),o!=="text"&&l instanceof XMLHttpRequest&&(l.responseType=o),i._options.imageTimeout){var f=i._options.imageTimeout;l.timeout=f,l.ontimeout=function(){return r("Timed out ("+f+"ms) proxying "+a)}}l.send()})},e}(),Kke=/^data:image\/svg\+xml/i,Yke=/^data:image\/.*;base64,/i,Xke=/^data:image\/.*/i,Jke=function(e){return an.SUPPORT_SVG_DRAWING||!$ke(e)},Bp=function(e){return Xke.test(e)},Zke=function(e){return Yke.test(e)},Sp=function(e){return e.substr(0,4)==="blob"},$ke=function(e){return e.substr(-3).toLowerCase()==="svg"||Kke.test(e)},at=function(){function e(t,i){this.type=0,this.x=t,this.y=i}return e.prototype.add=function(t,i){return new e(this.x+t,this.y+i)},e}(),zo=function(e,t,i){return new at(e.x+(t.x-e.x)*i,e.y+(t.y-e.y)*i)},rf=function(){function e(t,i,n,a){this.type=1,this.start=t,this.startControl=i,this.endControl=n,this.end=a}return e.prototype.subdivide=function(t,i){var n=zo(this.start,this.startControl,t),a=zo(this.startControl,this.endControl,t),s=zo(this.endControl,this.end,t),r=zo(n,a,t),o=zo(a,s,t),l=zo(r,o,t);return i?new e(this.start,n,r,l):new 
e(l,o,s,this.end)},e.prototype.add=function(t,i){return new e(this.start.add(t,i),this.startControl.add(t,i),this.endControl.add(t,i),this.end.add(t,i))},e.prototype.reverse=function(){return new e(this.end,this.endControl,this.startControl,this.start)},e}(),la=function(e){return e.type===1},eBe=function(){function e(t){var i=t.styles,n=t.bounds,a=Cu(i.borderTopLeftRadius,n.width,n.height),s=a[0],r=a[1],o=Cu(i.borderTopRightRadius,n.width,n.height),l=o[0],u=o[1],f=Cu(i.borderBottomRightRadius,n.width,n.height),A=f[0],h=f[1],p=Cu(i.borderBottomLeftRadius,n
* Font Awesome Free 6.7.2 by @fontawesome - https://fontawesome.com
* License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
* Copyright 2024 Fonticons, Inc.
*/function f6e(e,t,i){return(t=h6e(t))in e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}function F2(e,t){var i=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter(function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable})),i.push.apply(i,n)}return i}function Ke(e){for(var t=1;t<arguments.length;t++){var i=arguments[t]!=null?arguments[t]:{};t%2?F2(Object(i),!0).forEach(function(n){f6e(e,n,i[n])}):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(i)):F2(Object(i)).forEach(function(n){Object.defineProperty(e,n,Object.getOwnPropertyDescriptor(i,n))})}return e}function A6e(e,t){if(typeof e!="object"||!e)return e;var i=e[Symbol.toPrimitive];if(i!==void 0){var n=i.call(e,t);if(typeof n!="object")return n;throw new TypeError("@@toPrimitive must return a primitive value.")}return(t==="string"?String:Number)(e)}function h6e(e){var t=A6e(e,"string");return typeof t=="symbol"?t:t+""}const b2=()=>{};let K1={},oE={},lE=null,uE={mark:b2,measure:b2};try{typeof window<"u"&&(K1=window),typeof document<"u"&&(oE=document),typeof MutationObserver<"u"&&(lE=MutationObserver),typeof performance<"u"&&(uE=performance)}catch{}const{userAgent:v2=""}=K1.navigator||{},vr=K1,_i=oE,y2=lE,cf=uE;vr.document;const Os=!!_i.documentElement&&!!_i.head&&typeof _i.addEventListener=="function"&&typeof _i.createElement=="function",cE=~v2.indexOf("MSIE")||~v2.indexOf("Trident/");var g6e=/fa(s|r|l|t|d|dr|dl|dt|b|k|kd|ss|sr|sl|st|sds|sdr|sdl|sdt)?[\-\ ]/,p6e=/Font ?Awesome ?([56 ]*)(Solid|Regular|Light|Thin|Duotone|Brands|Free|Pro|Sharp Duotone|Sharp|Kit)?.*/i,dE={classic:{fa:"solid",fas:"solid","fa-solid":"solid",far:"regular","fa-regular":"regular",fal:"light","fa-light":"light",fat:"thin","fa-thin":"thin",fab:"brands","fa-brands":"brands"},duotone:{fa:"solid",fad:"solid","fa-solid":"solid","fa-duotone":"solid",fadr:"regular","fa-regular":"regular",fadl:"light","fa-light":"light",fadt:"thin","fa-thin":"thin"},sharp:{fa:"solid",fass:"solid","fa-solid":"solid",fasr:"regular","fa-regular":"regular",fasl:"light","fa-light":"light",fast:"thin","fa-thin":"thin"},"sharp-duotone":{fa:"solid",fasds:"solid","fa-solid":"solid",fasdr:"regular","fa-regular":"regular",fasdl:"light","fa-light":"light",fasdt:"thin","fa-thin":"thin"}},m6e={GROUP:"duotone-group",PRIMARY:"primary",SECONDARY:"secondary"},fE=["fa-classic","fa-duotone","fa-sharp","fa-sharp-duotone"],_n="classic",Gh="duotone",_6e="sharp",F6e="sharp-duotone",AE=[_n,Gh,_6e,F6e],b6e={classic:{900:"fas",400:"far",normal:"far",300:"fal",100:"fat"},duotone:{900:"fad",400:"fadr",300:"fadl",100:"fadt"},sharp:{900:"fass",400:"fasr",300:"fasl",100:"fast"},"sharp-duotone":{900:"fasds",400:"fasdr",300:"fasdl",100:"fasdt"}},v6e={"Font Awesome 6 Free":{900:"fas",400:"far"},"Font Awesome 6 Pro":{900:"fas",400:"far",normal:"far",300:"fal",100:"fat"},"Font Awesome 6 Brands":{400:"fab",normal:"fab"},"Font Awesome 6 Duotone":{900:"fad",400:"fadr",normal:"fadr",300:"fadl",100:"fadt"},"Font Awesome 6 Sharp":{900:"fass",400:"fasr",normal:"fasr",300:"fasl",100:"fast"},"Font Awesome 6 Sharp Duotone":{900:"fasds",400:"fasdr",normal:"fasdr",300:"fasdl",100:"fasdt"}},y6e=new 
Map([["classic",{defaultShortPrefixId:"fas",defaultStyleId:"solid",styleIds:["solid","regular","light","thin","brands"],futureStyleIds:[],defaultFontWeight:900}],["sharp",{defaultShortPrefixId:"fass",defaultStyleId:"solid",styleIds:["solid","regular","light","thin"],futureStyleIds:[],defaultFontWeight:900}],["duotone",{defaultShortPrefixId:"fad",defaultStyleId:"solid",styleIds:["solid","regular","light","thin"],futureStyleIds:[],defaultFontWeight:900}],["sharp-duotone",{defaultShortPrefixId:"fasds",defaultStyleId:"solid",styleIds:["solid","regular","light","thin"],futureStyleIds:[],defaultFontWeight:900}]]),w6e={classic:{solid:"fas",regular:"far",light:"fal",thin:"fat",brands:"fab"},duotone:{solid:"fad",regular:"fadr",light:"fadl",thin:"fadt"},sharp:{solid:"fass",regular:"fasr",light:"fasl",thin:"fast"},"sharp-duotone":{solid:"fasd
--fa-font-solid: normal 900 1em/1 "Font Awesome 6 Free";
--fa-font-regular: normal 400 1em/1 "Font Awesome 6 Free";
--fa-font-light: normal 300 1em/1 "Font Awesome 6 Pro";
--fa-font-thin: normal 100 1em/1 "Font Awesome 6 Pro";
--fa-font-duotone: normal 900 1em/1 "Font Awesome 6 Duotone";
--fa-font-duotone-regular: normal 400 1em/1 "Font Awesome 6 Duotone";
--fa-font-duotone-light: normal 300 1em/1 "Font Awesome 6 Duotone";
--fa-font-duotone-thin: normal 100 1em/1 "Font Awesome 6 Duotone";
--fa-font-brands: normal 400 1em/1 "Font Awesome 6 Brands";
--fa-font-sharp-solid: normal 900 1em/1 "Font Awesome 6 Sharp";
--fa-font-sharp-regular: normal 400 1em/1 "Font Awesome 6 Sharp";
--fa-font-sharp-light: normal 300 1em/1 "Font Awesome 6 Sharp";
--fa-font-sharp-thin: normal 100 1em/1 "Font Awesome 6 Sharp";
--fa-font-sharp-duotone-solid: normal 900 1em/1 "Font Awesome 6 Sharp Duotone";
--fa-font-sharp-duotone-regular: normal 400 1em/1 "Font Awesome 6 Sharp Duotone";
--fa-font-sharp-duotone-light: normal 300 1em/1 "Font Awesome 6 Sharp Duotone";
--fa-font-sharp-duotone-thin: normal 100 1em/1 "Font Awesome 6 Sharp Duotone";
}
svg:not(:root).svg-inline--fa, svg:not(:host).svg-inline--fa {
overflow: visible;
box-sizing: content-box;
}
.svg-inline--fa {
display: var(--fa-display, inline-block);
height: 1em;
overflow: visible;
vertical-align: -0.125em;
}
.svg-inline--fa.fa-2xs {
vertical-align: 0.1em;
}
.svg-inline--fa.fa-xs {
vertical-align: 0em;
}
.svg-inline--fa.fa-sm {
vertical-align: -0.0714285705em;
}
.svg-inline--fa.fa-lg {
vertical-align: -0.2em;
}
.svg-inline--fa.fa-xl {
vertical-align: -0.25em;
}
.svg-inline--fa.fa-2xl {
vertical-align: -0.3125em;
}
.svg-inline--fa.fa-pull-left {
margin-right: var(--fa-pull-margin, 0.3em);
width: auto;
}
.svg-inline--fa.fa-pull-right {
margin-left: var(--fa-pull-margin, 0.3em);
width: auto;
}
.svg-inline--fa.fa-li {
width: var(--fa-li-width, 2em);
top: 0.25em;
}
.svg-inline--fa.fa-fw {
width: var(--fa-fw-width, 1.25em);
}
.fa-layers svg.svg-inline--fa {
bottom: 0;
left: 0;
margin: auto;
position: absolute;
right: 0;
top: 0;
}
.fa-layers-counter, .fa-layers-text {
display: inline-block;
position: absolute;
text-align: center;
}
.fa-layers {
display: inline-block;
height: 1em;
position: relative;
text-align: center;
vertical-align: -0.125em;
width: 1em;
}
.fa-layers svg.svg-inline--fa {
transform-origin: center center;
}
.fa-layers-text {
left: 50%;
top: 50%;
transform: translate(-50%, -50%);
transform-origin: center center;
}
.fa-layers-counter {
background-color: var(--fa-counter-background-color, #ff253a);
border-radius: var(--fa-counter-border-radius, 1em);
box-sizing: border-box;
color: var(--fa-inverse, #fff);
line-height: var(--fa-counter-line-height, 1);
max-width: var(--fa-counter-max-width, 5em);
min-width: var(--fa-counter-min-width, 1.5em);
overflow: hidden;
padding: var(--fa-counter-padding, 0.25em 0.5em);
right: var(--fa-right, 0);
text-overflow: ellipsis;
top: var(--fa-top, 0);
transform: scale(var(--fa-counter-scale, 0.25));
transform-origin: top right;
}
.fa-layers-bottom-right {
bottom: var(--fa-bottom, 0);
right: var(--fa-right, 0);
top: auto;
transform: scale(var(--fa-layers-scale, 0.25));
transform-origin: bottom right;
}
.fa-layers-bottom-left {
bottom: var(--fa-bottom, 0);
left: var(--fa-left, 0);
right: auto;
top: auto;
transform: scale(var(--fa-layers-scale, 0.25));
transform-origin: bottom left;
}
.fa-layers-top-right {
top: var(--fa-top, 0);
right: var(--fa-right, 0);
transform: scale(var(--fa-layers-scale, 0.25));
transform-origin: top right;
}
.fa-layers-top-left {
left: var(--fa-left, 0);
right: auto;
top: var(--fa-top, 0);
transform: scale(var(--fa-layers-scale, 0.25));
transform-origin: top left;
}
.fa-1x {
font-size: 1em;
}
.fa-2x {
font-size: 2em;
}
.fa-3x {
font-size: 3em;
}
.fa-4x {
font-size: 4em;
}
.fa-5x {
font-size: 5em;
}
.fa-6x {
font-size: 6em;
}
.fa-7x {
font-size: 7em;
}
.fa-8x {
font-size: 8em;
}
.fa-9x {
font-size: 9em;
}
.fa-10x {
font-size: 10em;
}
.fa-2xs {
font-size: 0.625em;
line-height: 0.1em;
vertical-align: 0.225em;
}
.fa-xs {
font-size: 0.75em;
line-height: 0.0833333337em;
vertical-align: 0.125em;
}
.fa-sm {
font-size: 0.875em;
line-height: 0.0714285718em;
vertical-align: 0.0535714295em;
}
.fa-lg {
font-size: 1.25em;
line-height: 0.05em;
vertical-align: -0.075em;
}
.fa-xl {
font-size: 1.5em;
line-height: 0.0416666682em;
vertical-align: -0.125em;
}
.fa-2xl {
font-size: 2em;
line-height: 0.03125em;
vertical-align: -0.1875em;
}
.fa-fw {
text-align: center;
width: 1.25em;
}
.fa-ul {
list-style-type: none;
margin-left: var(--fa-li-margin, 2.5em);
padding-left: 0;
}
.fa-ul > li {
position: relative;
}
.fa-li {
left: calc(-1 * var(--fa-li-width, 2em));
position: absolute;
text-align: center;
width: var(--fa-li-width, 2em);
line-height: inherit;
}
.fa-border {
border-color: var(--fa-border-color, #eee);
border-radius: var(--fa-border-radius, 0.1em);
border-style: var(--fa-border-style, solid);
border-width: var(--fa-border-width, 0.08em);
padding: var(--fa-border-padding, 0.2em 0.25em 0.15em);
}
.fa-pull-left {
float: left;
margin-right: var(--fa-pull-margin, 0.3em);
}
.fa-pull-right {
float: right;
margin-left: var(--fa-pull-margin, 0.3em);
}
.fa-beat {
animation-name: fa-beat;
animation-delay: var(--fa-animation-delay, 0s);
animation-direction: var(--fa-animation-direction, normal);
animation-duration: var(--fa-animation-duration, 1s);
animation-iteration-count: var(--fa-animation-iteration-count, infinite);
animation-timing-function: var(--fa-animation-timing, ease-in-out);
}
.fa-bounce {
animation-name: fa-bounce;
animation-delay: var(--fa-animation-delay, 0s);
animation-direction: var(--fa-animation-direction, normal);
animation-duration: var(--fa-animation-duration, 1s);
animation-iteration-count: var(--fa-animation-iteration-count, infinite);
animation-timing-function: var(--fa-animation-timing, cubic-bezier(0.28, 0.84, 0.42, 1));
}
.fa-fade {
animation-name: fa-fade;
animation-delay: var(--fa-animation-delay, 0s);
animation-direction: var(--fa-animation-direction, normal);
animation-duration: var(--fa-animation-duration, 1s);
animation-iteration-count: var(--fa-animation-iteration-count, infinite);
animation-timing-function: var(--fa-animation-timing, cubic-bezier(0.4, 0, 0.6, 1));
}
.fa-beat-fade {
animation-name: fa-beat-fade;
animation-delay: var(--fa-animation-delay, 0s);
animation-direction: var(--fa-animation-direction, normal);
animation-duration: var(--fa-animation-duration, 1s);
animation-iteration-count: var(--fa-animation-iteration-count, infinite);
animation-timing-function: var(--fa-animation-timing, cubic-bezier(0.4, 0, 0.6, 1));
}
.fa-flip {
animation-name: fa-flip;
animation-delay: var(--fa-animation-delay, 0s);
animation-direction: var(--fa-animation-direction, normal);
animation-duration: var(--fa-animation-duration, 1s);
animation-iteration-count: var(--fa-animation-iteration-count, infinite);
animation-timing-function: var(--fa-animation-timing, ease-in-out);
}
.fa-shake {
animation-name: fa-shake;
animation-delay: var(--fa-animation-delay, 0s);
animation-direction: var(--fa-animation-direction, normal);
animation-duration: var(--fa-animation-duration, 1s);
animation-iteration-count: var(--fa-animation-iteration-count, infinite);
animation-timing-function: var(--fa-animation-timing, linear);
}
.fa-spin {
animation-name: fa-spin;
animation-delay: var(--fa-animation-delay, 0s);
animation-direction: var(--fa-animation-direction, normal);
animation-duration: var(--fa-animation-duration, 2s);
animation-iteration-count: var(--fa-animation-iteration-count, infinite);
animation-timing-function: var(--fa-animation-timing, linear);
}
.fa-spin-reverse {
--fa-animation-direction: reverse;
}
.fa-pulse,
.fa-spin-pulse {
animation-name: fa-spin;
animation-direction: var(--fa-animation-direction, normal);
animation-duration: var(--fa-animation-duration, 1s);
animation-iteration-count: var(--fa-animation-iteration-count, infinite);
animation-timing-function: var(--fa-animation-timing, steps(8));
}
@media (prefers-reduced-motion: reduce) {
.fa-beat,
.fa-bounce,
.fa-fade,
.fa-beat-fade,
.fa-flip,
.fa-pulse,
.fa-shake,
.fa-spin,
.fa-spin-pulse {
animation-delay: -1ms;
animation-duration: 1ms;
animation-iteration-count: 1;
transition-delay: 0s;
transition-duration: 0s;
}
}
@keyframes fa-beat {
0%, 90% {
transform: scale(1);
}
45% {
transform: scale(var(--fa-beat-scale, 1.25));
}
}
@keyframes fa-bounce {
0% {
transform: scale(1, 1) translateY(0);
}
10% {
transform: scale(var(--fa-bounce-start-scale-x, 1.1), var(--fa-bounce-start-scale-y, 0.9)) translateY(0);
}
30% {
transform: scale(var(--fa-bounce-jump-scale-x, 0.9), var(--fa-bounce-jump-scale-y, 1.1)) translateY(var(--fa-bounce-height, -0.5em));
}
50% {
transform: scale(var(--fa-bounce-land-scale-x, 1.05), var(--fa-bounce-land-scale-y, 0.95)) translateY(0);
}
57% {
transform: scale(1, 1) translateY(var(--fa-bounce-rebound, -0.125em));
}
64% {
transform: scale(1, 1) translateY(0);
}
100% {
transform: scale(1, 1) translateY(0);
}
}
@keyframes fa-fade {
50% {
opacity: var(--fa-fade-opacity, 0.4);
}
}
@keyframes fa-beat-fade {
0%, 100% {
opacity: var(--fa-beat-fade-opacity, 0.4);
transform: scale(1);
}
50% {
opacity: 1;
transform: scale(var(--fa-beat-fade-scale, 1.125));
}
}
@keyframes fa-flip {
50% {
transform: rotate3d(var(--fa-flip-x, 0), var(--fa-flip-y, 1), var(--fa-flip-z, 0), var(--fa-flip-angle, -180deg));
}
}
@keyframes fa-shake {
0% {
transform: rotate(-15deg);
}
4% {
transform: rotate(15deg);
}
8%, 24% {
transform: rotate(-18deg);
}
12%, 28% {
transform: rotate(18deg);
}
16% {
transform: rotate(-22deg);
}
20% {
transform: rotate(22deg);
}
32% {
transform: rotate(-12deg);
}
36% {
transform: rotate(12deg);
}
40%, 100% {
transform: rotate(0deg);
}
}
@keyframes fa-spin {
0% {
transform: rotate(0deg);
}
100% {
transform: rotate(360deg);
}
}
.fa-rotate-90 {
transform: rotate(90deg);
}
.fa-rotate-180 {
transform: rotate(180deg);
}
.fa-rotate-270 {
transform: rotate(270deg);
}
.fa-flip-horizontal {
transform: scale(-1, 1);
}
.fa-flip-vertical {
transform: scale(1, -1);
}
.fa-flip-both,
.fa-flip-horizontal.fa-flip-vertical {
transform: scale(-1, -1);
}
.fa-rotate-by {
transform: rotate(var(--fa-rotate-angle, 0));
}
.fa-stack {
display: inline-block;
vertical-align: middle;
height: 2em;
position: relative;
width: 2.5em;
}
.fa-stack-1x,
.fa-stack-2x {
bottom: 0;
left: 0;
margin: auto;
position: absolute;
right: 0;
top: 0;
z-index: var(--fa-stack-z-index, auto);
}
.svg-inline--fa.fa-stack-1x {
height: 1em;
width: 1.25em;
}
.svg-inline--fa.fa-stack-2x {
height: 2em;
width: 2.5em;
}
.fa-inverse {
color: var(--fa-inverse, #fff);
}
.sr-only,
.fa-sr-only {
position: absolute;
width: 1px;
height: 1px;
padding: 0;
margin: -1px;
overflow: hidden;
clip: rect(0, 0, 0, 0);
white-space: nowrap;
border-width: 0;
}
.sr-only-focusable:not(:focus),
.fa-sr-only-focusable:not(:focus) {
position: absolute;
width: 1px;
height: 1px;
padding: 0;
margin: -1px;
overflow: hidden;
clip: rect(0, 0, 0, 0);
white-space: nowrap;
border-width: 0;
}
.svg-inline--fa .fa-primary {
fill: var(--fa-primary-color, currentColor);
opacity: var(--fa-primary-opacity, 1);
}
.svg-inline--fa .fa-secondary {
fill: var(--fa-secondary-color, currentColor);
opacity: var(--fa-secondary-opacity, 0.4);
}
.svg-inline--fa.fa-swap-opacity .fa-primary {
opacity: var(--fa-secondary-opacity, 0.4);
}
.svg-inline--fa.fa-swap-opacity .fa-secondary {
opacity: var(--fa-primary-opacity, 1);
}
.svg-inline--fa mask .fa-primary,
.svg-inline--fa mask .fa-secondary {
fill: black;
}`;function yE(){const e=gE,t=pE,i=mt.cssPrefix,n=mt.replacementClass;let a=iDe;if(i!==e||n!==t){const s=new RegExp("\\.".concat(e,"\\-"),"g"),r=new RegExp("\\--".concat(e,"\\-"),"g"),o=new RegExp("\\.".concat(t),"g");a=a.replace(s,".".concat(i,"-")).replace(r,"--".concat(i,"-")).replace(o,".".concat(n))}return a}let B2=!1;function Tp(){mt.autoAddCss&&!B2&&(J6e(yE()),B2=!0)}var nDe={mixout(){return{dom:{css:yE,insertCss:Tp}}},hooks(){return{beforeDOMElementCreation(){Tp()},beforeI2svg(){Tp()}}}};const Ms=vr||{};Ms[Ps]||(Ms[Ps]={});Ms[Ps].styles||(Ms[Ps].styles={});Ms[Ps].hooks||(Ms[Ps].hooks={});Ms[Ps].shims||(Ms[Ps].shims=[]);var Va=Ms[Ps];const wE=[],CE=function(){_i.removeEventListener("DOMContentLoaded",CE),XA=1,wE.map(e=>e())};let XA=!1;Os&&(XA=(_i.documentElement.doScroll?/^loaded|^c/:/^loaded|^i|^c/).test(_i.readyState),XA||_i.addEventListener("DOMContentLoaded",CE));function aDe(e){Os&&(XA?setTimeout(e,0):wE.push(e))}function dd(e){const{tag:t,attributes:i={},children:n=[]}=e;return typeof e=="string"?vE(e):"<".concat(t," ").concat($6e(i),">").concat(n.map(dd).join(""),"</").concat(t,">")}function S2(e,t,i){if(e&&e[t]&&e[t][i])return{prefix:t,iconName:i,icon:e[t][i]}}var Ip=function(t,i,n,a){var s=Object.keys(t),r=s.length,o=i,l,u,f;for(n===void 0?(l=1,f=t[s[0]]):(l=0,f=n);l<r;l++)u=s[l],f=o(f,t[u],u,t);return f};function sDe(e){const t=[];let i=0;const n=e.length;for(;i<n;){const a=e.charCodeAt(i++);if(a>=55296&&a<=56319&&i<n){const s=e.charCodeAt(i++);(s&64512)==56320?t.push(((a&1023)<<10)+(s&1023)+65536):(t.push(a),i--)}else t.push(a)}return t}function P_(e){const t=sDe(e);return t.length===1?t[0].toString(16):null}function rDe(e,t){const i=e.length;let n=e.charCodeAt(t),a;return n>=55296&&n<=56319&&i>t+1&&(a=e.charCodeAt(t+1),a>=56320&&a<=57343)?(n-55296)*1024+a-56320+65536:n}function D2(e){return Object.keys(e).reduce((t,i)=>{const n=e[i];return!!n.icon?t[n.iconName]=n.icon:t[i]=n,t},{})}function M_(e,t){let i=arguments.length>2&&arguments[2]!==void 0?arguments[2]:{};const{skipHooks:n=!1}=i,a=D2(t);typeof Va.hooks.addPack=="function"&&!n?Va.hooks.addPack(e,D2(t)):Va.styles[e]=Ke(Ke({},Va.styles[e]||{}),a),e==="fas"&&M_("fa",t)}const{styles:Yc,shims:oDe}=Va,EE=Object.keys(J1),lDe=EE.reduce((e,t)=>(e[t]=Object.keys(J1[t]),e),{});let e0=null,kE={},BE={},SE={},DE={},xE={};function uDe(e){return~W6e.indexOf(e)}function cDe(e,t){const i=t.split("-"),n=i[0],a=i.slice(1).join("-");return n===e&&a!==""&&!uDe(a)?a:null}const TE=()=>{const e=n=>Ip(Yc,(a,s,r)=>(a[r]=Ip(s,n,{}),a),{});kE=e((n,a,s)=>(a[3]&&(n[a[3]]=s),a[2]&&a[2].filter(o=>typeof o=="number").forEach(o=>{n[o.toString(16)]=s}),n)),BE=e((n,a,s)=>(n[s]=s,a[2]&&a[2].filter(o=>typeof o=="string").forEach(o=>{n[o]=s}),n)),xE=e((n,a,s)=>{const r=a[2];return n[s]=s,r.forEach(o=>{n[o]=s}),n});const t="far"in Yc||mt.autoFetchSvg,i=Ip(oDe,(n,a)=>{const s=a[0];let r=a[1];const o=a[2];return r==="far"&&!t&&(r="fas"),typeof s=="string"&&(n.names[s]={prefix:r,iconName:o}),typeof s=="number"&&(n.unicodes[s.toString(16)]={prefix:r,iconName:o}),n},{names:{},unicodes:{}});SE=i.names,DE=i.unicodes,e0=qh(mt.styleDefault,{family:mt.familyDefault})};X6e(e=>{e0=qh(e.styleDefault,{family:mt.familyDefault})});TE();function t0(e,t){return(kE[e]||{})[t]}function dDe(e,t){return(BE[e]||{})[t]}function Zr(e,t){return(xE[e]||{})[t]}function IE(e){return SE[e]||{prefix:null,iconName:null}}function fDe(e){const t=DE[e],i=t0("fas",e);return t||(i?{prefix:"fas",iconName:i}:null)||{prefix:null,iconName:null}}function yr(){return e0}const 
PE=()=>({prefix:null,iconName:null,rest:[]});function ADe(e){let t=_n;const i=EE.reduce((n,a)=>(n[a]="".concat(mt.cssPrefix,"-").concat(a),n),{});return AE.forEach(n=>{(e.includes(i[n])||e.some(a=>lDe[n].includes(a)))&&(t=n)}),t}function qh(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{};const{family:i=_n}=t,n=j6e[i][e];if(i===Gh&&!e)return"fad";const a=k2[i][e]||k2[i][n],s=e in Va.styles?e:null;return a||s||null}function hDe(e){let t=[],i=null;return e.forEach(n=>{const a=cDe(mt.cssPrefix,n);a?i=a:n&&t.push(n)}),{iconName:i,rest:t}}funct
`);t.setAttribute(bo,""),t.innerHTML=a}};function L2(e){e()}function UE(e,t){const i=typeof t=="function"?t:xf;if(e.length===0)i();else{let n=L2;mt.mutateApproach===H6e&&(n=vr.requestAnimationFrame||L2),n(()=>{const a=MDe(),s=n0.begin("mutate");e.map(a),s(),i()})}}let a0=!1;function OE(){a0=!0}function H_(){a0=!1}let JA=null;function R2(e){if(!y2||!mt.observeMutations)return;const{treeCallback:t=xf,nodeCallback:i=xf,pseudoElementsCallback:n=xf,observeMutationsRoot:a=_i}=e;JA=new y2(s=>{if(a0)return;const r=yr();Vl(s).forEach(o=>{if(o.type==="childList"&&o.addedNodes.length>0&&!M2(o.addedNodes[0])&&(mt.searchPseudoElements&&n(o.target),t(o.target)),o.type==="attributes"&&o.target.parentNode&&mt.searchPseudoElements&&n(o.target.parentNode),o.type==="attributes"&&M2(o.target)&&~V6e.indexOf(o.attributeName))if(o.attributeName==="class"&&IDe(o.target)){const{prefix:l,iconName:u}=Vh(Z1(o.target));o.target.setAttribute(Y1,l||r),u&&o.target.setAttribute(X1,u)}else PDe(o.target)&&i(o.target)})}),Os&&JA.observe(a,{childList:!0,attributes:!0,characterData:!0,subtree:!0})}function ODe(){JA&&JA.disconnect()}function NDe(e){const t=e.getAttribute("style");let i=[];return t&&(i=t.split(";").reduce((n,a)=>{const s=a.split(":"),r=s[0],o=s.slice(1);return r&&o.length>0&&(n[r]=o.join(":").trim()),n},{})),i}function HDe(e){const t=e.getAttribute("data-prefix"),i=e.getAttribute("data-icon"),n=e.innerText!==void 0?e.innerText.trim():"";let a=Vh(Z1(e));return a.prefix||(a.prefix=yr()),t&&i&&(a.prefix=t,a.iconName=i),a.iconName&&a.prefix||(a.prefix&&n.length>0&&(a.iconName=dDe(a.prefix,e.innerText)||t0(a.prefix,P_(e.innerText))),!a.iconName&&mt.autoFetchSvg&&e.firstChild&&e.firstChild.nodeType===Node.TEXT_NODE&&(a.iconName=e.firstChild.data)),a}function QDe(e){const t=Vl(e.attributes).reduce((a,s)=>(a.name!=="class"&&a.name!=="style"&&(a[s.name]=s.value),a),{}),i=e.getAttribute("title"),n=e.getAttribute("data-fa-title-id");return mt.autoA11y&&(i?t["aria-labelledby"]="".concat(mt.replacementClass,"-title-").concat(n||Kc()):(t["aria-hidden"]="true",t.focusable="false")),t}function jDe(){return{iconName:null,title:null,titleId:null,prefix:null,transform:qa,symbol:!1,mask:{iconName:null,prefix:null,rest:[]},maskId:null,extra:{classes:[],styles:{},attributes:{}}}}function U2(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{styleParser:!0};const{iconName:i,prefix:n,rest:a}=HDe(e),s=QDe(e),r=L_("parseNodeAttributes",{},e);let o=t.styleParser?NDe(e):[];return Ke({iconName:i,title:e.getAttribute("title"),titleId:e.getAttribute("data-fa-title-id"),prefix:n,transform:qa,mask:{iconName:null,prefix:null,rest:[]},maskId:null,symbol:!1,extra:{classes:a,styles:o,attributes:s}},r)}const{styles:GDe}=Va;function NE(e){const t=mt.autoReplaceSvg==="nest"?U2(e,{styleParser:!1}):U2(e);return~t.extra.classes.indexOf(FE)?wr("generateLayersText",e,t):wr("generateSvgReplacementMutation",e,t)}function zDe(){return[...C6e,...B_]}function O2(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:null;if(!Os)return Promise.resolve();const i=_i.documentElement.classList,n=f=>i.add("".concat(E2,"-").concat(f)),a=f=>i.remove("".concat(E2,"-").concat(f)),s=mt.autoFetchSvg?zDe():fE.concat(Object.keys(GDe));s.includes("fa")||s.push("fa");const r=[".".concat(FE,":not([").concat(bo,"])")].concat(s.map(f=>".".concat(f,":not([").concat(bo,"])"))).join(", ");if(r.length===0)return Promise.resolve();let o=[];try{o=Vl(e.querySelectorAll(r))}catch{}if(o.length>0)n("pending"),a("complete");else return Promise.resolve();const 
l=n0.begin("onTree"),u=o.reduce((f,A)=>{try{const h=NE(A);h&&f.push(h)}catch(h){mE||h.name==="MissingIcon"&&console.error(h)}return f},[]);return new Promise((f,A)=>{Promise.all(u).then(h=>{UE(h,()=>{n("active"),n("complete"),a("pending"),typeof t=="function"&&t(),l(),f()})}).catch(h=>{l(),A(h)})})}function qDe(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:null;NE(e).then(i=>{i&&UE([i],t)})}function VDe(e){return function(t){let i=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{};const n=(t||{}).icon?t:R_(t||{});let{m
`),e.removeAttribute(i),n()}).catch(a)}else n()}else n()})}function ixe(e){return Promise.all([Q2(e,"::before"),Q2(e,"::after")])}function nxe(e){return e.parentNode!==document.head&&!~Q6e.indexOf(e.tagName.toUpperCase())&&!e.getAttribute(D_)&&(!e.parentNode||e.parentNode.tagName!=="svg")}function j2(e){if(Os)return new Promise((t,i)=>{const n=Vl(e.querySelectorAll("*")).filter(nxe).map(ixe),a=n0.begin("searchPseudoElements");OE(),Promise.all(n).then(()=>{a(),H_(),t()}).catch(()=>{a(),H_(),i()})})}var axe={hooks(){return{mutationObserverCallbacks(e){return e.pseudoElementsCallback=j2,e}}},provides(e){e.pseudoElements2svg=function(t){const{node:i=_i}=t;mt.searchPseudoElements&&j2(i)}}};let G2=!1;var sxe={mixout(){return{dom:{unwatch(){OE(),G2=!0}}}},hooks(){return{bootstrap(){R2(L_("mutationObserverCallbacks",{}))},noAuto(){ODe()},watch(e){const{observeMutationsRoot:t}=e;G2?H_():R2(L_("mutationObserverCallbacks",{observeMutationsRoot:t}))}}}};const z2=e=>{let t={size:16,x:0,y:0,flipX:!1,flipY:!1,rotate:0};return e.toLowerCase().split(" ").reduce((i,n)=>{const a=n.toLowerCase().split("-"),s=a[0];let r=a.slice(1).join("-");if(s&&r==="h")return i.flipX=!0,i;if(s&&r==="v")return i.flipY=!0,i;if(r=parseFloat(r),isNaN(r))return i;switch(s){case"grow":i.size=i.size+r;break;case"shrink":i.size=i.size-r;break;case"left":i.x=i.x-r;break;case"right":i.x=i.x+r;break;case"up":i.y=i.y-r;break;case"down":i.y=i.y+r;break;case"rotate":i.rotate=i.rotate+r;break}return i},t)};var rxe={mixout(){return{parse:{transform:e=>z2(e)}}},hooks(){return{parseNodeAttributes(e,t){const i=t.getAttribute("data-fa-transform");return i&&(e.transform=z2(i)),e}}},provides(e){e.generateAbstractTransformGrouping=function(t){let{main:i,transform:n,containerWidth:a,iconWidth:s}=t;const r={transform:"translate(".concat(a/2," 256)")},o="translate(".concat(n.x*32,", ").concat(n.y*32,") "),l="scale(".concat(n.size/16*(n.flipX?-1:1),", ").concat(n.size/16*(n.flipY?-1:1),") "),u="rotate(".concat(n.rotate," 0 0)"),f={transform:"".concat(o," ").concat(l," ").concat(u)},A={transform:"translate(".concat(s/2*-1," -256)")},h={outer:r,inner:f,path:A};return{tag:"g",attributes:Ke({},h.outer),children:[{tag:"g",attributes:Ke({},h.inner),children:[{tag:i.icon.tag,children:i.icon.children,attributes:Ke(Ke({},i.icon.attributes),h.path)}]}]}}}};const Mp={x:0,y:0,width:"100%",height:"100%"};function q2(e){let t=arguments.length>1&&arguments[1]!==void 0?arguments[1]:!0;return e.attributes&&(e.attributes.fill||t)&&(e.attributes.fill="black"),e}function oxe(e){return e.tag==="g"?e.children:[e]}var lxe={hooks(){return{parseNodeAttributes(e,t){const i=t.getAttribute("data-fa-mask"),n=i?Vh(i.split(" ").map(a=>a.trim())):PE();return n.prefix||(n.prefix=yr()),e.mask=n,e.maskId=t.getAttribute("data-fa-mask-id"),e}}},provides(e){e.generateAbstractMask=function(t){let{children:i,attributes:n,main:a,mask:s,maskId:r,transform:o}=t;const{width:l,icon:u}=a,{width:f,icon:A}=s,h=eDe({transform:o,containerWidth:f,iconWidth:l}),p={tag:"rect",attributes:Ke(Ke({},Mp),{},{fill:"white"})},F=u.children?{children:u.children.map(q2)}:{},y={tag:"g",attributes:Ke({},h.inner),children:[q2(Ke({tag:u.tag,attributes:Ke(Ke({},u.attributes),h.path)},F))]},E={tag:"g",attributes:Ke({},h.outer),children:[y]},w="mask-".concat(r||Kc()),C="clip-".concat(r||Kc()),B={tag:"mask",attributes:Ke(Ke({},Mp),{},{id:w,maskUnits:"userSpaceOnUse",maskContentUnits:"userSpaceOnUse"}),children:[p,E]},S={tag:"defs",children:[{tag:"clipPath",attributes:{id:C},children:oxe(A)},B]};return 
i.push(S,{tag:"rect",attributes:Ke({fill:"currentColor","clip-path":"url(#".concat(C,")"),mask:"url(#".concat(w,")")},Mp)}),{children:i,attributes:n}}}},uxe={provides(e){let t=!1;vr.matchMedia&&(t=vr.matchMedia("(prefers-reduced-motion: reduce)").matches),e.missingIconAbstract=function(){const i=[],n={fill:"currentColor"},a={attributeType:"XML",repeatCount:"indefinite",dur:"2s"};i.push({tag:"path",attributes:Ke(Ke({},n),{},{d:"M156.5,447.7l-12.6,29.5c-18.7-9.5-35.9-21.2-51.5-34.9l22.7-22.7C127.6,430.5,141.5,440,156.5,447.7z M40.6,272H8.5 c1.4,21.2,5.4,41
* Font Awesome Free 6.7.2 by @fontawesome - https://fontawesome.com
* License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
* Copyright 2024 Fonticons, Inc.
*/const kxe={prefix:"fas",iconName:"forward-step",icon:[320,512,["step-forward"],"f051","M52.5 440.6c-9.5 7.9-22.8 9.7-34.1 4.4S0 428.4 0 416L0 96C0 83.6 7.2 72.3 18.4 67s24.5-3.6 34.1 4.4l192 160L256 241l0-145c0-17.7 14.3-32 32-32s32 14.3 32 32l0 320c0 17.7-14.3 32-32 32s-32-14.3-32-32l0-145-11.5 9.6-192 160z"]},Bxe=kxe,Sxe={prefix:"fas",iconName:"address-book",icon:[512,512,[62138,"contact-book"],"f2b9","M96 0C60.7 0 32 28.7 32 64l0 384c0 35.3 28.7 64 64 64l288 0c35.3 0 64-28.7 64-64l0-384c0-35.3-28.7-64-64-64L96 0zM208 288l64 0c44.2 0 80 35.8 80 80c0 8.8-7.2 16-16 16l-192 0c-8.8 0-16-7.2-16-16c0-44.2 35.8-80 80-80zm-32-96a64 64 0 1 1 128 0 64 64 0 1 1 -128 0zM512 80c0-8.8-7.2-16-16-16s-16 7.2-16 16l0 64c0 8.8 7.2 16 16 16s16-7.2 16-16l0-64zM496 192c-8.8 0-16 7.2-16 16l0 64c0 8.8 7.2 16 16 16s16-7.2 16-16l0-64c0-8.8-7.2-16-16-16zm16 144c0-8.8-7.2-16-16-16s-16 7.2-16 16l0 64c0 8.8 7.2 16 16 16s16-7.2 16-16l0-64z"]},Dxe={prefix:"fas",iconName:"backward",icon:[512,512,[9194],"f04a","M459.5 440.6c9.5 7.9 22.8 9.7 34.1 4.4s18.4-16.6 18.4-29l0-320c0-12.4-7.2-23.7-18.4-29s-24.5-3.6-34.1 4.4L288 214.3l0 41.7 0 41.7L459.5 440.6zM256 352l0-96 0-128 0-32c0-12.4-7.2-23.7-18.4-29s-24.5-3.6-34.1 4.4l-192 160C4.2 237.5 0 246.5 0 256s4.2 18.5 11.5 24.6l192 160c9.5 7.9 22.8 9.7 34.1 4.4s18.4-16.6 18.4-29l0-64z"]},xxe={prefix:"fas",iconName:"volume-low",icon:[448,512,[128264,"volume-down"],"f027","M301.1 34.8C312.6 40 320 51.4 320 64l0 384c0 12.6-7.4 24-18.9 29.2s-25 3.1-34.4-5.3L131.8 352 64 352c-35.3 0-64-28.7-64-64l0-64c0-35.3 28.7-64 64-64l67.8 0L266.7 40.1c9.4-8.4 22.9-10.4 34.4-5.3zM412.6 181.5C434.1 199.1 448 225.9 448 256s-13.9 56.9-35.4 74.5c-10.3 8.4-25.4 6.8-33.8-3.5s-6.8-25.4 3.5-33.8C393.1 284.4 400 271 400 256s-6.9-28.4-17.7-37.3c-10.3-8.4-11.8-23.5-3.5-33.8s23.5-11.8 33.8-3.5z"]},Txe={prefix:"fas",iconName:"lock",icon:[448,512,[128274],"f023","M144 144l0 48 160 0 0-48c0-44.2-35.8-80-80-80s-80 35.8-80 80zM80 192l0-48C80 64.5 144.5 0 224 0s144 64.5 144 144l0 48 16 0c35.3 0 64 28.7 64 64l0 192c0 35.3-28.7 64-64 64L64 512c-35.3 0-64-28.7-64-64L0 256c0-35.3 28.7-64 64-64l16 0z"]},Ixe={prefix:"fas",iconName:"angle-right",icon:[320,512,[8250],"f105","M278.6 233.4c12.5 12.5 12.5 32.8 0 45.3l-160 160c-12.5 12.5-32.8 12.5-45.3 0s-12.5-32.8 0-45.3L210.7 256 73.4 118.6c-12.5-12.5-12.5-32.8 0-45.3s32.8-12.5 45.3 0l160 160z"]},Pxe={prefix:"fas",iconName:"globe",icon:[512,512,[127760],"f0ac","M352 256c0 22.2-1.2 43.6-3.3 64l-185.3 0c-2.2-20.4-3.3-41.8-3.3-64s1.2-43.6 3.3-64l185.3 0c2.2 20.4 3.3 41.8 3.3 64zm28.8-64l123.1 0c5.3 20.5 8.1 41.9 8.1 64s-2.8 43.5-8.1 64l-123.1 0c2.1-20.6 3.2-42 3.2-64s-1.1-43.4-3.2-64zm112.6-32l-116.7 0c-10-63.9-29.8-117.4-55.3-151.6c78.3 20.7 142 77.5 171.9 151.6zm-149.1 0l-176.6 0c6.1-36.4 15.5-68.6 27-94.7c10.5-23.6 22.2-40.7 33.5-51.5C239.4 3.2 248.7 0 256 0s16.6 3.2 27.8 13.8c11.3 10.8 23 27.9 33.5 51.5c11.6 26 20.9 58.2 27 94.7zm-209 0L18.6 160C48.6 85.9 112.2 29.1 190.6 8.4C165.1 42.6 145.3 96.1 135.3 160zM8.1 192l123.1 0c-2.1 20.6-3.2 42-3.2 64s1.1 43.4 3.2 64L8.1 320C2.8 299.5 0 278.1 0 256s2.8-43.5 8.1-64zM194.7 446.6c-11.6-26-20.9-58.2-27-94.6l176.6 0c-6.1 36.4-15.5 68.6-27 94.6c-10.5 23.6-22.2 40.7-33.5 51.5C272.6 508.8 263.3 512 256 512s-16.6-3.2-27.8-13.8c-11.3-10.8-23-27.9-33.5-51.5zM135.3 352c10 63.9 29.8 117.4 55.3 151.6C112.2 482.9 48.6 426.1 18.6 352l116.7 0zm358.1 0c-30 74.1-93.6 130.9-171.9 151.6c25.5-34.2 45.2-87.7 55.3-151.6l116.7 0z"]},Mxe={prefix:"fas",iconName:"server",icon:[512,512,[],"f233","M64 32C28.7 32 0 60.7 0 96l0 64c0 35.3 28.7 64 64 64l384 
0c35.3 0 64-28.7 64-64l0-64c0-35.3-28.7-64-64-64L64 32zm280 72a24 24 0 1 1 0 48 24 24 0 1 1 0-48zm48 24a24 24 0 1 1 48 0 24 24 0 1 1 -48 0zM64 288c-35.3 0-64 28.7-64 64l0 64c0 35.3 28.7 64 64 64l384 0c35.3 0 64-28.7 64-64l0-64c0-35.3-28.7-64-64-64L64 288zm280 72a24 24 0 1 1 0 48 24 24 0 1 1 0-48zm56 24a24 24 0 1 1 48 0 24 24 0 1 1 -48 0z"]},Lxe={prefix:"fas",iconName:"pause",icon:[320,512,[9208],"f04c","M48 64C21.5 64 0 85.5 0 112L0 400c0 26.5 21.5 48 48 48l32 0c26.5 0 48-21.5 48-48l0-288c0-26.5-21.5-48-48-48L48 64zm192 0c-26.5 0-48 21.