Skip to content

Commit 9375da1

Browse files
committed
deploy: e1d54af
1 parent 093eb5d commit 9375da1

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

109 files changed

+290
-108
lines changed

bundle_index.js

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
// Post-build bundling script for the neural-network model code:
//  1. bundle every layer implementation into lib/model/nns/layer/index.js,
//  2. rewrite each individual layer file as a thin re-export of that bundle
//     so existing deep imports (e.g. `lib/model/nns/layer/apl.js`) keep working,
//  3. bundle the optimizer and the ONNX layer/operator indices the same way.
import fs from 'fs'
import ts from 'typescript'
import { rollup } from 'rollup'

// Bundle all layers. Dots in `.js` are escaped so each pattern matches only a
// literal file-name suffix instead of "any character followed by js".
const bundleLayers = await rollup({
	external: [/lib\/model\/nns\/layer\/base\.js/, /lib\/model\/[^/]+\.js/, /lib\/util/, /node_modules/],
	input: 'lib/model/nns/layer/index.js',
})
await bundleLayers.write({ format: 'esm', file: './lib/model/nns/layer/index.js' })

// Replace every individual layer file with a re-export stub pointing at the
// freshly written bundle. The exported class name is recovered from the
// original source with the TypeScript parser.
const layerDir = './lib/model/nns/layer'
const files = await fs.promises.readdir(layerDir)
for (const file of files) {
	if (file === 'base.js' || file === 'index.js') {
		continue // base.js stays external; index.js is the bundle itself
	}
	const path = `${layerDir}/${file}`
	const text = await fs.promises.readFile(path, 'utf8')
	const source = ts.createSourceFile(path, text, ts.ScriptTarget.Latest)
	let className = null
	source.forEachChild(node => {
		// Take the first named class declaration; ignore any later ones.
		// `node.name` can be absent (e.g. `export default class { ... }`).
		if (ts.isClassDeclaration(node) && !className) {
			className = node.name?.escapedText ?? null
		}
	})
	if (!className) {
		// Without this guard the stub below would interpolate the string
		// "null" and silently corrupt the file — fail loudly instead.
		throw new Error(`No named class declaration found in ${path}`)
	}
	await fs.promises.writeFile(path, `import { ${className} } from './index.js'\nexport default ${className}`)
}

// The remaining bundles read and write inside lib/model/nns as well, so they
// are run sequentially to avoid overlapping reads/writes.
const bundleOptimizer = await rollup({
	external: [/lib\/util/, /node_modules/],
	input: 'lib/model/nns/optimizer.js',
})
await bundleOptimizer.write({ format: 'esm', file: './lib/model/nns/optimizer.js' })

const bundleONNXLayer = await rollup({
	external: [/lib\/model\/nns\/onnx\/[^/]+\.js/, /lib\/util/, /node_modules/],
	input: 'lib/model/nns/onnx/layer/index.js',
})
await bundleONNXLayer.write({ format: 'esm', file: './lib/model/nns/onnx/layer/index.js' })

const bundleONNXOperator = await rollup({
	external: [/lib\/model\/nns\/onnx\/[^/]+\.js/, /lib\/util/, /node_modules/],
	input: 'lib/model/nns/onnx/operators/index.js',
})
await bundleONNXOperator.write({ format: 'esm', file: './lib/model/nns/onnx/operators/index.js' })
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
var _=Object.defineProperty;var l=(r,t)=>_(r,"name",{value:t,configurable:!0});import e from"../../../util/matrix.js";import o from"../../neuralnetwork.js";import{FlowLayer as n}from"./base.js";export default class a extends n{static{l(this,"AdditiveCoupling")}constructor({d:t=null,net:s=null,...i}){super(i),this._d=t,this._m=s==null?null:s instanceof o?s:o.fromObject(s)}calc(t){this._d||(this._d=Math.floor(t.cols/2)),this._m||(this._m=o.fromObject([{type:"input"},{type:"full",out_size:20,activation:"leaky_relu"},{type:"full",out_size:t.cols-this._d,activation:"leaky_relu"}],null,"adam")),this._o=t.copy();const s=e.zeros(...t.sizes);return s.set(0,this._d,this._m.calc(t.slice(0,this._d,1))),this._o.add(s),this._o}inverse(t){this._o=t.copy();const s=e.zeros(...t.sizes);return s.set(0,this._d,this._m.calc(t.slice(0,this._d,1))),this._o.sub(s),this._o}jacobianDeterminant(){return 1}grad(t){const s=t.copy(),i=e.zeros(...t.sizes),c=this._m.grad(t.slice(this._d,null,1));return i.set(0,0,c),s.add(i),s}update(t){this._m.update(t.lr)}toObject(){return{type:"additive_coupling",net:this._m?.toObject()}}}a.registLayer();
1+
import{AdditiveCoupling as i}from"./index.js";export default i;

lib/model/nns/layer/apl.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
var c=Object.defineProperty;var _=(r,i)=>c(r,"name",{value:i,configurable:!0});import y from"./base.js";export default class l extends y{static{_(this,"AdaptivePiecewiseLinearLayer")}constructor({s:i=2,a=null,b:e=0,...t}){if(super(t),this._s=i,Array.isArray(a))this._a=a;else{this._a=[];for(let s=0;s<i;s++)this._a[s]=a??Math.random()}this._b=Array.isArray(e)?e:Array(i).fill(e),this._l2_decay=.001}calc(i){this._i=i;const a=i.copy();return a.map(e=>this._a.reduce((t,s,h)=>t+s*Math.max(0,this._b[h]-e),Math.max(0,e))),a}grad(i){this._bo=i;const a=i.copy();return a.broadcastOperate(this._i,(e,t)=>e*this._a.reduce((s,h,o)=>s+(this._b[o]-t>0?h:0),t>0?1:0)),a}update(i){const a=Array(this._s).fill(0),e=Array(this._s).fill(0);for(let t=0;t<this._i.length;t++)for(let s=0;s<this._s;s++)this._b[s]-this._i.value[t]>0&&(a[s]+=this._bo.value[t]*(this._b[s]-this._i.value[t]),e[s]+=this._bo.value[t]*this._a[s]);for(let t=0;t<this._s;t++)this._a[t]-=i.delta(`a${t}`,a[t]/this._i.length+this._a[t]*this._l2_decay),this._b[t]-=i.delta(`b${t}`,e[t]/this._i.length+this._b[t]*this._l2_decay)}toObject(){return{type:"apl",s:this._s,a:this._a,b:this._b}}}l.registLayer("apl");
1+
import{AdaptivePiecewiseLinearLayer as e}from"./index.js";export default e;

lib/model/nns/layer/aranda.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
var o=Object.defineProperty;var i=(e,t)=>o(e,"name",{value:t,configurable:!0});import c from"./base.js";export default class h extends c{static{i(this,"ArandaLayer")}constructor({l:t=2,...a}){super(a),this._l=t}calc(t){this._i=t;const a=t.copy();return a.map(r=>1-1/(1+this._l*Math.exp(r))**(1/this._l)),a}grad(t){this._bo=t;const a=t.copy();return a.broadcastOperate(this._i,(r,s)=>r*Math.exp(s)*(1+this._l*Math.exp(s))**(-1/this._l-1)),a}toObject(){return{type:"aranda",l:this._l}}}h.registLayer("aranda");
1+
import{ArandaLayer as r}from"./index.js";export default r;

lib/model/nns/layer/argmax.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
var u=Object.defineProperty;var d=(c,e)=>u(c,"name",{value:e,configurable:!0});import y from"./base.js";import z from"../../../util/matrix.js";import p from"../../../util/tensor.js";export default class f extends y{static{d(this,"ArgmaxLayer")}constructor({axis:e=-1,keepdims:s=!0,...r}){super(r),this._axis=e,this._keepdims=s}calc(e){!this._keepdims&&e instanceof z&&(e=p.fromArray(e)),this._i=e;const s=this._axis<0?this._axis+e.dimension:this._axis;return this._o=this._i.reduce((r,h,a)=>r[0]<h?[h,a[s]]:r,[-1/0,-1],s,this._keepdims),this._o.map(r=>r[1]),this._o}grad(e){const s=this._axis<0?this._axis+this._i.dimension:this._axis;if(this._bo=e.copy(),this._bo.dimension!==this._i.dimension){const i=this._bo.sizes.concat();i.splice(s,0,1),this._bo.reshape(...i)}this._bo.repeat(this._i.sizes[s],s);const r=Array(this._i.dimension).fill(1);r[s]=this._i.sizes[s];const h=new p(r,Array.from({length:this._i.sizes[s]},(i,t)=>t));this._bo.broadcastOperate(h,(i,t)=>i*t);const a=this._i.copy(),b=a.reduce((i,t)=>Math.max(i,t),-1/0,s,!0);a.broadcastOperate(b,(i,t)=>i-t),a.map(Math.exp);const x=a.reduce((i,t)=>i+t,0,s,!0);a.broadcastOperate(x,(i,t)=>i/t),this._bi=this._bo.copy();const o=Array(this._i.dimension).fill(0);do{for(let i=0;i<this._i.sizes[s];i++){o[s]=i;const t=a.at(o);let m=0;const _=o.concat();for(let n=0;n<this._i.sizes[s];n++){_[s]=n;const l=i===n?1-t:-t;m+=a.at(_)*l*this._bo.at(_)}this._bi.set(o,m)}for(let i=0;i<o.length;i++){if(i===s){o[i]=0;continue}if(o[i]++,o[i]<this._i.sizes[i])break;o[i]=0}}while(o.some(i=>i>0));return this._bi}toObject(){return{type:"argmax",axis:this._axis,keepdims:this._keepdims}}}f.registLayer();
1+
import{ArgmaxLayer as r}from"./index.js";export default r;

lib/model/nns/layer/argmin.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
var u=Object.defineProperty;var d=(c,e)=>u(c,"name",{value:e,configurable:!0});import y from"./base.js";import z from"../../../util/matrix.js";import p from"../../../util/tensor.js";export default class f extends y{static{d(this,"ArgminLayer")}constructor({axis:e=-1,keepdims:s=!0,...r}){super(r),this._axis=e,this._keepdims=s}calc(e){!this._keepdims&&e instanceof z&&(e=p.fromArray(e)),this._i=e;const s=this._axis<0?this._axis+e.dimension:this._axis;return this._o=this._i.reduce((r,n,a)=>r[0]>n?[n,a[s]]:r,[1/0,-1],s,this._keepdims),this._o.map(r=>r[1]),this._o}grad(e){const s=this._axis<0?this._axis+this._i.dimension:this._axis;if(this._bo=e.copy(),this._bo.dimension!==this._i.dimension){const i=this._bo.sizes.concat();i.splice(s,0,1),this._bo.reshape(...i)}this._bo.repeat(this._i.sizes[s],s);const r=Array(this._i.dimension).fill(1);r[s]=this._i.sizes[s];const n=new p(r,Array.from({length:this._i.sizes[s]},(i,t)=>t));this._bo.broadcastOperate(n,(i,t)=>i*t);const a=this._i.copy(),b=a.reduce((i,t)=>Math.max(i,t),-1/0,s,!0);a.broadcastOperate(b,(i,t)=>t-i),a.map(Math.exp);const l=a.reduce((i,t)=>i+t,0,s,!0);a.broadcastOperate(l,(i,t)=>i/t),this._bi=this._bo.copy();const o=Array(this._i.dimension).fill(0);do{for(let i=0;i<this._i.sizes[s];i++){o[s]=i;const t=a.at(o);let m=0;const _=o.concat();for(let h=0;h<this._i.sizes[s];h++){_[s]=h;const x=i===h?t-1:t;m+=a.at(_)*x*this._bo.at(_)}this._bi.set(o,m)}for(let i=0;i<o.length;i++){if(i===s){o[i]=0;continue}if(o[i]++,o[i]<this._i.sizes[i])break;o[i]=0}}while(o.some(i=>i>0));return this._bi}toObject(){return{type:"argmin",axis:this._axis,keepdims:this._keepdims}}}f.registLayer();
1+
import{ArgminLayer as r}from"./index.js";export default r;

lib/model/nns/layer/attention.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
var y=Object.defineProperty;var p=(c,t)=>y(c,"name",{value:t,configurable:!0});import u from"../../../util/matrix.js";import b from"../../../util/tensor.js";import M from"./base.js";export default class z extends M{static{p(this,"AttentionLayer")}constructor({dk:t=null,dv:h=null,wq:_=null,wk:d=null,wv:e=null,...r}){super(r),this._dk=t,this._dv=h,this._wq=null,typeof _=="string"?this._wqname=_:_&&(this._wq=u.fromArray(_)),this._wk=null,typeof d=="string"?this._wkname=d:d&&(this._wk=u.fromArray(d)),this._wv=null,typeof e=="string"?this._wvname=e:e&&(this._wv=u.fromArray(e))}get dependentLayers(){const t=[];return this._wqname&&t.push(this._wqname),this._wkname&&t.push(this._wkname),this._wvname&&t.push(this._wvname),t}calc(t,h){this._selfattention=!h,h||(h=t),this._dk??=t.sizes.at(-1),this._wqname&&(this._wq=this.graph.getNode(this._wqname).outputValue),this._wq||(this._wq=u.randn(t.sizes[2],this._dk)),this._wkname&&(this._wk=this.graph.getNode(this._wkname).outputValue),this._wk||(this._wk=u.randn(h.sizes[2],this._dk)),this._dv??=t.sizes.at(-1),this._wvname&&(this._wv=this.graph.getNode(this._wvname).outputValue),this._wv||(this._wv=u.randn(h.sizes[2],this._dv)),this._i=t,this._m=h,this._q=t.dot(this._wq),this._k=h.dot(this._wk),this._v=h.dot(this._wv);const _=this._matmul(this._q,this._k,!1,!0);this._atn=_.copy();for(let e=0;e<_.sizes[0];e++)for(let r=0;r<_.sizes[1];r++){let w=[];for(let i=0;i<_.sizes[2];i++)w[i]=_.at(e,r,i)/Math.sqrt(this._dk);const o=w.reduce((i,f)=>Math.max(i,f),-1/0);let n=0;for(let i=0;i<_.sizes[2];i++)w[i]=Math.exp(w[i]-o),n+=w[i];for(let i=0;i<_.sizes[2];i++)this._atn.set([e,r,i],w[i]/n)}return this._matmul(this._atn,this._v)}_matmul(t,h,_=!1,d=!1){const e=[t.sizes[0],_?t.sizes[2]:t.sizes[1],d?h.sizes[1]:h.sizes[2]],r=_?t.sizes[1]:t.sizes[2],w=new b(e);for(let o=0;o<e[0];o++)for(let n=0;n<e[1];n++)for(let i=0;i<e[2];i++){let f=0;for(let l=0;l<r;l++)f+=(_?t.at(o,l,n):t.at(o,n,l))*(d?h.at(o,i,l):h.at(o,l,i));w.set([o,n,i],f)}return 
w}grad(t){const h=t.sizes[0],_=this._matmul(this._atn,t,!0),d=this._matmul(this._m,_,!0);this._dwv=d.reduce((s,a)=>s+a,0,0).toMatrix(),this._dwv.map(s=>s/h);const e=this._matmul(t,this._v,!1,!0),r=e.copy();for(let s=0;s<e.sizes[0];s++)for(let a=0;a<e.sizes[1];a++)for(let m=0;m<e.sizes[2];m++){const v=e.at(s,a,m);let q=0;for(let k=0;k<e.sizes[2];k++){const g=m===k?1-v:-v;q+=this._atn.at(s,a,k)*g*e.at(s,a,k)}r.set([s,a,m],q/Math.sqrt(this._dk))}const w=this._matmul(r,this._k),o=this._matmul(this._i,w,!0);this._dwq=o.reduce((s,a)=>s+a,0,0).toMatrix(),this._dwq.map(s=>s/h);const n=w.dot(this._wq.t),i=this._matmul(r,this._q,!0),f=this._matmul(this._m,i,!0);this._dwk=f.reduce((s,a)=>s+a,0,0).toMatrix(),this._dwk.map(s=>s/h);const l=i.dot(this._wk.t);if(l.broadcastOperate(_.dot(this._wv.t),(s,a)=>s+a),this._selfattention&&n.broadcastOperate(l,(s,a)=>s+a),this._wqname||this._wkname||this._wvname){const s={};return this._wqname&&(s[this._wqname]=this._dwq),this._wkname&&(s[this._wkname]=this._dwk),this._wvname&&(s[this._wvname]=this._dwv),this._selfattention?[n,s]:[n,l,s]}return this._selfattention?n:[n,l]}update(t){this._wqname||this._wq.sub(t.delta("wq",this._dwq)),this._wkname||this._wk.sub(t.delta("wk",this._dwk)),this._wvname||this._wv.sub(t.delta("wv",this._dwv))}toObject(){return{type:"attention",dk:this._dk,dv:this._dv,wq:this._wqname||this._wq?.toArray(),wk:this._wkname||this._wk?.toArray(),wv:this._wvname||this._wv?.toArray()}}}z.registLayer();
1+
import{AttentionLayer as t}from"./index.js";export default t;

lib/model/nns/layer/averagepool.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
var k=Object.defineProperty;var p=(c,i)=>k(c,"name",{value:i,configurable:!0});import z,{NeuralnetworkLayerException as m}from"./base.js";import g from"../../../util/tensor.js";export default class y extends z{static{p(this,"AveragePoolLayer")}constructor({kernel:i,stride:l=null,padding:_=null,channel_dim:a=-1,...t}){if(super(t),this._kernel=i,this._stride=l||i,this._padding=_||0,this._channel_dim=a,this._channel_dim!==-1&&this._channel_dim!==1)throw new m("Invalid channel dimension.")}_index(i,l,_){return this._channel_dim===-1?[i,..._,l]:[i,l,..._]}calc(i){if(Array.isArray(this._kernel)||(this._kernel=Array(i.dimension-2).fill(this._kernel)),i.dimension!==this._kernel.length+2)throw new m("Invalid kernel size",[this,i]);if(Array.isArray(this._stride)||(this._stride=Array(i.dimension-2).fill(this._stride)),i.dimension!==this._stride.length+2)throw new m("Invalid stride size",[this,i]);if(Array.isArray(this._padding)?Array.isArray(this._padding[0])||(this._padding=this._padding.map(t=>[t,t])):this._padding=Array.from({length:i.dimension-2},()=>[this._padding,this._padding]),i.dimension!==this._padding.length+2)throw new m("Invalid padding size",[this,i]);this._i=i;const l=this._channel_dim===-1?1:2,_=[i.sizes[0],...this._kernel.map((t,s)=>Math.ceil(Math.max(0,i.sizes[s+l]+this._padding[s][0]+this._padding[s][1]-t)/this._stride[s])+1)];this._channel_dim===-1?_.push(i.sizes[i.dimension-1]):this._channel_dim===1&&_.splice(1,0,i.sizes[1]);const a=this._channel_dim===-1?i.sizes[i.dimension-1]:i.sizes[1];this._o=new g(_);for(let t=0;t<i.sizes[0];t++)for(let s=0;s<a;s++){const n=Array(i.dimension-2).fill(0);do{const d=Array(i.dimension-2).fill(0);let h=0,r=0;do{const e=n.map((o,f)=>o*this._stride[f]-this._padding[f][0]+d[f]);e.every((o,f)=>0<=o&&o<i.sizes[f+l])&&(h+=i.at(this._index(t,s,e)),r++);for(let o=0;o<d.length&&(d[o]++,!(d[o]<this._kernel[o]));o++)d[o]=0}while(d.some(e=>e>0));this._o.set(this._index(t,s,n),h/r);for(let 
e=0;e<n.length&&(n[e]++,!(n[e]<_[e+l]));e++)n[e]=0}while(n.some(d=>d>0))}return this._o}grad(i){this._bo=i,this._bi=new g(this._i.sizes);const l=this._channel_dim===-1?1:2,_=this._channel_dim===-1?this._i.sizes[this._i.dimension-1]:this._i.sizes[1];for(let a=0;a<this._i.sizes[0];a++)for(let t=0;t<_;t++){const s=Array(this._i.dimension-2).fill(0);do{const n=Array(this._i.dimension-2).fill(0),d=[];do{const h=s.map((r,e)=>r*this._stride[e]-this._padding[e][0]+n[e]);h.every((r,e)=>0<=r&&r<this._i.sizes[e+l])&&d.push(h);for(let r=0;r<n.length&&(n[r]++,!(n[r]<this._kernel[r]));r++)n[r]=0}while(n.some(h=>h>0));for(const h of d)this._bi.operateAt(this._index(a,t,h),r=>r+this._bo.at(this._index(a,t,s))/d.length);for(let h=0;h<s.length&&(s[h]++,!(s[h]<this._o.sizes[h+l]));h++)s[h]=0}while(s.some(n=>n>0))}return this._bi}toObject(){return{type:"average_pool",kernel:this._kernel,stride:this._stride,padding:this._padding,channel_dim:this._channel_dim}}}y.registLayer();
1+
import{AveragePoolLayer as e}from"./index.js";export default e;
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
var b=Object.defineProperty;var u=(l,e)=>b(l,"name",{value:e,configurable:!0});import f from"../../../util/tensor.js";import v,{NeuralnetworkLayerException as g}from"./base.js";export default class d extends v{static{u(this,"BatchNormalizationLayer")}constructor({scale:e=1,offset:_=0,epsilon:n=1e-12,channel_dim:o=-1,input_mean:i,input_var:t,...r}){if(super(r),this._scale=null,typeof e=="string"?this._scalename=e:this._scale=e,this._offset=null,typeof _=="string"?this._offsetname=_:this._offset=_,this._epsilon=n,this._channel_dim=o,this._channel_dim!==-1&&this._channel_dim!==1)throw new g("Invalid channel dimension.");this._input_mean=i,this._input_var=t}get dependentLayers(){const e=[];return this._scalename&&e.push(this._scalename),this._offsetname&&e.push(this._offsetname),typeof this._input_mean=="string"&&e.push(this._input_mean),typeof this._input_var=="string"&&e.push(this._input_var),e}get mean(){return this._mean}get var(){return this._var}calc(e){const _=this._channel_dim<0?this._channel_dim+e.dimension:this._channel_dim,n=Array(e.dimension-_).fill(1);if(n[0]=e.sizes[_],this._scalename?(this._scale=this.graph.getNode(this._scalename).outputValue,this._scale.reshape(...n)):typeof this._scale=="number"?this._scale=new f(n,this._scale):Array.isArray(this._scale)&&(this._scale=f.fromArray(this._scale),this._scale.reshape(...n)),this._offsetname?(this._offset=this.graph.getNode(this._offsetname).outputValue,this._offset.reshape(...n)):typeof this._offset=="number"?this._offset=new f(n,this._offset):Array.isArray(this._offset)&&(this._offset=f.fromArray(this._offset),this._offset.reshape(...n)),typeof this._input_mean=="string"?(this._mean=this.graph.getNode(this._input_mean).outputValue,this._mean.reshape(...n)):Array.isArray(this._input_mean)&&(this._mean=f.fromArray(this._input_mean),this._mean.reshape(...n)),typeof 
this._input_var=="string"?(this._var=this.graph.getNode(this._input_var).outputValue,this._var.reshape(...n)):Array.isArray(this._input_var)&&(this._var=f.fromArray(this._input_var),this._var.reshape(...n)),!this._input_mean||!this._input_var){const i=Array.from({length:e.dimension},(h,c)=>c);i.splice(_,1);const t=i.reduce((h,c)=>h*e.sizes[c],1),r=e.reduce((h,c)=>h+c/t,0,i,!0);if(this._input_mean||(this._mean=r),!this._input_var){const h=e.copy();h.broadcastOperate(r,(a,s)=>(a-s)**2);const c=h.reduce((a,s)=>a+s/t,0,i,!0);this._var=c}}this._xc=e.copy(),this._xc.broadcastOperate(this._mean,(i,t)=>i-t),this._xh=this._xc.copy(),this._xh.broadcastOperate(this._var,(i,t)=>i/Math.sqrt(t+this._epsilon));const o=this._xh.copy();return o.broadcastOperate(this._scale,(i,t)=>i*t),o.broadcastOperate(this._offset,(i,t)=>i+t),o}grad(e){const _=this._channel_dim<0?this._channel_dim+e.dimension:this._channel_dim;this._bo=e;const n=this._bo.copy();n.broadcastOperate(this._scale,(a,s)=>a*s);const o=this._xc.copy();o.broadcastOperate(n,(a,s)=>a*s);const i=Array.from({length:e.dimension},(a,s)=>s);i.splice(_,1);const t=i.reduce((a,s)=>a*e.sizes[s],1),r=o.reduce((a,s)=>a+s/t,0,i,!0),h=this._xc.copy();h.broadcastOperate(this._var,(a,s)=>a/(s+this._epsilon)),h.broadcastOperate(r,(a,s)=>a*s),h.broadcastOperate(n,(a,s)=>s-a),h.broadcastOperate(this._var,(a,s)=>a/Math.sqrt(s+this._epsilon));const c=h.reduce((a,s)=>a+s/t,0,i,!0);if(h.broadcastOperate(c,(a,s)=>a-s),this._scalename||this._offsetname){const a={};if(this._scalename){const s=this._bo.reduce((m,p,y)=>m+p*this._xh.at(y)/t,0,i,!0);a[this._scalename]=s}if(this._offsetname){const s=this._bo.reduce((m,p)=>m+p/t,0,i,!0);a[this._offsetname]=s}return[h,a]}return h}update(e){if(this._scalename&&this._offsetname)return;const _=this._channel_dim<0?this._channel_dim+this._bo.dimension:this._channel_dim,n=Array.from({length:this._bo.dimension},(i,t)=>t);n.splice(_,1);const o=n.reduce((i,t)=>i*this._bo.sizes[t],1);if(!this._offsetname){const 
i=this._bo.reduce((t,r)=>t+r/o,0,n,!0);this._offset.broadcastOperate(e.delta("offset",i),(t,r)=>t-r)}if(!this._scalename){const i=this._bo.reduce((t,r,h)=>t+r*this._xh.at(h)/o,0,n,!0);this._scale.broadcastOperate(e.delta("scale",i),(t,r)=>t-r)}}toObject(){return{type:"batch_normalization",scale:this._scalename||this._scale.toArray?.()||this._scale,offset:this._offsetname||this._offset.toArray?.()||this._offset,epsilon:this._epsilon,channel_dim:this._channel_dim,input_mean:this._input_mean,input_var:this._input_var}}}d.registLayer();
1+
import{BatchNormalizationLayer as a}from"./index.js";export default a;

lib/model/nns/layer/bdaa.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
var l=Object.defineProperty;var p=(i,a)=>l(i,"name",{value:a,configurable:!0});import r from"./base.js";export default class s extends r{static{p(this,"BimodalDerivativeAdaptiveActivationLayer")}constructor({alpha:a=1,...t}){super(t),this._alpha=a}calc(a){this._i=a;const t=a.copy();return t.map(e=>(1/(1+Math.exp(-e))-1/(1+Math.exp(-e-this._alpha)))/2),t}grad(a){this._bo=a;const t=a.copy();return t.broadcastOperate(this._i,(e,h)=>e*(Math.exp(-h)/(1+Math.exp(-h))**2-Math.exp(-h-this._alpha)/(1+Math.exp(-h-this._alpha))**2)/2),t}update(a){let t=0;for(let e=0;e<this._i.length;e++)t+=this._bo.value[e]*(-Math.exp(-this._i.value[e]-this._alpha)/(1+Math.exp(-this._i.value[e]-this._alpha))**2)/2;this._alpha-=a.delta("alpha",t/this._i.length)}toObject(){return{type:"bdaa",alpha:this._alpha}}}s.registLayer("bdaa");
1+
import{BimodalDerivativeAdaptiveActivationLayer as i}from"./index.js";export default i;

0 commit comments

Comments
 (0)