#ifndef _CXSC_LRVECRMAT_INL_INCLUDED
#define _CXSC_LRVECRMAT_INL_INCLUDED
// Construction of an l_rvector from an rmatrix: delegate to the generic
// vector-from-matrix construction helper.
{ _vmconstr<l_rvector,rmatrix,l_real>(*this,sl); }

// Construction of an l_rvector from an rmatrix_slice.
{ _vmsconstr<l_rvector,rmatrix_slice,l_real>(*this,sl); }

// Construction from a matrix row/column view (rmatrix_subv): walk the view's
// elements using its start index and stride (offset).
for (int i=0, j=v.start; i<v.size; i++, j+=v.offset)
// accumulate(): add the dot product of a matrix row/column view (rmatrix_subv)
// and an l_rvector to a dotprecision or idotprecision accumulator.
{ _vmvaccu<dotprecision,l_rvector,rmatrix_subv>(dp,rv2,rv1); }
{ _vmvaccu<dotprecision,l_rvector,rmatrix_subv>(dp,rv1,rv2); }
{ _vmvaccu<idotprecision,l_rvector,rmatrix_subv>(dp,rv2,rv1); }
{ _vmvaccu<idotprecision,l_rvector,rmatrix_subv>(dp,rv1,rv2); }
// With CXSC_INDEX_CHECK enabled, these operations verify index ranges and
// dimensions and may throw; the guarded lines select the matching exception
// specifications. The vector argument is wrapped in a temporary l_rvector
// before accumulation.
#if(CXSC_INDEX_CHECK)
{ _vmvaccu<dotprecision,l_rvector,rmatrix_subv>(dp,l_rvector(rv2),rv1); }

#if(CXSC_INDEX_CHECK)
{ _vmvaccu<dotprecision,l_rvector,rmatrix_subv>(dp,l_rvector(rv1),rv2); }

#if(CXSC_INDEX_CHECK)
{ _vmvaccu<idotprecision,l_rvector,rmatrix_subv>(dp,l_rvector(rv2),rv1); }

#if(CXSC_INDEX_CHECK)
{ _vmvaccu<idotprecision,l_rvector,rmatrix_subv>(dp,l_rvector(rv1),rv2); }
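/*
   Usage sketch (illustrative only; the objects A, x, dp and their bounds are
   assumptions, not part of this header): the accumulate() overloads above add
   the dot product of a matrix row/column view and an l_rvector to a
   dotprecision or idotprecision accumulator, e.g.

       rmatrix      A(1,3,1,3);     // real 3x3 matrix
       l_rvector    x(1,3);         // multiple-precision real vector
       dotprecision dp;
       dp = 0.0;
       accumulate(dp, A[1], x);     // dp += <row 1 of A, x>, accumulated exactly
       real r = rnd(dp);            // round the accumulator back to a real
*/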
// Assignment from matrix types: the right-hand side is converted to a
// temporary where necessary and copied elementwise via the generic helpers.
#if(CXSC_INDEX_CHECK)
{ return _vmassign<l_rvector,rmatrix,l_real>(*this,m); }

#if(CXSC_INDEX_CHECK)
{ return _vmassign<l_rvector,rmatrix,l_real>(*this,rmatrix(m)); }

#if(CXSC_INDEX_CHECK)
{ return _vsvassign(*this,rvector(m)); }
#if(CXSC_INDEX_CHECK)

// Multiplication operators: rmatrix * l_rvector, l_rvector * rmatrix, the
// rmatrix_slice variants, and the *= forms that assign the product back to
// the vector.
#if(CXSC_INDEX_CHECK)
{ return _mvlmult<rmatrix,l_rvector,l_rvector>(m,v); }

#if(CXSC_INDEX_CHECK)
{ return _msvlmult<rmatrix_slice,l_rvector,l_rvector>(ms,v); }

#if(CXSC_INDEX_CHECK)
{ return _vmlmult<l_rvector,rmatrix,l_rvector>(v,m); }

#if(CXSC_INDEX_CHECK)
{ return _vmslmult<l_rvector,rmatrix_slice,l_rvector>(v,ms); }

#if(CXSC_INDEX_CHECK)
{ return _vmlmultassign<l_rvector,rmatrix,l_real>(v,m); }

#if(CXSC_INDEX_CHECK)
{ return _vmslmultassign<l_rvector,rmatrix_slice,l_real>(v,ms); }

#if(CXSC_INDEX_CHECK)
{ return _vmlmult<l_rvector,rmatrix,l_rvector>(l_rvector(v),m); }

#if(CXSC_INDEX_CHECK)
{ *this = *this*m; return *this; }
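/*
   Usage sketch (illustrative only; the objects A, x, y and their bounds are
   assumptions): the operators above combine l_rvector and rmatrix objects
   directly, e.g.

       rmatrix   A(1,3,1,3);
       l_rvector x(1,3), y;
       y  = A * x;   // matrix * vector  -> l_rvector
       y  = x * A;   // vector * matrix  -> l_rvector
       x *= A;       // in-place vector-matrix product
*/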