27 #ifndef _CXSC_LIVECRMAT_INL_INCLUDED
28 #define _CXSC_LIVECRMAT_INL_INCLUDED
// NOTE(review): this chunk is extraction-garbled — the original file's line
// numbers (38, 45, 49, ...) are fused into the code text, and the function
// signatures that preceded each body were lost. Code is left byte-identical;
// comments below describe what the surviving bodies appear to do.

// Presumably the body of l_ivector(const rmatrix &sl): converts a 1-row or
// 1-column real matrix into an l_interval vector via the _vmconstr helper
// template — TODO confirm against the full l_ivecrmat header.
38 { _vmconstr<l_ivector,rmatrix,l_interval>(*
this,sl); }
// Presumably the body of l_ivector(const rmatrix_slice &sl): same
// conversion for a matrix slice, delegated to _vmsconstr.
45 { _vmsconstr<l_ivector,rmatrix_slice,l_interval>(*
this,sl); }
// Orphaned loop header of an element-copy over a sub-vector: i indexes the
// destination while j walks the source using the slice's start/offset.
// The loop body is not visible in this chunk.
49 for (
int i=0, j=v.start;i<v.size;i++,j+=v.offset)
// Bodies of four accumulate(idotprecision &dp, ...) overloads: each folds
// the dot product of an l_ivector and a real matrix row/column
// (rmatrix_subv) into the interval dot-product accumulator dp via the
// _vmvaccu helper. Signatures were lost in extraction; the argument order
// (rv2,rv1 vs rv1,rv2) presumably mirrors the (matrix_subv, vector) vs
// (vector, matrix_subv) overload pair — TODO confirm.
83 { _vmvaccu<idotprecision,l_ivector,rmatrix_subv>(dp,rv2,rv1); }
90 { _vmvaccu<idotprecision,l_ivector,rmatrix_subv>(dp,rv1,rv2); }
// These two variants first widen the non-interval operand through the
// l_ivector conversion constructor before accumulating.
98 { _vmvaccu<idotprecision,l_ivector,rmatrix_subv>(dp,
l_ivector(rv2),rv1); }
// Dangling index-check guard: its #else/#endif (and the throw-spec lines it
// selected between) did not survive extraction.
100 #if(CXSC_INDEX_CHECK)
105 { _vmvaccu<idotprecision,l_ivector,rmatrix_subv>(dp,
l_ivector(rv1),rv2); }
110 #if(CXSC_INDEX_CHECK)
// Body presumably of l_ivector::operator=(const rmatrix &m): assigns a
// 1-row/1-column real matrix to this interval vector via _vmassign.
// Signature lost in extraction — code left byte-identical.
115 {
return _vmassign<l_ivector,rmatrix,l_interval>(*
this,m); }
// Dangling CXSC_INDEX_CHECK guard; matching #else/#endif not visible.
117 #if(CXSC_INDEX_CHECK)
// Presumably operator=(const rmatrix_slice &m): materializes the slice as
// an rmatrix first, then reuses the matrix-assign path.
122 {
return _vmassign<l_ivector,rmatrix,l_interval>(*
this,
rmatrix(m)); }
124 #if(CXSC_INDEX_CHECK)
// Presumably a slice-target assignment (l_ivector_slice = matrix):
// converts m to an rvector and delegates to _vsvassign — TODO confirm.
129 {
return _vsvassign(*
this,
rvector(m)); }
131 #if(CXSC_INDEX_CHECK)
// Bodies of the matrix*vector / vector*matrix operator family. Every
// signature and every #else/#endif of the index-check guards was lost in
// extraction; code left byte-identical. Naming convention of the helpers:
// _mv = matrix*vector, _msv = matrix_slice*vector, _vm = vector*matrix,
// _vms = vector*matrix_slice; "limult" = l_interval multiply,
// "...assign" = the compound *= form — presumed from the C-XSC helper
// naming scheme, TODO confirm.
139 #if(CXSC_INDEX_CHECK)
// Presumably operator*(const rmatrix &m, const l_ivector &v).
144 {
return _mvlimult<rmatrix,l_ivector,l_ivector>(m,v); }
146 #if(CXSC_INDEX_CHECK)
// Presumably operator*(const rmatrix_slice &ms, const l_ivector &v).
151 {
return _msvlimult<rmatrix_slice,l_ivector,l_ivector>(ms,v); }
153 #if(CXSC_INDEX_CHECK)
// Presumably operator*(const l_ivector &v, const rmatrix &m).
158 {
return _vmlimult<l_ivector,rmatrix,l_ivector>(v,m); }
160 #if(CXSC_INDEX_CHECK)
// Presumably operator*(const l_ivector &v, const rmatrix_slice &ms).
165 {
return _vmslimult<l_ivector,rmatrix_slice,l_ivector>(v,ms); }
167 #if(CXSC_INDEX_CHECK)
// Presumably operator*=(l_ivector &v, const rmatrix &m).
172 {
return _vmlimultassign<l_ivector,rmatrix,l_interval>(v,m); }
174 #if(CXSC_INDEX_CHECK)
// Presumably operator*=(l_ivector &v, const rmatrix_slice &ms).
179 {
return _vmslimultassign<l_ivector,rmatrix_slice,l_interval>(v,ms); }
// Dangling index-check guard; its #else/#endif was lost in extraction.
182 #if(CXSC_INDEX_CHECK)
// Presumably operator* for a vector type that first needs widening:
// the operand v is converted through the l_ivector constructor before
// the vector*matrix multiply — TODO confirm v's declared type from the
// missing signature.
187 {
return _vmlimult<l_ivector,rmatrix,l_ivector>(
l_ivector(v),m); }
189 #if(CXSC_INDEX_CHECK)
// Presumably l_ivector_slice::operator*=(const rmatrix &m): compound
// multiply on a vector slice, delegated to _vsmlimultassign.
194 {
return _vsmlimultassign<l_ivector_slice,rmatrix,l_interval>(*
this,m); }