26 #ifndef _CXSC_LRVECIVEC_INL_INCLUDED
27 #define _CXSC_LRVECIVEC_INL_INCLUDED
29 #include "l_interval.hpp"
// NOTE(review): fragment — these are one-line accumulate() bodies whose
// signatures are not visible in this chunk (the leaked original line
// numbers 39, 46, 53, ... show lines are missing between them). Confirm
// against the complete lrvecivec.inl.
// Presumably: fold the dot product of a long-real vector and an interval
// vector into the interval dot-product accumulator dp (vector*vector form).
{ _vvaccu(dp,rv1,rv2); }
// Operand order swapped (rv2 first) — presumably the overload with the
// (ivector, l_rvector) argument order delegating to the same helper.
{ _vvaccu(dp,rv2,rv1); }
// Slice/vector forms: slice operand sl paired with full vector rv.
{ _vsvaccu(dp,sl,rv); }
{ _vsvaccu(dp,sl,rv); }
{ _vsvaccu(dp,sl,rv); }
INLINE
// Accumulate the dot product of a long-real vector and an interval matrix
// subvector (row/column view) into the interval accumulator dp.
// NOTE(review): signature only — the throw-specification and the body that
// originally followed (after orig. line 74) are not visible in this chunk.
void accumulate(idotprecision &dp,
const l_rvector & rv1,
const imatrix_subv &rv2)
INLINE
// Accumulate the dot product of an interval vector and a long-real matrix
// subvector into the interval accumulator dp (mirrored operand order of the
// previous overload).
// NOTE(review): signature only — throw-specification and body are missing
// from this chunk.
void accumulate(idotprecision &dp,
const ivector & rv1,
const l_rmatrix_subv &rv2)
// Fragment: slice/vector accumulate body (signature not visible here);
// presumably folds sl*rv into the interval accumulator dp via _vsvaccu.
{ _vsvaccu(dp,sl,rv); }
INLINE
// Accumulate the dot product of a long-real matrix subvector and an
// interval vector into the interval accumulator dp.
// NOTE(review): signature only — throw-specification and body are missing
// from this chunk.
void accumulate(idotprecision &dp,
const l_rmatrix_subv & rv1,
const ivector &rv2)
INLINE
// Accumulate the dot product of an interval matrix subvector and a
// long-real vector into the interval accumulator dp (mirrored operand
// order of the previous overload).
// NOTE(review): signature only — throw-specification and body are missing
// from this chunk.
void accumulate(idotprecision &dp,
const imatrix_subv & rv1,
const l_rvector &rv2)
// NOTE(review): each #if(CXSC_INDEX_CHECK) below is truncated — the
// throw-specification branches, #else and #endif (orig. lines 105-108 /
// 112-115) are not visible in this chunk, and neither are the function
// signatures these bodies belong to.
#if(CXSC_INDEX_CHECK)
// Presumably: slice*slice accumulate body; note the swapped operand order
// (sl2 first) relative to the overload below.
{ _vsvsaccu(dp,sl2,sl1); }
#if(CXSC_INDEX_CHECK)
{ _vsvsaccu(dp,sl1,sl2); }
// Fragment: result-returning body — presumably the l_rvector * ivector
// dot product yielding an l_interval via the _vvlimult helper template.
// Signature and the #else/#endif of the index-check conditional are not
// visible in this chunk.
#if(CXSC_INDEX_CHECK)
{
return _vvlimult<l_rvector,ivector,l_interval>(rv1,rv2); }
// Fragment: presumably the l_rvector_slice * ivector dot product
// (slice-on-the-left helper _vsvlimult). Signature and conditional tail
// not visible in this chunk.
#if(CXSC_INDEX_CHECK)
{
return _vsvlimult<l_rvector_slice,ivector,l_interval>(sl,rv); }
// Fragment: presumably the ivector_slice * l_rvector dot product.
// Signature and conditional tail not visible in this chunk.
#if(CXSC_INDEX_CHECK)
{
return _vsvlimult<ivector_slice,l_rvector,l_interval>(sl,rv); }
// Fragment: presumably the slice * slice dot product
// (l_rvector_slice, ivector_slice) via _vsvslimult. Signature and
// conditional tail not visible in this chunk.
#if(CXSC_INDEX_CHECK)
{
return _vsvslimult<l_rvector_slice,ivector_slice,l_interval>(sl1,sl2); }
// Fragment: presumably the ivector * l_rvector dot product — note the
// arguments are passed to _vvlimult in swapped order (rv2, rv1), i.e. it
// delegates to the (l_rvector, ivector) helper. Signature and conditional
// tail not visible in this chunk.
#if(CXSC_INDEX_CHECK)
{
return _vvlimult<l_rvector,ivector,l_interval>(rv2,rv1); }
// Fragment: presumably the ivector_slice * l_rvector form (mirror of an
// earlier overload, same helper instantiation). Signature and conditional
// tail not visible in this chunk.
#if(CXSC_INDEX_CHECK)
{
return _vsvlimult<ivector_slice,l_rvector,l_interval>(sl,rv); }
// Fragment: presumably the l_rvector_slice * ivector form (mirror of an
// earlier overload, same helper instantiation). Signature and conditional
// tail not visible in this chunk.
#if(CXSC_INDEX_CHECK)
{
return _vsvlimult<l_rvector_slice,ivector,l_interval>(sl,rv); }
// Fragment: presumably the mirrored slice * slice dot product — arguments
// passed to _vsvslimult in swapped order (sl2, sl1), delegating to the
// (l_rvector_slice, ivector_slice) helper. Signature and conditional tail
// not visible in this chunk.
#if(CXSC_INDEX_CHECK)
{
return _vsvslimult<l_rvector_slice,ivector_slice,l_interval>(sl2,sl1); }