Merge with upstream.

Mehdi Goli 2018-08-03 17:04:08 +01:00
commit 225fa112aa
3 changed files with 19 additions and 11 deletions

View File

@@ -15,6 +15,7 @@
#define ALIGNMENT 1
#endif
+typedef Matrix<float,16,1> Vector16f;
typedef Matrix<float,8,1> Vector8f;
void check_handmade_aligned_malloc()
@@ -70,7 +71,7 @@ struct MyStruct
{
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
char dummychar;
-Vector8f avec;
+Vector16f avec;
};
class MyClassA
@@ -78,7 +79,7 @@ class MyClassA
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
char dummychar;
-Vector8f avec;
+Vector16f avec;
};
template<typename T> void check_dynaligned()
@@ -145,6 +146,7 @@ EIGEN_DECLARE_TEST(dynalloc)
CALL_SUBTEST(check_dynaligned<Vector4d>() );
CALL_SUBTEST(check_dynaligned<Vector4i>() );
CALL_SUBTEST(check_dynaligned<Vector8f>() );
+CALL_SUBTEST(check_dynaligned<Vector16f>() );
}
{
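
For context, a minimal standalone sketch (not part of this commit; assumed usage of Eigen's documented aligned-allocation macro) of what the dynalloc test exercises for the newly covered Vector16f: a heap-allocated type holding a fixed-size, vectorizable Eigen member must hand back suitably aligned storage, which is what EIGEN_MAKE_ALIGNED_OPERATOR_NEW provides.

#include <Eigen/Core>
#include <cassert>
#include <cstdint>
using namespace Eigen;

typedef Matrix<float,16,1> Vector16f;

struct MyStruct
{
  EIGEN_MAKE_ALIGNED_OPERATOR_NEW  // injects an aligned operator new/delete
  char dummychar;                  // deliberately perturbs the layout before the Eigen member
  Vector16f avec;
};

int main()
{
  MyStruct* s = new MyStruct;      // uses the aligned operator new from the macro
  // The member must sit on a boundary matching its alignment requirement.
  assert(reinterpret_cast<std::uintptr_t>(&s->avec) % alignof(Vector16f) == 0);
  delete s;
  return 0;
}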

View File

@@ -50,7 +50,13 @@ static void test_static_dimension_failure()
.reshape(Tensor<int, 3>::Dimensions(2, 3, 1))
.concatenate(right, 0);
Tensor<int, 2, DataLayout> alternative = left
-.concatenate(right.reshape(Tensor<int, 2>::Dimensions{{{2, 3}}}), 0);
+// Clang breaks on the {{{}}} form with an ambiguity error between the copy constructor and
+// the variadic DSizes constructor added for #ifndef EIGEN_EMULATE_CXX11_META_H.
+// Solution: either change the code to
+//   Tensor<int, 2>::Dimensions{{2, 3}}
+// or to
+//   Tensor<int, 2>::Dimensions{Tensor<int, 2>::Dimensions{{2, 3}}}
+.concatenate(right.reshape(Tensor<int, 2>::Dimensions{{2, 3}}), 0);
}
template<int DataLayout>
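
For context, a minimal standalone sketch (assumed usage, not part of this commit) of the two spellings the new comment recommends for building a Tensor<int, 2>::Dimensions, both of which sidestep the triple-brace form that Clang rejects as ambiguous:

#include <unsupported/Eigen/CXX11/Tensor>
using Eigen::Tensor;

int main()
{
  Tensor<int, 2> t(2, 3);
  t.setZero();

  // Double braces: the inner braces initialize the underlying array of sizes.
  Tensor<int, 2>::Dimensions d1{{2, 3}};

  // Explicitly construct a Dimensions object and copy-construct from it.
  Tensor<int, 2>::Dimensions d2{Tensor<int, 2>::Dimensions{{2, 3}}};

  Tensor<int, 2> r1 = t.reshape(d1);
  Tensor<int, 2> r2 = t.reshape(d2);
  return (r1.dimension(1) == 3 && r2.dimension(1) == 3) ? 0 : 1;
}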

View File

@@ -37,7 +37,7 @@ static void test_all_dimensions_trace() {
VERIFY_IS_EQUAL(result1(), sum);
Tensor<float, 5, DataLayout> tensor2(7, 7, 7, 7, 7);
-array<ptrdiff_t, 5> dims({{2, 1, 0, 3, 4}});
+array<ptrdiff_t, 5> dims = { { 2, 1, 0, 3, 4 } };
Tensor<float, 0, DataLayout> result2 = tensor2.trace(dims);
VERIFY_IS_EQUAL(result2.rank(), 0);
sum = 0.0f;
@@ -52,7 +52,7 @@ template <int DataLayout>
static void test_simple_trace() {
Tensor<float, 3, DataLayout> tensor1(3, 5, 3);
tensor1.setRandom();
-array<ptrdiff_t, 2> dims1({{0, 2}});
+array<ptrdiff_t, 2> dims1 = { { 0, 2 } };
Tensor<float, 1, DataLayout> result1 = tensor1.trace(dims1);
VERIFY_IS_EQUAL(result1.rank(), 1);
VERIFY_IS_EQUAL(result1.dimension(0), 5);
@@ -67,7 +67,7 @@ static void test_simple_trace() {
Tensor<float, 4, DataLayout> tensor2(5, 5, 7, 7);
tensor2.setRandom();
-array<ptrdiff_t, 2> dims2({{2, 3}});
+array<ptrdiff_t, 2> dims2 = { { 2, 3 } };
Tensor<float, 2, DataLayout> result2 = tensor2.trace(dims2);
VERIFY_IS_EQUAL(result2.rank(), 2);
VERIFY_IS_EQUAL(result2.dimension(0), 5);
@@ -82,7 +82,7 @@ static void test_simple_trace() {
}
}
-array<ptrdiff_t, 2> dims3({{1, 0}});
+array<ptrdiff_t, 2> dims3 = { { 1, 0 } };
Tensor<float, 2, DataLayout> result3 = tensor2.trace(dims3);
VERIFY_IS_EQUAL(result3.rank(), 2);
VERIFY_IS_EQUAL(result3.dimension(0), 7);
@@ -99,7 +99,7 @@ static void test_simple_trace() {
Tensor<float, 5, DataLayout> tensor3(3, 7, 3, 7, 3);
tensor3.setRandom();
-array<ptrdiff_t, 3> dims4({{0, 2, 4}});
+array<ptrdiff_t, 3> dims4 = { { 0, 2, 4 } };
Tensor<float, 2, DataLayout> result4 = tensor3.trace(dims4);
VERIFY_IS_EQUAL(result4.rank(), 2);
VERIFY_IS_EQUAL(result4.dimension(0), 7);
@@ -116,7 +116,7 @@ static void test_simple_trace() {
Tensor<float, 5, DataLayout> tensor4(3, 7, 4, 7, 5);
tensor4.setRandom();
-array<ptrdiff_t, 2> dims5({{1, 3}});
+array<ptrdiff_t, 2> dims5 = { { 1, 3 } };
Tensor<float, 3, DataLayout> result5 = tensor4.trace(dims5);
VERIFY_IS_EQUAL(result5.rank(), 3);
VERIFY_IS_EQUAL(result5.dimension(0), 3);
@@ -140,7 +140,7 @@ template<int DataLayout>
static void test_trace_in_expr() {
Tensor<float, 4, DataLayout> tensor(2, 3, 5, 3);
tensor.setRandom();
-array<ptrdiff_t, 2> dims({{1, 3}});
+array<ptrdiff_t, 2> dims = { { 1, 3 } };
Tensor<float, 2, DataLayout> result(2, 5);
result = result.constant(1.0f) - tensor.trace(dims);
VERIFY_IS_EQUAL(result.rank(), 2);
@@ -168,4 +168,4 @@ EIGEN_DECLARE_TEST(cxx11_tensor_trace) {
CALL_SUBTEST(test_simple_trace<RowMajor>());
CALL_SUBTEST(test_trace_in_expr<ColMajor>());
CALL_SUBTEST(test_trace_in_expr<RowMajor>());
-}
+}
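
The trace hunks above all make the same change: from constructor-call syntax, array<...> dims({{...}}), to plain aggregate initialization. A plausible reason (an assumption, not stated in the commit) is that Eigen::array maps to std::array in most builds but to Eigen's own fixed-size array emulation in others, and the aggregate form is accepted by both. A minimal sketch of the new spelling in use:

#include <unsupported/Eigen/CXX11/Tensor>
#include <cstddef>
using Eigen::Tensor;
using Eigen::array;

int main()
{
  Tensor<float, 3> tensor1(3, 5, 3);
  tensor1.setRandom();

  // Aggregate initialization, as used after this commit.
  array<std::ptrdiff_t, 2> dims1 = { { 0, 2 } };

  // Tracing over dimensions 0 and 2 leaves the middle dimension of size 5.
  Tensor<float, 1> result1 = tensor1.trace(dims1);
  return result1.dimension(0) == 5 ? 0 : 1;
}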