Skip to content

Commit

Permalink
fix build without neon
Browse files Browse the repository at this point in the history
  • Loading branch information
nihui committed Feb 25, 2020
1 parent ed81092 commit 57bedd5
Show file tree
Hide file tree
Showing 3 changed files with 66 additions and 3 deletions.
23 changes: 22 additions & 1 deletion src/layer/arm/convolution_arm.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -974,7 +974,28 @@ int Convolution_arm::forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt)
kptr += maxk;
}

sum = activation_ss(sum, activation_type, activation_params);
// Apply the element-wise activation to the accumulated output value.
// NOTE(review): this inlines the activation instead of calling
// activation_ss (removed line above) — per the commit message, the
// helper is unavailable when building without NEON, so the scalar
// fallback path must not reference it.
// activation_type == 0 (or any unknown value) is a pass-through.
if (activation_type == 1)
{
// ReLU: max(sum, 0)
sum = std::max(sum, 0.f);
}
else if (activation_type == 2)
{
// Leaky ReLU: negative inputs are scaled by params[0]
float slope = activation_params[0];
sum = sum > 0.f ? sum : sum * slope;
}
else if (activation_type == 3)
{
// Clip: clamp sum into [params[0], params[1]]
float min = activation_params[0];
float max = activation_params[1];
if (sum < min)
sum = min;
if (sum > max)
sum = max;
}
else if (activation_type == 4)
{
// Sigmoid: 1 / (1 + e^-x); exp() computes in double, cast back to float
sum = static_cast<float>(1.f / (1.f + exp(-sum)));
}

outptr[j] = sum;
}
Expand Down
23 changes: 22 additions & 1 deletion src/layer/arm/deconvolution_arm.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -644,7 +644,28 @@ int Deconvolution_arm::forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt)
kptr += maxk;
}

sum = activation_ss(sum, activation_type, activation_params);
// Apply the element-wise activation to the accumulated output value.
// NOTE(review): inlined replacement for the removed activation_ss call
// (line above) — per the commit message, the helper is unavailable when
// building without NEON, so this scalar path must not reference it.
// activation_type == 0 (or any unknown value) is a pass-through.
if (activation_type == 1)
{
// ReLU: max(sum, 0)
sum = std::max(sum, 0.f);
}
else if (activation_type == 2)
{
// Leaky ReLU: negative inputs are scaled by params[0]
float slope = activation_params[0];
sum = sum > 0.f ? sum : sum * slope;
}
else if (activation_type == 3)
{
// Clip: clamp sum into [params[0], params[1]]
float min = activation_params[0];
float max = activation_params[1];
if (sum < min)
sum = min;
if (sum > max)
sum = max;
}
else if (activation_type == 4)
{
// Sigmoid: 1 / (1 + e^-x); exp() computes in double, cast back to float
sum = static_cast<float>(1.f / (1.f + exp(-sum)));
}

outptr[j] = sum;
}
Expand Down
23 changes: 22 additions & 1 deletion src/layer/arm/deconvolutiondepthwise_arm.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -306,7 +306,28 @@ int DeconvolutionDepthWise_arm::forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt)
}
}

sum = activation_ss(sum, activation_type, activation_params);
// Apply the element-wise activation to the accumulated output value.
// NOTE(review): inlined replacement for the removed activation_ss call
// (line above) — per the commit message, the helper is unavailable when
// building without NEON, so this scalar path must not reference it.
// activation_type == 0 (or any unknown value) is a pass-through.
if (activation_type == 1)
{
// ReLU: max(sum, 0)
sum = std::max(sum, 0.f);
}
else if (activation_type == 2)
{
// Leaky ReLU: negative inputs are scaled by params[0]
float slope = activation_params[0];
sum = sum > 0.f ? sum : sum * slope;
}
else if (activation_type == 3)
{
// Clip: clamp sum into [params[0], params[1]]
float min = activation_params[0];
float max = activation_params[1];
if (sum < min)
sum = min;
if (sum > max)
sum = max;
}
else if (activation_type == 4)
{
// Sigmoid: 1 / (1 + e^-x); exp() computes in double, cast back to float
sum = static_cast<float>(1.f / (1.f + exp(-sum)));
}

outptr[j] = sum;
}
Expand Down

0 comments on commit 57bedd5

Please sign in to comment.