Added activation functions: leaky_relu, relu6, softplus, elu, celu, logsigmoid (#108)

* Added leaky_relu, relu6, softplus, elu, celu, and logsigmoid
* Minor fixes for docstrings and benchmark imports
* Fixed the elu implementation and added tests
* Added tests for the optional parameter; renamed the leaky_relu parameter to match the PyTorch documentation (see the sketch after this list)
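The commit message mentions a fixed elu implementation and a leaky_relu parameter renamed to follow the PyTorch documentation. A minimal sketch of what those two functions plausibly look like, assuming MLX's Python API (`mlx.core` as `mx`); the exact code shipped in this commit may differ:

```python
import mlx.core as mx

def leaky_relu(x: mx.array, negative_slope: float = 0.01) -> mx.array:
    # Parameter named negative_slope to match the PyTorch docs,
    # as the commit message describes.
    # leaky_relu(x) = max(negative_slope * x, x)
    return mx.maximum(negative_slope * x, x)

def elu(x: mx.array, alpha: float = 1.0) -> mx.array:
    # elu(x) = x for x > 0, alpha * (exp(x) - 1) otherwise.
    return mx.where(x > 0, x, alpha * (mx.exp(x) - 1))
```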
Author: Jason
Date: 2023-12-10 19:31:38 -05:00
Committed by: GitHub
Parent: 71d1fff90a
Commit: b0cd092b7f
6 changed files with 344 additions and 0 deletions


@@ -193,6 +193,18 @@ if __name__ == "__main__":
compare_filtered("softmax --size 2x1024x1024 --axis 1 --fused --cpu")
compare_filtered("relu --size 32x16x1024")
compare_filtered("relu --size 32x16x1024 --cpu")
compare_filtered("leaky_relu --size 32x16x1024")
compare_filtered("leaky_relu --size 32x16x1024 --cpu")
compare_filtered("elu --size 32x16x1024")
compare_filtered("elu --size 32x16x1024 --cpu")
compare_filtered("relu6 --size 32x16x1024")
compare_filtered("relu6 --size 32x16x1024 --cpu")
compare_filtered("softplus --size 32x16x1024")
compare_filtered("softplus --size 32x16x1024 --cpu")
compare_filtered("celu --size 32x16x1024")
compare_filtered("celu --size 32x16x1024 --cpu")
compare_filtered("log_sigmoid --size 32x16x1024")
compare_filtered("log_sigmoid --size 32x16x1024 --cpu")
compare_filtered("scalar_mul --size 32x16x1024")
compare_filtered("scalar_mul --size 32x16x1024 --cpu")
compare_filtered("cross_entropy --size 256x1024")