diff --git a/Creating Extensions using FFI.md b/Creating Extensions using FFI.md
index a6ee63f120f..185ce4c86a9 100644
--- a/Creating Extensions using FFI.md
+++ b/Creating Extensions using FFI.md
@@ -6,7 +6,7 @@ First, you have to write your C functions.
Below you can find an example implementation of forward and backward functions of a module that adds both of its inputs.
-In your .c files you can include TH using an #include <TH/TH.h>
-directive, and THC using #include <THC/THC.h>.
+In your `.c` files you can include TH using an `#include <TH/TH.h>` directive, and THC using `#include <THC/THC.h>`.
ffi utils will make sure a compiler can find them during the build.
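+For example, the first lines of `src/my_lib.c` would be (a minimal sketch, assuming the standard TH header layout):
+
+```C
+/* src/my_lib.c */
+#include <TH/TH.h>
+```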
@@ -17,18 +17,18 @@ ffi utils will make sure a compiler can find them during the build.
int my_lib_add_forward(THFloatTensor *input1, THFloatTensor *input2,
THFloatTensor *output)
{
-if (!THFloatTensor_isSameSizeAs(input1, input2))
-return 0;
-THFloatTensor_resizeAs(output, input1);
-THFloatTensor_add(output, input1, input2);
-return 1;
+ if (!THFloatTensor_isSameSizeAs(input1, input2))
+ return 0;
+ THFloatTensor_resizeAs(output, input1);
+ THFloatTensor_add(output, input1, input2);
+ return 1;
}
int my_lib_add_backward(THFloatTensor *grad_output, THFloatTensor *grad_input)
{
-THFloatTensor_resizeAs(grad_input, grad_output);
-THFloatTensor_fill(grad_input, 1);
-return 1;
+ THFloatTensor_resizeAs(grad_input, grad_output);
+ THFloatTensor_fill(grad_input, 1);
+ return 1;
}
```
@@ -39,8 +39,7 @@ It will be used by the ffi utils to generate appropriate wrappers.
```C
/* src/my_lib.h */
-int my_lib_add_forward(THFloatTensor *input1, THFloatTensor *input2,
-THFloatTensor *output);
+int my_lib_add_forward(THFloatTensor *input1, THFloatTensor *input2, THFloatTensor *output);
int my_lib_add_backward(THFloatTensor *grad_output, THFloatTensor *grad_input);
```
@@ -59,7 +58,7 @@ with_cuda=False
## Step 2: Include it in your Python code
-After you run it, pytorch will create an _ext directory and put my_lib inside.
+After you run it, PyTorch will create an `_ext` directory and put `my_lib` inside.
The package name can have an arbitrary number of packages preceding the final module name (including none).
If the build succeeded you can import your extension just like a regular python file.
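+For reference, the build script you run might look roughly like this (a minimal sketch; it assumes the legacy `torch.utils.ffi` API and the `src/` layout from above):
+
+```python
+# build.py -- minimal sketch of the ffi build step
+from torch.utils.ffi import create_extension
+
+ffi = create_extension(
+    '_ext.my_lib',             # package path of the generated extension
+    headers=['src/my_lib.h'],  # declarations the wrappers are generated from
+    sources=['src/my_lib.c'],  # C implementations to compile
+    with_cuda=False,           # this example only needs TH, not THC
+)
+ffi.build()
+```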
@@ -72,16 +71,15 @@ from _ext import my_lib
class MyAddFunction(Function):
-
-def forward(self, input1, input2):
-output = torch.FloatTensor()
-my_lib.my_lib_add_forward(input1, input2, output)
-return output
-
-def backward(self, grad_output):
-grad_input = torch.FloatTensor()
-my_lib.my_lib_add_backward(grad_output, grad_input)
-return grad_input
+ def forward(self, input1, input2):
+ output = torch.FloatTensor()
+ my_lib.my_lib_add_forward(input1, input2, output)
+ return output
+
+ def backward(self, grad_output):
+ grad_input = torch.FloatTensor()
+ my_lib.my_lib_add_backward(grad_output, grad_input)
+ return grad_input
```
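+A quick sanity check of the wrapped function could look like this (a hypothetical smoke test; it assumes the extension built successfully and uses the legacy `Variable` API):
+
+```python
+import torch
+from torch.autograd import Variable
+from functions.add import MyAddFunction
+
+a = Variable(torch.randn(3, 3))
+b = Variable(torch.randn(3, 3))
+# forward dispatches to my_lib.my_lib_add_forward under the hood
+print(MyAddFunction()(a, b))
+```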
```python
@@ -90,9 +88,8 @@ from torch.nn import Module
from functions.add import MyAddFunction
class MyAddModule(Module):
-
-def forward(self, input1, input2):
-return MyAddFunction()(input1, input2)
+ def forward(self, input1, input2):
+ return MyAddFunction()(input1, input2)
```
```python
@@ -102,13 +99,13 @@ from torch.autograd import Variable
from modules.add import MyAddModule
class MyNetwork(nn.Container):
-def __init__(self):
-super(MyNetwork, self).__init__(
-add=MyAddModule(),
-)
+ def __init__(self):
+ super(MyNetwork, self).__init__(
+ add=MyAddModule(),
+ )
-def forward(self, input1, input2):
-return self.add(input1, input2)
+ def forward(self, input1, input2):
+ return self.add(input1, input2)
model = MyNetwork()
input1, input2 = Variable(torch.randn(5, 5)), Variable(torch.randn(5, 5))
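+# hypothetical smoke test: assuming the extension compiled and imported
+# cleanly, the custom op runs like any other module
+print(model(input1, input2))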