TestBatched.test_if_noelse_with_scalar.expect
graph(%a.1_data : Tensor
      %a.1_mask : Tensor
      %a.1_dims : Tensor
      %b_data : Tensor
      %b_mask : Tensor
      %b_dims : Tensor) {
  %6 : int = prim::Constant[value=1]()
  %7 : float = prim::Constant[value=0.1]()
  %8 : Float() = prim::NumToTensor(%7)
  %other : float = prim::Float(%8)
  %10 : Tensor = aten::gt(%a.1_data, %other)
  %11 : Long() = prim::NumToTensor(%6)
  %alpha : float = prim::Float(%11)
  %data : Tensor = aten::add(%a.1_data, %b_data, %alpha)
  %mask : Tensor = aten::mul(%a.1_mask, %b_mask)
  %dims : Tensor = aten::__or__(%a.1_dims, %b_dims)
  %16 : bool = prim::Constant[value=1]()
  %17 : int = prim::Constant[value=1]()
  %18 : Tensor = aten::type_as(%a.1_mask, %10)
  %data.2 : Tensor = aten::mul(%10, %18)
  %20 : int = aten::dim(%data.2)
  %21 : bool = aten::eq(%20, %17)
  %cond_data : Tensor, %cond_mask : Tensor = prim::If(%21)
    block0() {
      %24 : int = aten::dim(%data)
      %25 : int = aten::sub(%24, %17)
      %data.4 : Tensor = prim::Loop(%25, %16, %data.2)
        block0(%27 : int, %28 : Tensor) {
          %29 : int = aten::dim(%28)
          %data.3 : Tensor = aten::unsqueeze(%28, %29)
          -> (%16, %data.3)
        }
      %cond_data.1 : Tensor = aten::expand_as(%data.4, %data)
      %cond_mask.1 : Tensor = aten::expand_as(%data.4, %mask)
      -> (%cond_data.1, %cond_mask.1)
    }
    block1() {
      -> (%data.2, %data.2)
    }
  %res_data : Tensor = aten::where(%cond_data, %data, %a.1_data)
  %res_mask : Tensor = aten::where(%cond_mask, %mask, %a.1_mask)
  %res_dims : Tensor = aten::__or__(%dims, %a.1_dims)
  %36 : (Tensor, Tensor, Tensor) = prim::TupleConstruct(%res_data, %res_mask, %res_dims)
  return (%36);
}
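
Note: the expected graph above is the batched (data/mask/dims) lowering of a scripted if-statement with no else branch, whose condition compares a tensor against the scalar 0.1 and whose body adds the two inputs. A minimal sketch of such a source function is given below for orientation; the function name and exact wording are assumptions for illustration, not the verbatim test body.

import torch

@torch.jit.script
def single_if(a, b):
    # Hypothetical source shape: tensor-vs-scalar guard
    # (corresponds to aten::gt(%a.1_data, 0.1) above).
    if bool(a > 0.1):
        # Taken branch corresponds to aten::add(%a.1_data, %b_data, alpha=1);
        # the batching pass then selects per-example results with aten::where.
        a = a + b
    return a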