Rate this Page
★★★★★
python.data-structure#
dictionary#
Original source code:
# mypy: allow-untyped-defsimporttorchclassDictionary(torch.nn.Module):""" Dictionary structures are inlined and flattened along tracing. """defforward(self,x,y):elements={}elements["x2"]=x*xy=y*elements["x2"]return{"y":y}example_args=(torch.randn(3,2),torch.tensor(4))tags={"python.data-structure"}model=Dictionary()torch.export.export(model,example_args)
Result:
ExportedProgram:
    class GraphModule(torch.nn.Module):
        def forward(self, x: "f32[3, 2]", y: "i64[]"):
            mul: "f32[3, 2]" = torch.ops.aten.mul.Tensor(x, x);  x = None
            mul_1: "f32[3, 2]" = torch.ops.aten.mul.Tensor(y, mul);  y = mul = None
            return (mul_1,)

Graph signature:
    # inputs
    x: USER_INPUT
    y: USER_INPUT

    # outputs
    mul_1: USER_OUTPUT

Range constraints: {}
fn_with_kwargs#
Original source code:
# mypy: allow-untyped-defsimporttorchclassFnWithKwargs(torch.nn.Module):""" Keyword arguments are not supported at the moment. """defforward(self,pos0,tuple0,*myargs,mykw0,**mykwargs):out=pos0forargintuple0:out=out*argforarginmyargs:out=out*argout=out*mykw0out=out*mykwargs["input0"]*mykwargs["input1"]returnoutexample_args=(torch.randn(4),(torch.randn(4),torch.randn(4)),*[torch.randn(4),torch.randn(4)])example_kwargs={"mykw0":torch.randn(4),"input0":torch.randn(4),"input1":torch.randn(4),}tags={"python.data-structure"}model=FnWithKwargs()torch.export.export(model,example_args,example_kwargs)
Result:
ExportedProgram:
    class GraphModule(torch.nn.Module):
        def forward(self, pos0: "f32[4]", tuple0_0: "f32[4]", tuple0_1: "f32[4]", myargs_0: "f32[4]", myargs_1: "f32[4]", mykw0: "f32[4]", input0: "f32[4]", input1: "f32[4]"):
            mul: "f32[4]" = torch.ops.aten.mul.Tensor(pos0, tuple0_0);  pos0 = tuple0_0 = None
            mul_1: "f32[4]" = torch.ops.aten.mul.Tensor(mul, tuple0_1);  mul = tuple0_1 = None
            mul_2: "f32[4]" = torch.ops.aten.mul.Tensor(mul_1, myargs_0);  mul_1 = myargs_0 = None
            mul_3: "f32[4]" = torch.ops.aten.mul.Tensor(mul_2, myargs_1);  mul_2 = myargs_1 = None
            mul_4: "f32[4]" = torch.ops.aten.mul.Tensor(mul_3, mykw0);  mul_3 = mykw0 = None
            mul_5: "f32[4]" = torch.ops.aten.mul.Tensor(mul_4, input0);  mul_4 = input0 = None
            mul_6: "f32[4]" = torch.ops.aten.mul.Tensor(mul_5, input1);  mul_5 = input1 = None
            return (mul_6,)

Graph signature:
    # inputs
    pos0: USER_INPUT
    tuple0_0: USER_INPUT
    tuple0_1: USER_INPUT
    myargs_0: USER_INPUT
    myargs_1: USER_INPUT
    mykw0: USER_INPUT
    input0: USER_INPUT
    input1: USER_INPUT

    # outputs
    mul_6: USER_OUTPUT

Range constraints: {}
list_contains#
Original source code:
# mypy: allow-untyped-defsimporttorchclassListContains(torch.nn.Module):""" List containment relation can be checked on a dynamic shape or constants. """defforward(self,x):assertx.size(-1)in[6,2]assertx.size(0)notin[4,5,6]assert"monkey"notin["cow","pig"]returnx+xexample_args=(torch.randn(3,2),)tags={"torch.dynamic-shape","python.data-structure","python.assert"}model=ListContains()torch.export.export(model,example_args)
Result:
ExportedProgram:
    class GraphModule(torch.nn.Module):
        def forward(self, x: "f32[3, 2]"):
            add: "f32[3, 2]" = torch.ops.aten.add.Tensor(x, x);  x = None
            return (add,)

Graph signature:
    # inputs
    x: USER_INPUT

    # outputs
    add: USER_OUTPUT

Range constraints: {}
list_unpack#
Original source code:
# mypy: allow-untyped-defsimporttorchclassListUnpack(torch.nn.Module):""" Lists are treated as static construct, therefore unpacking should be erased after tracing. """defforward(self,args:list[torch.Tensor]):""" Lists are treated as static construct, therefore unpacking should be erased after tracing. """x,*y=argsreturnx+y[0]example_args=([torch.randn(3,2),torch.tensor(4),torch.tensor(5)],)tags={"python.control-flow","python.data-structure"}model=ListUnpack()torch.export.export(model,example_args)
Result:
ExportedProgram:
    class GraphModule(torch.nn.Module):
        def forward(self, args_0: "f32[3, 2]", args_1: "i64[]", args_2: "i64[]"):
            add: "f32[3, 2]" = torch.ops.aten.add.Tensor(args_0, args_1);  args_0 = args_1 = None
            return (add,)

Graph signature:
    # inputs
    args_0: USER_INPUT
    args_1: USER_INPUT
    args_2: USER_INPUT

    # outputs
    add: USER_OUTPUT

Range constraints: {}
On this page