import torch
from torch_mlir import torchscript

from transformers import BertForMaskedLM


# Wrap the BERT model to avoid the multiple-returns problem: the wrapper
# exposes a single tensor output instead of the tuple that BertForMaskedLM returns.
class BertTinyWrapper(torch.nn.Module):
    def __init__(self) -> None:
        super().__init__()
        # return_dict=False makes the model return a plain tuple of tensors.
        self.bert = BertForMaskedLM.from_pretrained("prajjwal1/bert-tiny", return_dict=False)

    def forward(self, data):
        # Keep only the first output, the masked-LM prediction scores.
        return self.bert(data)[0]


model = BertTinyWrapper()
model.eval()
# Random input_ids: a batch of 2 sequences of length 128, drawn from the
# 30522-token BERT vocabulary.
data = torch.randint(30522, (2, 128))
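
# Optional sanity check, not part of the torch_mlir flow (plain eager PyTorch):
# the wrapper should return a single logits tensor of shape
# (batch, seq_len, vocab_size) = (2, 128, 30522) rather than a tuple.
with torch.no_grad():
    logits = model(data)
assert logits.shape == (2, 128, 30522)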

out_stablehlo_mlir_path = "./bert_tiny_stablehlo.mlir"

# Trace the wrapped model and lower it to StableHLO MLIR.
module = torchscript.compile(model, data, output_type=torchscript.OutputType.STABLEHLO, use_tracing=True)
with open(out_stablehlo_mlir_path, "w", encoding="utf-8") as outf:
    outf.write(str(module))

print(f"StableHLO IR of BERT-tiny successfully written to {out_stablehlo_mlir_path}")
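
# Optional: re-open the emitted file and print its first line, just to confirm
# the IR was written (plain file I/O; not part of the original example).
with open(out_stablehlo_mlir_path, "r", encoding="utf-8") as inf:
    print(inf.readline().rstrip())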