from transformers import AutoTokenizer

# Load the pretrained BERT tokenizer (cased vocabulary, matches "bert-base-cased").
# NOTE: this downloads (or reads from the local cache) the tokenizer files.
tokenizer = AutoTokenizer.from_pretrained("bert-base-cased")

# Encode a single sentence. The result is a dict-like BatchEncoding with
# "input_ids", "token_type_ids" and "attention_mask".
encoded_input = tokenizer("Then pass your text to the tokenizer")
print(encoded_input)

# Inspect each field of the encoding individually.
input_ids = encoded_input["input_ids"]
print(encoded_input["input_ids"])
print(encoded_input["token_type_ids"])
print(encoded_input["attention_mask"])

# Round-trip: decode the token ids back to text.
# skip_special_tokens=True drops the added [CLS]/[SEP] markers.
print(tokenizer.decode(encoded_input["input_ids"], skip_special_tokens=True))
from transformers import AutoTokenizer

# Load the same pretrained BERT tokenizer for the batched example.
tokenizer = AutoTokenizer.from_pretrained("bert-base-cased")

# Encode a batch of sentences of different lengths.
# padding=True pads every sequence to the longest one in the batch;
# return_tensors="pt" returns PyTorch tensors instead of Python lists.
encoded_input = tokenizer(
    [
        "pass your text to the tokenizer",
        "tell me",
        "The beautiful woman is shopping on the street",
    ],
    padding=True,
    return_tensors="pt",
)
print(encoded_input)