# Load library
from nltk.tokenize import word_tokenize, sent_tokenize
# --- Create Text Data ---

# Create text to tokenize
string = "The science of today is the technology of tomorrow. Tomorrow is today."

# Tokenize words: split the text into individual word/punctuation tokens
tokenized_words = word_tokenize(string)
# ['The', 'science', 'of', 'today', 'is', 'the', 'technology', 'of',
#  'tomorrow', '.', 'Tomorrow', 'is', 'today', '.']

# Tokenize sentences: split the text into its component sentences
tokenized_sentences = sent_tokenize(string)
# Expected output:
# ['The science of today is the technology of tomorrow.', 'Tomorrow is today.']