"""Tokenize a sample text with NLTK and print its named entities via spaCy."""

import nltk
from nltk.tokenize import word_tokenize

import spacy

# Sample text to analyze.
text = "Your deep text here with multiple keywords."

# Tokenize with NLTK.
tokens = word_tokenize(text)

# Initialize spaCy's small English pipeline.
nlp = spacy.load("en_core_web_sm")

# Run the pipeline over the text; this step was missing in the original,
# which referenced `doc.ents` without ever creating `doc`.
doc = nlp(text)

# Print each recognized entity with its label.
for entity in doc.ents:
    print(entity.text, entity.label_)