Course Notes: Introduction to Natural Language Processing in Python
    # Import necessary modules
    from nltk.tokenize import sent_tokenize
    from nltk.tokenize import word_tokenize
    
    # Split scene_one into sentences: sentences
    sentences = sent_tokenize(scene_one)
    
    # Use word_tokenize to tokenize the fourth sentence: tokenized_sent
    tokenized_sent = word_tokenize(sentences[3])
    
    # Make a set of unique tokens in the entire scene: unique_tokens
    unique_tokens = set(word_tokenize(scene_one))
    
    # Print the set of unique tokens
    print(unique_tokens)
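
The snippet above assumes that the NLTK Punkt tokenizer models are already installed and that scene_one is a string holding the raw text of the scene (it is pre-loaded in the course environment). A minimal setup sketch, assuming the text lives in a local file named scene_one.txt (hypothetical filename):

    import nltk
    
    # One-time download of the Punkt sentence tokenizer models
    nltk.download("punkt")
    
    # Load the scene text from a local file (hypothetical path)
    with open("scene_one.txt", encoding="utf-8") as f:
        scene_one = f.read()

With that in place, sent_tokenize splits the text into a list of sentence strings, and word_tokenize splits a string into word and punctuation tokens; wrapping the full tokenization in set() keeps only the unique tokens.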