I am referring to https://github.com/explosion/projects/blob/v3/tutorials/nel_emerson/scripts/el_recipe.py (the v3 branch), and I have pasted the relevant section below. As you can see, it uses `from prodigy.models.ner import EntityRecognizer`, but an `EntityRecognizer` class also exists in spaCy itself (documented at "EntityRecognizer · spaCy API Documentation", i.e. `spacy.pipeline.ner`). The `EntityRecognizer` from both packages appears to work. I see the example was originally developed with v2.x and has an upgraded version for v3.x. Please let me know which one should be used here. Thanks,
import spacy
from spacy.kb import KnowledgeBase
import prodigy
from prodigy.models.ner import EntityRecognizer
from prodigy.components.loaders import TXT
from prodigy.util import set_hashes
from prodigy.components.filters import filter_duplicates
import csv
from pathlib import Path
@prodigy.recipe(
"entity_linker.manual",
dataset=("The dataset to use", "positional", None, str),
source=("The source data as a .txt file", "positional", None, Path),
nlp_dir=("Path to the NLP model with a pretrained NER component", "positional", None, Path),
kb_loc=("Path to the KB", "positional", None, Path),
entity_loc=("Path to the file with additional information about the entities", "positional", None, Path),
)
def entity_linker_manual(dataset, source, nlp_dir, kb_loc, entity_loc):
# Load the NLP and KB objects from file
nlp = spacy.load(nlp_dir)
kb = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=1)
kb.load_bulk(kb_loc)
model = EntityRecognizer(nlp)
# Read the pre-defined CSV file into dictionaries mapping QIDs to the full names and descriptions
id_dict = dict()
with entity_loc.open("r", encoding="utf8") as csvfile:
csvreader = csv.reader(csvfile, delimiter=",")
for row in csvreader:
id_dict[row[0]] = (row[1], row[2])
# Initialize the Prodigy stream by running the NER model
stream = TXT(source)
stream = [set_hashes(eg) for eg in stream]
stream = (eg for score, eg in model(stream))