import pytest

import wn


@pytest.mark.usefixtures("uninitialized_datadir")
def test_lexicons_uninitialized():
    """An uninitialized data directory yields no lexicons."""
    assert not wn.lexicons()


@pytest.mark.usefixtures("empty_db")
def test_lexicons_empty():
    """An empty database yields no lexicons."""
    assert not wn.lexicons()


@pytest.mark.usefixtures("mini_db")
def test_lexicons_mini():
    """Lexicon listing and filtering against the two mini lexicons."""
    all_lexicons = wn.lexicons()
    assert len(all_lexicons) == 2
    assert all(isinstance(lexicon, wn.Lexicon) for lexicon in all_lexicons)

    # filtering by language returns exactly the matching lexicon
    for code in ("en", "es"):
        matches = wn.lexicons(lang=code)
        assert len(matches) == 1
        assert matches[0].language == code

    # specifier patterns that match both lexicons
    for spec in ("*", "*:1", "test-*"):
        assert len(wn.lexicons(lexicon=spec)) == 2
    assert len(wn.lexicons(lexicon="*-en")) == 1
    # specifier patterns that match only the English lexicon
    for spec in ("test-en", "test-en:1", "test-en:*"):
        matches = wn.lexicons(lexicon=spec)
        assert len(matches) == 1
        assert matches[0].language == "en"

    # specifiers and (empty) requirements
    for code in ("en", "es"):
        lexicon = wn.lexicons(lexicon=f"test-{code}")[0]
        assert lexicon.specifier() == f"test-{code}:1"
        assert lexicon.requires() == {}

    # Lexicon objects are hashable and usable as dict keys
    en_lexicon = wn.lexicons(lexicon="test-en")[0]
    assert {en_lexicon: "foo"}[en_lexicon] == "foo"


@pytest.mark.usefixtures("mini_db")
def test_lexicons_unknown():
    """Unknown language or lexicon filters yield empty results (no error)."""
    assert not wn.lexicons(lang="unk")
    assert not wn.lexicons(lexicon="test-unk")


@pytest.mark.usefixtures("empty_db")
def test_words_empty():
    """An empty database yields no words."""
    assert not wn.words()


@pytest.mark.usefixtures("mini_db")
def test_words_mini():
    """Word queries and filters against the mini English/Spanish lexicons."""
    everything = wn.words()
    assert len(everything) == 15
    assert all(isinstance(word, wn.Word) for word in everything)

    # lemma search
    matches = wn.words("information")
    assert len(matches) == 1
    assert matches[0].lemma() == "information"

    # the lemma's Form object carries script and tag data
    form = matches[0].lemma(data=True)
    assert form.value == "information"
    assert form.script == "Latn"
    assert form.tags() == [wn.Tag("tag-text", "tag-category")]

    # secondary-form search resolves to the word's lemma
    matches = wn.words("exemplifies")
    assert len(matches) == 1
    assert matches[0].lemma() == "exemplify"

    # part-of-speech filtering
    nouns = wn.words(pos="n")
    assert len(nouns) == 10
    assert all(word.pos == "n" for word in nouns)
    assert len(wn.words(pos="v")) == 5
    assert not wn.words(pos="q")  # fake pos

    # language and lexicon filtering
    assert len(wn.words(lang="en")) == 9
    assert len(wn.words(lang="es")) == 6
    assert len(wn.words(lexicon="test-en")) == 9
    assert len(wn.words(lexicon="test-es")) == 6

    # combined filters
    assert len(wn.words(lang="en", lexicon="test-en")) == 9
    assert len(wn.words(pos="v", lang="en")) == 3
    assert len(wn.words("information", lang="en")) == 1
    assert not wn.words("information", lang="es")

    # unknown languages or lexicons raise rather than returning nothing
    for bad in ({"lang": "unk"}, {"lexicon": "test-unk"}):
        with pytest.raises(wn.Error):
            wn.words(**bad)


@pytest.mark.usefixtures("empty_db")
def test_lemmas_empty():
    """An empty database yields no lemmas."""
    assert not wn.lemmas()


@pytest.mark.usefixtures("mini_db_1_4")
def test_lemmas_mini_1_4():
    """Lemma queries against the WN-LMF 1.4 mini lexicon."""
    w = wn.Wordnet(lexicon="test-1.4")

    # default form: deduplicated plain strings
    plain = w.lemmas()
    assert len(plain) == 5
    assert all(isinstance(item, str) for item in plain)
    assert plain == ["Foo Bar", "foo bar", "baz", "BAZ", "Baz"]

    # data=True returns Form objects and does not deduplicate
    forms = w.lemmas(data=True)
    assert len(forms) == 6  # includes duplicate 'baz'
    assert all(isinstance(item, wn.Form) for item in forms)
    assert [form.value for form in forms] == [
        "Foo Bar",
        "foo bar",
        "baz",
        "BAZ",
        "Baz",
        "baz",
    ]

    # deduplication when searching a specific lemma
    assert w.lemmas("baz", data=False) == ["baz", "BAZ", "Baz"]
    # with data=True no deduplication happens
    assert [form.value for form in w.lemmas("baz", data=True)] == [
        "baz",
        "BAZ",
        "Baz",
        "baz",
    ]

    # part-of-speech filtering
    assert len(w.lemmas(pos="n")) == 5  # Foo Bar, foo bar, baz, BAZ, Baz
    assert len(w.lemmas(pos="v")) == 1  # baz
    assert not w.lemmas(pos="q")  # fake pos

    # lemmas() agrees with collecting each word's lemma via words()
    assert {word.lemma() for word in w.words()} == set(w.lemmas())

    # the module-level function matches the instance method
    assert wn.lemmas(lexicon="test-1.4") == w.lemmas()
    assert wn.lemmas(data=True, lexicon="test-1.4") == w.lemmas(data=True)

    # unknown languages or lexicons raise rather than returning nothing
    for bad in ({"lang": "unk"}, {"lexicon": "test-unk"}):
        with pytest.raises(wn.Error):
            wn.lemmas(**bad)


@pytest.mark.usefixtures("empty_db")
def test_word_empty():
    """Looking up a word ID in an empty database raises wn.Error."""
    with pytest.raises(wn.Error):
        wn.word("test-es-información-n")


@pytest.mark.usefixtures("mini_db")
def test_word_mini():
    """word() lookup by ID, constrained by language or lexicon."""
    word_id = "test-es-información-n"
    # found without constraints or with the matching language/lexicon
    assert wn.word(word_id)
    assert wn.word(word_id, lang="es")
    assert wn.word(word_id, lexicon="test-es")
    # mismatched or unknown constraints raise wn.Error
    for bad in (
        {"lang": "en"},
        {"lexicon": "test-en"},
        {"lang": "unk"},
        {"lexicon": "test-unk"},
    ):
        with pytest.raises(wn.Error):
            wn.word(word_id, **bad)


@pytest.mark.usefixtures("empty_db")
def test_senses_empty():
    """An empty database yields no senses."""
    assert not wn.senses()


@pytest.mark.usefixtures("mini_db")
def test_senses_mini():
    """Sense queries and filters against the mini English/Spanish lexicons."""
    assert len(wn.senses()) == 16
    assert all(isinstance(s, wn.Sense) for s in wn.senses())

    senses = wn.senses("information")  # search lemma
    assert len(senses) == 1
    assert senses[0].word().lemma() == "information"
    assert senses[0].counts() == [3]

    senses = wn.senses("exemplifies")  # search secondary form
    assert len(senses) == 1
    # a single equality check suffices (previously asserted three
    # trivially-equivalent ways)
    assert senses[0].word().lemma() == "exemplify"

    # part-of-speech filtering
    assert len(wn.senses(pos="n")) == 11
    assert len(wn.senses(pos="v")) == 5
    assert len(wn.senses(pos="q")) == 0  # fake pos

    # language and lexicon filtering
    assert len(wn.senses(lang="en")) == 10
    assert len(wn.senses(lang="es")) == 6

    assert len(wn.senses(lexicon="test-en")) == 10
    assert len(wn.senses(lexicon="test-es")) == 6

    # combined filters
    assert len(wn.senses(lang="en", lexicon="test-en")) == 10
    assert len(wn.senses(pos="v", lang="en")) == 3
    assert len(wn.senses("information", lang="en")) == 1
    assert len(wn.senses("information", lang="es")) == 0

    # unknown languages or lexicons raise rather than returning nothing
    with pytest.raises(wn.Error):
        wn.senses(lang="unk")
    with pytest.raises(wn.Error):
        wn.senses(lexicon="test-unk")


@pytest.mark.usefixtures("empty_db")
def test_sense_empty():
    """Looking up a sense ID in an empty database raises wn.Error."""
    with pytest.raises(wn.Error):
        wn.sense("test-es-información-n-0001-01")


@pytest.mark.usefixtures("mini_db")
def test_sense_mini():
    """sense() lookup by ID, constrained by language or lexicon."""
    sense_id = "test-es-información-n-0001-01"
    # found without constraints or with the matching language/lexicon
    assert wn.sense(sense_id)
    assert wn.sense(sense_id, lang="es")
    assert wn.sense(sense_id, lexicon="test-es")
    # mismatched or unknown constraints raise wn.Error
    for bad in (
        {"lang": "en"},
        {"lexicon": "test-en"},
        {"lang": "unk"},
        {"lexicon": "test-unk"},
    ):
        with pytest.raises(wn.Error):
            wn.sense(sense_id, **bad)


@pytest.mark.usefixtures("empty_db")
def test_synsets_empty():
    """An empty database yields no synsets."""
    assert not wn.synsets()


@pytest.mark.usefixtures("mini_db")
def test_synsets_mini():
    """Synset queries and filters against the mini English/Spanish lexicons."""
    everything = wn.synsets()
    assert len(everything) == 12
    assert all(isinstance(synset, wn.Synset) for synset in everything)

    # lemma search
    found = wn.synsets("information")
    assert len(found) == 1
    assert "information" in found[0].lemmas()

    # secondary-form search
    found = wn.synsets("exemplifies")
    assert len(found) == 1
    assert "exemplify" in found[0].lemmas()

    # part-of-speech filtering
    assert len(wn.synsets(pos="n")) == 9
    assert len(wn.synsets(pos="v")) == 3
    assert not wn.synsets(pos="q")  # fake pos

    # interlingual-index filtering
    assert len(wn.synsets(ili="i67469")) == 2
    assert not wn.synsets(ili="i67468")

    # language and lexicon filtering
    assert len(wn.synsets(lang="en")) == 8
    assert len(wn.synsets(lang="es")) == 4
    assert len(wn.synsets(lexicon="test-en")) == 8
    assert len(wn.synsets(lexicon="test-es")) == 4

    # combined filters
    assert len(wn.synsets(lang="en", lexicon="test-en")) == 8
    assert len(wn.synsets(pos="v", lang="en")) == 2
    assert len(wn.synsets("information", lang="en")) == 1
    assert not wn.synsets("information", lang="es")
    assert len(wn.synsets(ili="i67469", lang="es")) == 1

    # unknown languages or lexicons raise rather than returning nothing
    for bad in ({"lang": "unk"}, {"lexicon": "test-unk"}):
        with pytest.raises(wn.Error):
            wn.synsets(**bad)


@pytest.mark.usefixtures("empty_db")
def test_synset_empty():
    """Looking up a synset ID in an empty database raises wn.Error."""
    with pytest.raises(wn.Error):
        wn.synset("test-es-0001-n")


@pytest.mark.usefixtures("mini_db")
def test_synset_mini():
    """synset() lookup by ID, constrained by language or lexicon."""
    synset_id = "test-es-0001-n"
    # found without constraints or with the matching language/lexicon
    assert wn.synset(synset_id)
    assert wn.synset(synset_id, lang="es")
    assert wn.synset(synset_id, lexicon="test-es")
    # mismatched or unknown constraints raise wn.Error
    for bad in (
        {"lang": "en"},
        {"lexicon": "test-en"},
        {"lang": "unk"},
        {"lexicon": "test-unk"},
    ):
        with pytest.raises(wn.Error):
            wn.synset(synset_id, **bad)


@pytest.mark.usefixtures("mini_db_1_1")
def test_mini_1_1():
    """Cross-lexicon behavior with the WN-LMF 1.1 mini lexicons."""
    assert len(wn.lexicons()) == 4
    assert len(wn.lexicons(lang="en")) == 2
    assert len(wn.lexicons(lang="ja")) == 1
    assert wn.lexicons(lang="ja")[0].logo == "logo.svg"

    english = wn.Wordnet(lang="en")
    assert len(english.lexicons()) == 2
    assert len(english.expanded_lexicons()) == 0
    assert len(english.word("test-en-exemplify-v").lemma(data=True).tags()) == 1

    japanese = wn.Wordnet(lang="ja")
    assert len(japanese.lexicons()) == 1
    assert len(japanese.expanded_lexicons()) == 1
    assert len(japanese.synsets("例え")[0].hypernyms()) == 1
    assert japanese.synsets("例え")[0].lexfile() == "noun.cognition"
    lemma = japanese.word("test-ja-例え-n").lemma(data=True)
    assert len(lemma.pronunciations()) == 1
    forms = japanese.word("test-ja-例え-n").forms(data=True)
    assert forms[1].id == "test-ja-例え-n-たとえ"
    pron = lemma.pronunciations()[0]
    assert pron.value == "tatoe"
    assert pron.variety == "standard"
    assert pron.notation == "ipa"
    assert pron.phonemic
    assert pron.audio == "tatoe.wav"

    # an empty expand string disables expansion
    unexpanded = wn.Wordnet(lang="ja", expand="")
    assert len(unexpanded.lexicons()) == 1
    assert len(unexpanded.expanded_lexicons()) == 0
    assert len(unexpanded.synsets("例え")[0].hypernyms()) == 0

    # a lexicon queried together with its extension
    combined = wn.Wordnet(lexicon="test-en test-en-ext")
    assert len(combined.lexicons()) == 2
    assert len(combined.expanded_lexicons()) == 0
    assert len(combined.synsets("fire")[0].hyponyms()) == 1


@pytest.mark.usefixtures("mini_db_1_1")
def test_mini_1_1_lexicons():
    """Lexicon metadata: specifiers, requirements, and extension links."""

    def first(name):
        # each name resolves to exactly one lexicon in this fixture
        return wn.lexicons(lexicon=name)[0]

    base = first("test-en")
    assert base.specifier() == "test-en:1"
    assert not base.requires()
    assert base.extends() is None
    assert len(base.extensions()) == 1
    assert base.extensions()[0].specifier() == "test-en-ext:1"

    spanish = first("test-es")
    assert spanish.specifier() == "test-es:1"
    assert not spanish.requires()
    assert spanish.extends() is None
    assert not spanish.extensions()

    extension = first("test-en-ext")
    assert extension.specifier() == "test-en-ext:1"
    assert not extension.requires()
    assert extension.extends() is not None
    assert extension.extends().specifier() == "test-en:1"
    assert not extension.extensions()

    dependent = first("test-ja")
    assert dependent.specifier() == "test-ja:1"
    assert "test-en:1" in dependent.requires()
    assert dependent.extends() is None
    assert not dependent.extensions()


@pytest.mark.usefixtures("mini_db_1_4")
def test_mini_1_4():
    """Index-based matching and result ordering in the 1.4 mini lexicon."""
    wnet = wn.Wordnet("test-1.4:1", normalizer=None)
    # entries sharing an index are matched even without a normalizer
    assert len(wnet.words("Foo Bar")) == 2
    assert len(wnet.words("foo bar")) == 2
    # when the index is missing the lemma is used; no normalization occurs
    assert len(wnet.words("baz")) == 3
    assert len(wnet.words("Baz")) == 1
    # senses are ordered by their 'n' values
    assert [sense.id for sense in wnet.senses("foo bar")] == [
        "test-1.4-foo_bar-n-2",
        "test-1.4-foo_bar-n-1",
        "test-1.4-Foo_Bar-n-1",
    ]
    assert [sense.id for sense in wnet.senses("baz")] == [
        "test-1.4-baz-n-1",
        "test-1.4-BAZ-n-1",
        "test-1.4-baz-v-1",
    ]
    assert [sense.id for sense in wnet.senses("baz", pos="v")] == [
        "test-1.4-baz-v-1",
    ]
    # ordering is unspecified when implicit and explicit values of 'n'
    # overlap, so compare as a set
    assert {sense.id for sense in wnet.senses("Baz")} == {
        "test-1.4-Baz-n-1",
        "test-1.4-Baz-n-2",
    }
    # synset order also follows the index
    assert [synset.id for synset in wnet.synsets("foo bar")] == [
        "test-1.4-2",
        "test-1.4-1",
    ]