File: test_prodigy.py (python-prodigy 2.3.0-1~exp1)

import json
import tarfile
import tempfile
from io import BufferedReader, TextIOWrapper
from os.path import basename, splitext
from pathlib import Path

import pytest
from Bio.PDB.PDBParser import PDBParser
from Bio.PDB.Residue import Residue
from Bio.PDB.Structure import Structure

from prodigy_prot.modules.parsers import validate_structure
from prodigy_prot.modules.prodigy import (
    Prodigy,
    analyse_contacts,
    analyse_nis,
    calculate_ic,
)

from . import TEST_DATA


@pytest.fixture
def input_pdb_structure():
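    """Parse the 2oob test PDB from the test data directory into a Bio.PDB Structure."""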
    input_f = Path(TEST_DATA, "2oob.pdb")
    parser = PDBParser()
    return parser.get_structure(input_f.stem, input_f)


@pytest.fixture
def compressed_dataset_f():
    return Path(TEST_DATA, "dataset.tgz")


@pytest.fixture
def expected_dataset_json():
    return Path(TEST_DATA, "dataset.json")


@pytest.fixture
def prodigy_class(input_pdb_structure):
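    """Yield a Prodigy instance built from the parsed 2oob test structure."""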
    yield Prodigy(struct_obj=input_pdb_structure)


def test_calculate_ic(input_pdb_structure):
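    """calculate_ic should return 78 residue contact pairs for 2oob at the 5.5 Å cutoff."""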

    result = calculate_ic(struct=input_pdb_structure, d_cutoff=5.5)

    assert len(result) == 78

    first_hit: tuple[Residue, Residue] = result[0]

    assert first_hit[0].get_resname() == "ASN"
    assert first_hit[1].get_resname() == "LYS"


def test_calculate_ic_with_selection(input_pdb_structure):
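    """An explicit chain selection should yield the same contacts as the default run."""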

    result = calculate_ic(
        struct=input_pdb_structure, d_cutoff=5.5, selection={"A": 0, "B": 1}
    )

    assert len(result) == 78

    first_hit: tuple[Residue, Residue] = result[0]

    assert first_hit[0].get_resname() == "ASN"
    assert first_hit[1].get_resname() == "LYS"


def test_analyse_contacts(input_pdb_structure):
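    """A single A931-B6 residue pair should be counted as one charged-polar (CP) contact."""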

    res_a = input_pdb_structure[0]["A"][(" ", 931, " ")]
    res_b = input_pdb_structure[0]["B"][(" ", 6, " ")]
    contact = (res_a, res_b)

    test_input = [contact]

    result = analyse_contacts(test_input)

    expected_output = {
        "AA": 0.0,
        "PP": 0.0,
        "CC": 0.0,
        "AP": 0.0,
        "CP": 1.0,
        "AC": 0.0,
    }

    assert result == expected_output


def test_analyse_nis():

    test_input = {("B", "ARG", "72"): 0.9}
    apolar, polar, charged = analyse_nis(test_input)

    assert apolar == 0.0
    assert polar == 100.0
    assert charged == 0.0


def test_prodigy_predict(prodigy_class):
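    """predict() should populate the NIS values and the binding affinity/Kd predictions."""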

    prodigy_class.predict()

    assert prodigy_class.nis_a == pytest.approx(35.5, abs=1.0)
    assert prodigy_class.nis_c == pytest.approx(38.0, abs=1.0)
    assert prodigy_class.ba_val == pytest.approx(-6.2, abs=1.0)

    # This is the actual prediction
    assert prodigy_class.kd_val == pytest.approx(2.7e-5, abs=1e-6)


def test_prodigy_as_dict(prodigy_class):
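    """as_dict() should expose the results as a plain dictionary with 14 entries."""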

    result = prodigy_class.as_dict()

    assert isinstance(result, dict)
    assert len(result) == 14


def test_prodigy_print_prediction(prodigy_class):
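    """print_prediction() should write a non-empty report to the given file."""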

    outfile = tempfile.NamedTemporaryFile(delete=False)
    assert Path(outfile.name).stat().st_size == 0

    prodigy_class.print_prediction(outfile.name)
    assert Path(outfile.name).stat().st_size != 0

    Path(outfile.name).unlink()


def test_prodigy_print_prediction_quiet(prodigy_class):

    outfile = tempfile.NamedTemporaryFile(delete=False)
    assert Path(outfile.name).stat().st_size == 0

    prodigy_class.print_prediction(outfile.name, True)
    assert Path(outfile.name).stat().st_size != 0

    Path(outfile.name).unlink()


def test_prodigy_print_contacts(input_pdb_structure, prodigy_class):

    res_a = input_pdb_structure[0]["A"][(" ", 931, " ")]
    res_b = input_pdb_structure[0]["B"][(" ", 6, " ")]
    prodigy_class.ic_network = [(res_a, res_b)]

    outfile = tempfile.NamedTemporaryFile(delete=False)
    assert Path(outfile.name).stat().st_size == 0

    prodigy_class.print_contacts(outfile.name)
    assert Path(outfile.name).stat().st_size != 0

    Path(outfile.name).unlink()


def test_print_pymol_script(input_pdb_structure, prodigy_class):
    res_a = input_pdb_structure[0]["A"][(" ", 931, " ")]
    res_b = input_pdb_structure[0]["B"][(" ", 6, " ")]
    prodigy_class.ic_network = [(res_a, res_b)]

    outfile = tempfile.NamedTemporaryFile(delete=False)
    assert Path(outfile.name).stat().st_size == 0

    prodigy_class.print_pymol_script(outfile.name)
    assert Path(outfile.name).stat().st_size != 0

    Path(outfile.name).unlink()


@pytest.mark.integration
def test_dataset_prediction(compressed_dataset_f, expected_dataset_json):
    """
    Test method to compare prediction for 80 dataset cases with
        expected values.
    """
    # load expected data from json
    with open(expected_dataset_json) as fh:
        expected_data = json.load(fh)

    # load dataset PDBs
    dataset = tarfile.open(compressed_dataset_f)
    parser = PDBParser(QUIET=True)

    keys_equal = ["AA", "PP", "CC", "AP", "CP", "AC"]
    diffs = {"ba_val": [], "nis_a": [], "nis_c": []}

    # run prodigy for each dataset in the PDB
    for entry in dataset:
        s_name, s_ext = splitext(basename(entry.name))

        # skip system files in archive
        if not s_name.isalnum() or s_ext != ".pdb":
            continue

        handle = dataset.extractfile(entry)

        # Wrap the binary tar member in a text-mode handle so the parser reads strings
        handle = TextIOWrapper(BufferedReader(handle))  # type: ignore

        parsed_structure = parser.get_structure(s_name, handle)
        assert isinstance(parsed_structure, Structure)

        s = validate_structure(parsed_structure, selection=["A", "B"])

        # Test for structure object
        assert isinstance(s, Structure)

        #  run prediction and retrieve result dict
        prod = Prodigy(s, selection=["A", "B"])
        prod.predict()
        results = prod.as_dict()

        # check for equality of predicted interface contact counts
        for k in keys_equal:
            observed_value = results[k]
            expected_value = expected_data[s_name][k]
            assert observed_value == pytest.approx(expected_value)

        # check that NIS and binding affinity values are within 2% of
        # the expected values and collect the diffs for a summary
        for k in diffs.keys():
            delta = abs(results[k] / expected_data[s_name][k] - 1)
            # require a relative difference of less than 2%
            assert delta == pytest.approx(0, abs=0.02)
            diffs[k].append(delta)