Description: Allow more test_chord warnings
Author: Josenilson Ferreira da silva <nilsonfsilva@hotmail.com>
Forwarded: https://github.com/craffel/mir_eval/issues/352
Last-Update: 2023-01-16
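
The aliases np.int and np.float were deprecated in NumPy 1.20 and removed in
NumPy 1.24, so every remaining use now raises AttributeError rather than a
DeprecationWarning. The hunks below replace the aliases with the builtin
int/float, which NumPy accepts directly as dtype and astype arguments. A
minimal sketch of the equivalent behaviour (illustrative only, not part of
the patch):

    import numpy as np

    # Builtin types map to the same default dtypes the removed aliases did.
    beats = np.arange(10, dtype=float)          # formerly dtype=np.float
    semitones = np.zeros([10, 12], dtype=int)   # formerly dtype=np.int
    scores = (beats > 4).astype(float)          # formerly .astype(np.float)
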
@@ -540,7 +540,7 @@ def _contingency_matrix(reference_indice
return scipy.sparse.coo_matrix((np.ones(ref_class_idx.shape[0]),
(ref_class_idx, est_class_idx)),
shape=(n_ref_classes, n_est_classes),
- dtype=np.int).toarray()
+ dtype=int).toarray()
def _adjusted_rand_index(reference_indices, estimated_indices):
@@ -720,7 +720,7 @@ def _entropy(labels):
if len(labels) == 0:
return 1.0
label_idx = np.unique(labels, return_inverse=True)[1]
- pi = np.bincount(label_idx).astype(np.float)
+ pi = np.bincount(label_idx).astype(float)
pi = pi[pi > 0]
pi_sum = np.sum(pi)
# log(a / b) should be calculated as log(a) - log(b) for
@@ -510,7 +510,7 @@ def encode(chord_label, reduce_extended_
semitone_bitmap += scale_degree_to_bitmap(scale_degree,
reduce_extended_chords)
- semitone_bitmap = (semitone_bitmap > 0).astype(np.int)
+ semitone_bitmap = (semitone_bitmap > 0).astype(int)
if not semitone_bitmap[bass_number] and strict_bass_intervals:
raise InvalidChordException(
"Given bass scale degree is absent from this chord: "
@@ -544,8 +544,8 @@ def encode_many(chord_labels, reduce_ext
"""
num_items = len(chord_labels)
- roots, basses = np.zeros([2, num_items], dtype=np.int)
- semitones = np.zeros([num_items, 12], dtype=np.int)
+ roots, basses = np.zeros([2, num_items], dtype=int)
+ semitones = np.zeros([num_items, 12], dtype=int)
local_cache = dict()
for i, label in enumerate(chord_labels):
result = local_cache.get(label, None)
@@ -749,7 +749,7 @@ def thirds(reference_labels, estimated_l
eq_roots = ref_roots == est_roots
eq_thirds = ref_semitones[:, 3] == est_semitones[:, 3]
- comparison_scores = (eq_roots * eq_thirds).astype(np.float)
+ comparison_scores = (eq_roots * eq_thirds).astype(float)
# Ignore 'X' chords
comparison_scores[np.any(ref_semitones < 0, axis=1)] = -1.0
@@ -797,7 +797,7 @@ def thirds_inv(reference_labels, estimat
eq_root = ref_roots == est_roots
eq_bass = ref_bass == est_bass
eq_third = ref_semitones[:, 3] == est_semitones[:, 3]
- comparison_scores = (eq_root * eq_third * eq_bass).astype(np.float)
+ comparison_scores = (eq_root * eq_third * eq_bass).astype(float)
# Ignore 'X' chords
comparison_scores[np.any(ref_semitones < 0, axis=1)] = -1.0
@@ -845,7 +845,7 @@ def triads(reference_labels, estimated_l
eq_roots = ref_roots == est_roots
eq_semitones = np.all(
np.equal(ref_semitones[:, :8], est_semitones[:, :8]), axis=1)
- comparison_scores = (eq_roots * eq_semitones).astype(np.float)
+ comparison_scores = (eq_roots * eq_semitones).astype(float)
# Ignore 'X' chords
comparison_scores[np.any(ref_semitones < 0, axis=1)] = -1.0
@@ -894,7 +894,7 @@ def triads_inv(reference_labels, estimat
eq_basses = ref_bass == est_bass
eq_semitones = np.all(
np.equal(ref_semitones[:, :8], est_semitones[:, :8]), axis=1)
- comparison_scores = (eq_roots * eq_semitones * eq_basses).astype(np.float)
+ comparison_scores = (eq_roots * eq_semitones * eq_basses).astype(float)
# Ignore 'X' chords
comparison_scores[np.any(ref_semitones < 0, axis=1)] = -1.0
@@ -941,7 +941,7 @@ def tetrads(reference_labels, estimated_
eq_roots = ref_roots == est_roots
eq_semitones = np.all(np.equal(ref_semitones, est_semitones), axis=1)
- comparison_scores = (eq_roots * eq_semitones).astype(np.float)
+ comparison_scores = (eq_roots * eq_semitones).astype(float)
# Ignore 'X' chords
comparison_scores[np.any(ref_semitones < 0, axis=1)] = -1.0
@@ -989,7 +989,7 @@ def tetrads_inv(reference_labels, estima
eq_roots = ref_roots == est_roots
eq_basses = ref_bass == est_bass
eq_semitones = np.all(np.equal(ref_semitones, est_semitones), axis=1)
- comparison_scores = (eq_roots * eq_semitones * eq_basses).astype(np.float)
+ comparison_scores = (eq_roots * eq_semitones * eq_basses).astype(float)
# Ignore 'X' chords
comparison_scores[np.any(ref_semitones < 0, axis=1)] = -1.0
@@ -1035,7 +1035,7 @@ def root(reference_labels, estimated_lab
validate(reference_labels, estimated_labels)
ref_roots, ref_semitones = encode_many(reference_labels, False)[:2]
est_roots = encode_many(estimated_labels, False)[0]
- comparison_scores = (ref_roots == est_roots).astype(np.float)
+ comparison_scores = (ref_roots == est_roots).astype(float)
# Ignore 'X' chords
comparison_scores[np.any(ref_semitones < 0, axis=1)] = -1.0
@@ -1087,7 +1087,7 @@ def mirex(reference_labels, estimated_la
eq_chroma = (ref_chroma * est_chroma).sum(axis=-1)
# Chroma matching for set bits
- comparison_scores = (eq_chroma >= min_intersection).astype(np.float)
+ comparison_scores = (eq_chroma >= min_intersection).astype(float)
# No-chord matching; match -1 roots, SKIP_CHORDS dropped next
no_root = np.logical_and(ref_data[0] == -1, est_data[0] == -1)
@@ -1150,7 +1150,7 @@ def majmin(reference_labels, estimated_l
eq_root = ref_roots == est_roots
eq_quality = np.all(np.equal(ref_semitones[:, :8],
est_semitones[:, :8]), axis=1)
- comparison_scores = (eq_root * eq_quality).astype(np.float)
+ comparison_scores = (eq_root * eq_quality).astype(float)
# Test for Major / Minor / No-chord
is_maj = np.all(np.equal(ref_semitones[:, :8], maj_semitones), axis=1)
@@ -1217,7 +1217,7 @@ def majmin_inv(reference_labels, estimat
eq_root_bass = (ref_roots == est_roots) * (ref_bass == est_bass)
eq_semitones = np.all(np.equal(ref_semitones[:, :8],
est_semitones[:, :8]), axis=1)
- comparison_scores = (eq_root_bass * eq_semitones).astype(np.float)
+ comparison_scores = (eq_root_bass * eq_semitones).astype(float)
# Test for Major / Minor / No-chord
is_maj = np.all(np.equal(ref_semitones[:, :8], maj_semitones), axis=1)
@@ -1280,7 +1280,7 @@ def sevenths(reference_labels, estimated
eq_root = ref_roots == est_roots
eq_semitones = np.all(np.equal(ref_semitones, est_semitones), axis=1)
- comparison_scores = (eq_root * eq_semitones).astype(np.float)
+ comparison_scores = (eq_root * eq_semitones).astype(float)
# Test for reference chord inclusion
is_valid = np.array([np.all(np.equal(ref_semitones, semitones), axis=1)
@@ -1335,7 +1335,7 @@ def sevenths_inv(reference_labels, estim
eq_roots_basses = (ref_roots == est_roots) * (ref_basses == est_basses)
eq_semitones = np.all(np.equal(ref_semitones, est_semitones), axis=1)
- comparison_scores = (eq_roots_basses * eq_semitones).astype(np.float)
+ comparison_scores = (eq_roots_basses * eq_semitones).astype(float)
# Test for Major / Minor / No-chord
is_valid = np.array([np.all(np.equal(ref_semitones, semitones), axis=1)
@@ -19,9 +19,9 @@ SCORES_GLOB = 'data/beat/output*.json'
def test_trim_beats():
# Construct dummy beat times [0., 1., ...]
- dummy_beats = np.arange(10, dtype=np.float)
+ dummy_beats = np.arange(10, dtype=float)
# We expect trim_beats to remove all beats < 5s
- expected_beats = np.arange(5, 10, dtype=np.float)
+ expected_beats = np.arange(5, 10, dtype=float)
assert np.allclose(mir_eval.beat.trim_beats(dummy_beats), expected_beats)
@@ -51,7 +51,7 @@ def __unit_test_beat_function(metric):
nose.tools.assert_raises(ValueError, metric, beats, beats)
# Valid beats which are the same produce a score of 1 for all metrics
- beats = np.arange(10, dtype=np.float)
+ beats = np.arange(10, dtype=float)
assert np.allclose(metric(beats, beats), 1)
@@ -43,7 +43,7 @@ def __unit_test_onset_function(metric):
nose.tools.assert_raises(ValueError, metric, onsets, onsets)
# Valid onsets which are the same produce a score of 1 for all metrics
- onsets = np.arange(10, dtype=np.float)
+ onsets = np.arange(10, dtype=float)
assert np.allclose(metric(onsets, onsets), 1)
@@ -386,14 +386,14 @@ def p_score(reference_beats,
estimated_beats = np.array(estimated_beats - offset)
reference_beats = np.array(reference_beats - offset)
# Get the largest time index
- end_point = np.int(np.ceil(np.max([np.max(estimated_beats),
+ end_point = int(np.ceil(np.max([np.max(estimated_beats),
np.max(reference_beats)])))
# Make impulse trains with impulses at beat locations
reference_train = np.zeros(end_point*sampling_rate + 1)
- beat_indices = np.ceil(reference_beats*sampling_rate).astype(np.int)
+ beat_indices = np.ceil(reference_beats*sampling_rate).astype(int)
reference_train[beat_indices] = 1.0
estimated_train = np.zeros(end_point*sampling_rate + 1)
- beat_indices = np.ceil(estimated_beats*sampling_rate).astype(np.int)
+ beat_indices = np.ceil(estimated_beats*sampling_rate).astype(int)
estimated_train[beat_indices] = 1.0
# Window size to take the correlation over
# defined as .2*median(inter-annotation-intervals)