--- a/flye/assembly/scaffolder.py
+++ b/flye/assembly/scaffolder.py
@@ -12,7 +12,6 @@
import flye.utils.fasta_parser as fp
import flye.config.py_cfg as cfg
-from six import iteritems
logger = logging.getLogger()
@@ -128,7 +127,7 @@
contigs_stats[tokens[0]].coverage = tokens[2]
scaffolds_stats = {}
- for scf, scf_seq in iteritems(scaffolds):
+ for scf, scf_seq in scaffolds.items():
scaffolds_stats[scf] = SeqStats(scf)
scf_length = sum([int(contigs_stats[unsigned(c)].length) for c in scf_seq])
scf_length += (len(scf_seq) - 1) * cfg.vals["scaffold_gap"]
--- a/flye/config/configurator.py
+++ b/flye/config/configurator.py
@@ -12,7 +12,6 @@
import flye.utils.fasta_parser as fp
import flye.config.py_cfg as cfg
-from six import iteritems
logger = logging.getLogger()
@@ -37,7 +36,7 @@
passing_reads = 0
for read_file in args.reads:
- for _, seq_len in iteritems(fp.read_sequence_lengths(read_file)):
+ for _, seq_len in fp.read_sequence_lengths(read_file).items():
if seq_len > MAX_READ_LEN:
raise ConfigException("Length of single read in '{}' exceeded maximum ({})".format(read_file, MAX_READ_LEN))
if seq_len > lowest_read_len:
--- a/flye/polishing/alignment.py
+++ b/flye/polishing/alignment.py
@@ -18,7 +18,6 @@
import flye.utils.fasta_parser as fp
from flye.utils.utils import which, get_median
from flye.utils.sam_parser import AlignmentException
-from six import iteritems
@@ -47,7 +46,7 @@
def get_contigs_info(contigs_file):
contigs_info = {}
contigs_fasta = fp.read_sequence_dict(contigs_file)
- for ctg_id, ctg_seq in iteritems(contigs_fasta):
+ for ctg_id, ctg_seq in contigs_fasta.items():
contig_type = ctg_id.split("_")[0]
contigs_info[ctg_id] = ContigInfo(ctg_id, len(ctg_seq),
contig_type)
--- a/flye/polishing/consensus.py
+++ b/flye/polishing/consensus.py
@@ -11,7 +11,6 @@
import logging
from collections import defaultdict
-from six import itervalues
import multiprocessing
import traceback
@@ -160,10 +159,10 @@
pos_nucl = elem.nucl
ins_group.clear()
- for ins_str in itervalues(pos_insertions):
+ for ins_str in pos_insertions.values():
ins_group[ins_str] += 1
- match_and_del_num = sum(itervalues(pos_matches))
+ match_and_del_num = sum(pos_matches.values())
del_num = pos_matches["-"]
num_ins = len(pos_insertions)
--- a/flye/polishing/polish.py
+++ b/flye/polishing/polish.py
@@ -21,7 +21,6 @@
import flye.utils.fasta_parser as fp
from flye.utils.utils import which
import flye.config.py_cfg as cfg
-from six import iteritems
@@ -329,7 +328,7 @@
polished_fasta = {}
polished_stats = {}
polished_coverages = {}
- for ctg_id, seqs in iteritems(consensuses):
+ for ctg_id, seqs in consensuses.items():
seqs.sort(key=lambda p: (p[0], p[1]))
sorted_seqs = [p[3] for p in seqs]
bubble_coverages = [(len(p[3]), p[2]) for p in seqs]
--- a/flye/short_plasmids/circular_sequences.py
+++ b/flye/short_plasmids/circular_sequences.py
@@ -9,7 +9,6 @@
import flye.utils.fasta_parser as fp
from flye.utils.sam_parser import read_paf, read_paf_grouped
import logging
-from six import iteritems
logger = logging.getLogger()
@@ -47,7 +46,7 @@
def trim_circular_reads(circular_reads, unmapped_reads):
trimmed_circular_reads = dict()
- for i, (read, hit) in enumerate(iteritems(circular_reads)):
+ for i, (read, hit) in enumerate(circular_reads.items()):
sequence = unmapped_reads[read][:hit.target_start].upper()
trimmed_circular_reads["circular_read_" + str(i)] = sequence
--- a/flye/short_plasmids/unmapped_reads.py
+++ b/flye/short_plasmids/unmapped_reads.py
@@ -9,7 +9,6 @@
from flye.utils.sam_parser import read_paf_grouped
import logging
from collections import defaultdict
-from six import iteritems
logger = logging.getLogger()
@@ -76,7 +75,7 @@
contigs = mapping_rates.get(hdr)
if contigs is not None:
is_unmapped = True
- for _, mapping_rate in iteritems(contigs):
+ for _, mapping_rate in contigs.items():
if mapping_rate >= mapping_rate_threshold:
is_unmapped = False
--- a/flye/trestle/graph_resolver.py
+++ b/flye/trestle/graph_resolver.py
@@ -13,7 +13,6 @@
import flye.utils.fasta_parser as fp
from flye.repeat_graph.graph_alignment import iter_alignments
-from six import iteritems
logger = logging.getLogger()
@@ -146,7 +145,7 @@
def dump_repeats(repeats_info, filename):
with open(filename, "w") as f:
- for repeat_id, info in iteritems(repeats_info):
+ for repeat_id, info in repeats_info.items():
f.write("#Repeat {0}\t{1}\n\n".format(repeat_id, info.multiplicity))
f.write("#All reads\t{0}\n".format(len(info.all_reads)))
--- a/flye/utils/sam_parser.py
+++ b/flye/utils/sam_parser.py
@@ -33,7 +33,6 @@
_BYTES = str.encode
-from six import iteritems
import flye.utils.fasta_parser as fp
from flye.utils.utils import get_median
@@ -209,7 +208,7 @@
#self.shared_manager = multiprocessing.Manager()
self.ref_fasta = dict() if multiproc_manager == None else multiproc_manager.dict()
- for (h, s) in iteritems(reference_fasta):
+ for (h, s) in reference_fasta.items():
self.ref_fasta[_BYTES(h)] = _BYTES(s)
def get_region_sequence(self, region_id, region_start=None, region_end=None):
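Note on the migration pattern: on Python 3, six.iteritems(d) returns iter(d.items()) and six.itervalues(d) returns iter(d.values()), so dropping the six imports and calling the built-in dict methods directly is behaviour-preserving for every loop and sum() touched above. A minimal illustrative sketch of that equivalence (the dictionary below is a made-up example, not taken from the Flye sources):

    # Illustrative only; 'contig_lengths' is a hypothetical example dict.
    contig_lengths = {"contig_1": 15000, "contig_2": 42000}

    # Before (Python 2/3 compatibility layer):
    #   from six import iteritems, itervalues
    #   for ctg_id, length in iteritems(contig_lengths): ...
    #   total = sum(itervalues(contig_lengths))

    # After (Python 3 only): dict view objects are iterable,
    # so the loops and sum() calls behave identically.
    for ctg_id, length in contig_lengths.items():
        print(ctg_id, length)

    total = sum(contig_lengths.values())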