Description: Remove code from Stack Overflow
Stack Overflow content is licensed under CC-BY-SA, which is not a
license this package is supposed to carry. These snippets may be
too small to be copyrightable, but they are removed to be safe.
Author: Rebecca N. Palmer <rebecca_palmer@zoho.com>
Forwarded: no - one part needs Python 3.6+
--- a/snakemake/io.py
+++ b/snakemake/io.py
@@ -1683,18 +1683,7 @@ def _load_configfile(configpath_or_obj,
except ValueError:
f.seek(0) # try again
try:
- # From https://stackoverflow.com/a/21912744/84349
- class OrderedLoader(yaml.Loader):
- pass
-
- def construct_mapping(loader, node):
- loader.flatten_mapping(node)
- return collections.OrderedDict(loader.construct_pairs(node))
-
- OrderedLoader.add_constructor(
- yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping
- )
- return yaml.load(f, Loader=OrderedLoader)
+ return yaml.full_load(f)
except yaml.YAMLError:
raise WorkflowError(
"Config file is not valid JSON or YAML. "
--- a/snakemake/remote/S3Mocked.py
+++ b/snakemake/remote/S3Mocked.py
@@ -10,6 +10,7 @@ import pickle
import time
import threading
import functools
+import pathlib
# intra-module
from snakemake.remote.S3 import (
@@ -122,26 +123,19 @@ class RemoteObject(S3RemoteObject):
# ====== Helpers =====
-def touch(fname, mode=0o666, dir_fd=None, **kwargs):
- # create lock file faster
- # https://stackoverflow.com/a/1160227
- flags = os.O_CREAT | os.O_APPEND
- with os.fdopen(os.open(fname, flags=flags, mode=mode, dir_fd=dir_fd)) as f:
- os.utime(
- f.fileno() if os.utime in os.supports_fd else fname,
- dir_fd=None if os.supports_fd else dir_fd,
- **kwargs
- )
-
+def touch(fname, mode=0o666):
+ pathlib.Path(fname).touch(mode=mode, exist_ok=True)
@contextmanager
def file_lock(filepath):
lock_file = filepath + ".lock"
- while os.path.isfile(lock_file):
- time.sleep(2)
-
- touch(lock_file)
+ while True:
+ try:
+ pathlib.Path(lock_file).touch(exist_ok=False)
+ break
+ except FileExistsError:
+ time.sleep(2)
try:
yield
--- a/snakemake/utils.py
+++ b/snakemake/utils.py
@@ -487,25 +487,23 @@ def min_version(version):
def update_config(config, overwrite_config):
"""Recursively update dictionary config with overwrite_config.
- See
- https://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth
- for details.
-
Args:
config (dict): dictionary to update
overwrite_config (dict): dictionary whose items will overwrite those in config
"""
- def _update(d, u):
- for (key, value) in u.items():
- if isinstance(value, collections.abc.Mapping):
- d[key] = _update(d.get(key, {}), value)
+ def nestedupdate(old_d, new_d):
+ for k, v in new_d.items():
+ if isinstance(v, collections.abc.Mapping):
+ if k not in old_d:
+ old_d[k] = {}
+ nestedupdate(old_d[k], v)
else:
- d[key] = value
- return d
+ old_d[k] = v
+
- _update(config, overwrite_config)
+ nestedupdate(config, overwrite_config)
def available_cpu_count():
@@ -514,22 +512,16 @@ def available_cpu_count():
The number of available CPUs can be smaller than the total number of CPUs
when the cpuset(7) mechanism is in use, as is the case on some cluster
systems.
-
- Adapted from https://stackoverflow.com/a/1006301/715090
"""
try:
- with open("/proc/self/status") as f:
- status = f.read()
- m = re.search(r"(?m)^Cpus_allowed:\s*(.*)$", status)
- if m:
- res = bin(int(m.group(1).replace(",", ""), 16)).count("1")
- if res > 0:
- return min(res, multiprocessing.cpu_count())
- except IOError:
- pass
-
- return multiprocessing.cpu_count()
-
+ cpu_count = len(os.sched_getaffinity(0))
+ except Exception:
+ cpu_count = 0
+ if cpu_count == 0:
+ cpu_count = os.cpu_count()
+ if not cpu_count: # None if unknown
+ cpu_count = 1
+ return cpu_count
def argvquote(arg, force=True):
"""Returns an argument quoted in such a way that CommandLineToArgvW
--- a/tests/test_static_remote/S3MockedForStaticTest.py
+++ b/tests/test_static_remote/S3MockedForStaticTest.py
@@ -10,6 +10,7 @@ import pickle
import time
import threading
import functools
+import pathlib
# intra-module
from snakemake.remote.S3 import (
@@ -133,26 +134,20 @@ class RemoteObject(S3RemoteObject):
# ====== Helpers =====
-def touch(fname, mode=0o666, dir_fd=None, **kwargs):
- # create lock file faster
- # https://stackoverflow.com/a/1160227
- flags = os.O_CREAT | os.O_APPEND
- with os.fdopen(os.open(fname, flags=flags, mode=mode, dir_fd=dir_fd)) as f:
- os.utime(
- f.fileno() if os.utime in os.supports_fd else fname,
- dir_fd=None if os.supports_fd else dir_fd,
- **kwargs
- )
+def touch(fname, mode=0o666):
+ pathlib.Path(fname).touch(mode=mode, exist_ok=True)
@contextmanager
def file_lock(filepath):
lock_file = filepath + ".lock"
- while os.path.isfile(lock_file):
- time.sleep(2)
-
- touch(lock_file)
+ while True:
+ try:
+ pathlib.Path(lock_file).touch(exist_ok=False)
+ break
+ except FileExistsError:
+ time.sleep(2)
try:
yield