File: version_bumps.py

import argparse
import collections
from datetime import datetime, timedelta
from functools import cache
from importlib import metadata

import requests
from packaging.requirements import Requirement
from packaging.version import InvalidVersion, Version


@cache
def get_package_releases(package):
    """Fetch all versions and their release dates for a package from PyPI"""
    print(f"Querying pypi.org for {package} versions...", end="", flush=True)
    response = requests.get(
        f"https://pypi.org/simple/{package}",
        headers={"Accept": "application/vnd.pypi.simple.v1+json"},
    ).json()
    print("OK")
    file_date = collections.defaultdict(list)
    for f in response["files"]:
        ver = f["filename"].split("-")[1]
        try:
            version = Version(ver)
        except InvalidVersion:
            # Skip files whose filenames do not yield a parseable version
            continue
        release_date = None
        for fmt in ("%Y-%m-%dT%H:%M:%S.%fZ", "%Y-%m-%dT%H:%M:%SZ"):
            try:
                release_date = datetime.strptime(f["upload-time"], fmt)
                break
            except ValueError:
                continue
        if not release_date:
            continue
        file_date[version].append(release_date)
    # A version's release date is the earliest upload time of any of its files
    release_dates = {v: min(dates) for v, dates in file_date.items()}
    return release_dates

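# Illustrative sketch of the payload shape get_package_releases() relies on.
# The keys ("files", "filename", "upload-time") come from PyPI's JSON Simple API
# (application/vnd.pypi.simple.v1+json); the concrete values below are made-up
# examples, not real query results:
#
#   {"files": [
#       {"filename": "examplepkg-1.2.0-py3-none-any.whl",
#        "upload-time": "2023-01-02T03:04:05.678900Z",
#        ...},
#   ]}
#
# For that input the function would return
# {Version("1.2.0"): datetime(2023, 1, 2, 3, 4, 5, 678900)}.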

def is_version_old(package, version_str, threshold=timedelta(days=365*2)):
    """Check if a specific version of a package is older than the threshold"""
    releases = get_package_releases(package)
    # Look up the release date for the specified version
    target_release_date = releases.get(Version(version_str))
    if target_release_date is None:
        print(f"Did not find version {version_str} of {package}")
        return True
    return (datetime.now() - target_release_date) > threshold


def find_newest_version(package, threshold=timedelta(days=365*2)):
    """Find the oldest stable X.Y.0 release that is still within the threshold"""
    releases = get_package_releases(package)
    # Sort releases by version number so the earliest qualifying release is found first
    sorted_releases = dict(sorted(releases.items(), key=lambda x: x[0]))
    now = datetime.now()
    # Return the first stable feature release (no pre-releases, no bugfix releases)
    # whose age is within the threshold
    for v, date in sorted_releases.items():
        if v.is_prerelease or v.micro != 0:
            continue
        if (now - date) <= threshold:
            return v, date
    return None, None  # No recent versions found

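# Small worked example (made-up versions and ages) of the selection logic above:
# with releases {4.1.0: 3 years old, 5.0.0rc1: 20 months old, 5.0.1: 18 months old,
# 5.1.0: 10 months old} and the default two-year threshold, 4.1.0 is rejected as
# too old, 5.0.0rc1 as a pre-release, 5.0.1 as a bugfix release (micro != 0),
# and (Version("5.1.0"), <its release date>) is returned.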

def get_min_version(requirement):
    """
    Extracts the minimum version from a requirement.

    Parameters:
        requirement (packaging.requirements.Requirement): The requirement object.

    Returns:
        str or None: The minimum version if found, otherwise None.
    """
    min_version = None
    # Walk the individual specifiers rather than parsing the specifier string by
    # hand, so multi-clause specifiers such as ">=1.2,<2.0" are handled correctly.
    for spec in requirement.specifier:
        if spec.operator == "==":
            # An exact pin is the minimum version by definition
            return spec.version
        if spec.operator in (">=", ">") and min_version is None:
            min_version = spec.version
    return min_version

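# Illustrative examples of what get_min_version() is expected to return for a
# few specifier shapes (package names chosen arbitrarily):
#
#   get_min_version(Requirement("numpy>=1.23"))       -> "1.23"
#   get_min_version(Requirement("scipy>=1.8,<2.0"))   -> "1.8"
#   get_min_version(Requirement("somepkg==2.4.1"))    -> "2.4.1"
#   get_min_version(Requirement("otherpkg<3.0"))      -> None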

def get_requirements(package):
    """
    Wraps `importlib.metadata.requires` to return a structured mapping instead of a flat list of requirement strings.

    Parameters
    ----------
    package : str
        Package you want requirements for.

    Returns
    -------
    `dict`
        A dictionary of requirements with keys being the extra requirement group names.
        The values are a nested dictionary with keys being the package names and
        values being the `packaging.requirements.Requirement` objects.
    """
    requirements: list = metadata.requires(package) or []
    requires_dict = collections.defaultdict(dict)
    for requirement in requirements:
        req = Requirement(requirement)
        package_name, package_marker = req.name, req.marker
        if package_marker and "extra ==" in str(package_marker):
            group = str(package_marker).split("extra == ")[1].strip('"').strip("'").strip()
        else:
            group = "required"
        # De-duplicate (the same package could appear more than once in the extra == 'all' group)
        if package_name in requires_dict[group]:
            continue
        requires_dict[group][package_name] = req
    return requires_dict

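# Rough sketch of the mapping get_requirements() builds (names and versions here
# are placeholders, not sunpy's actual requirement set):
#
#   {"required": {"numpy": Requirement("numpy>=1.23"), ...},
#    "docs":     {"somepkg": Requirement('somepkg>=2.0; extra == "docs"'), ...}}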

def process_dependencies(package, threshold=timedelta(days=365*2)):
    """
    Processes all dependencies to check their versions against the threshold.

    Parameters:
        package (str): The name of the package.
        threshold (datetime.timedelta): Versions released longer ago than this are considered old.

    Returns:
        dict: A mapping of requirement group name to a list of dictionaries
              containing 'package', 'version', 'group', and 'is_old' for each
              dependency.
    """
    requirements_dict = get_requirements(package)
    result = collections.defaultdict(list)
    for group, deps in requirements_dict.items():
        for package_name, req in deps.items():
            min_version = get_min_version(req)
            if min_version:
                is_old = is_version_old(package_name, min_version, threshold=threshold)
                result[group].append({
                    'package': package_name,
                    'version': min_version,
                    'group': group,
                    'is_old': is_old
                })
    return result

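# Shape of the returned mapping, with made-up values for a single entry:
#
#   {"required": [{"package": "numpy", "version": "1.23",
#                  "group": "required", "is_old": True}, ...]}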

def output_version_bumps(package, threshold=timedelta(days=365*2)):
    group_deps = process_dependencies(package, threshold=threshold)
    for group, deps in group_deps.items():
        print(f"\n{group}\n{'-'*len(group)}")
        for dep in deps:
            if dep['is_old']:
                new_version, release_date = find_newest_version(dep['package'], threshold=threshold)
                if new_version is not None:
                    print(f"{dep['package']} should be bumped to {new_version} which was released on {release_date:%Y-%m-%d}")
                else:
                    print(f"Could not find newer version for {dep['package']}")


def main():
    parser = argparse.ArgumentParser(description='Process package dependencies for minimum version bumps.')
    # Optional package name with a default value
    parser.add_argument('--packagename', type=str, default="sunpy", help='The name of the package')
    # Optional integer threshold with a default value
    parser.add_argument('--threshold', type=int, default=720,
                        help='An optional threshold value in days (default: 720)')
    args = parser.parse_args()
    output_version_bumps(args.packagename, threshold=timedelta(days=args.threshold))


if __name__ == "__main__":
    main()
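
# Example invocation (output below is only a schematic; real package names,
# versions and dates depend on what PyPI reports at run time):
#
#   $ python version_bumps.py --packagename sunpy --threshold 730
#
#   required
#   --------
#   <package> should be bumped to <version> which was released on <YYYY-MM-DD>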