File: check-implementer-notes.py

package info (click to toggle)
libreoffice 1%3A7.0.4-4%2Bdeb11u10
  • links: PTS, VCS
  • area: main
  • in suites: bullseye
  • size: 3,255,188 kB
  • sloc: cpp: 4,130,031; xml: 364,887; java: 276,583; python: 65,680; ansic: 36,276; perl: 32,034; javascript: 16,964; yacc: 10,836; sh: 10,721; makefile: 9,112; cs: 6,600; objc: 1,972; lex: 1,887; awk: 1,002; pascal: 940; asm: 928; php: 79; csh: 20; sed: 5
file content (31 lines) | stat: -rwxr-xr-x 1,399 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
#!/usr/bin/env python

import json, re, subprocess, sys, urllib3

http = urllib3.PoolManager()

# TDF implementer notes pages for LibreOffice, fetched via the MediaWiki
# API (action=parse, format=json) so we get the raw wikitext of each page.
wiki_pages = [
    'https://wiki.documentfoundation.org/api.php?action=parse&format=json&page=Development/ODF_Implementer_Notes/List_of_LibreOffice_ODF_Extensions&prop=wikitext',
    'https://wiki.documentfoundation.org/api.php?action=parse&format=json&page=Development/ODF_Implementer_Notes/List_of_LibreOffice_OpenFormula_Extensions&prop=wikitext']

# Collect every commit hash cited on the wiki via a {{commit|<hash>|...|...}}
# template.  A set is all we need: the old dict only ever stored '' values
# and was used purely for membership tests.
wiki_commit_hashes = set()
# Raw string: the previous non-raw pattern relied on invalid escape
# sequences ('\{', '\|', '\}'), which raise SyntaxWarning on modern Python;
# the compiled regex is identical.
query = re.compile(r'\{\{commit\|(\w+)\|\w*\|\w*\}\}', re.IGNORECASE)
for page in wiki_pages:
    r = http.request('GET', page)
    data = json.loads(r.data.decode('utf-8'))
    # data['parse']['wikitext']['*'] is the page's raw wikitext body
    for line in data['parse']['wikitext']['*'].split('\n'):
        for res in query.finditer(line):
            wiki_commit_hashes.add(res.group(1))

# Report every commit touching core/schema/* that the wiki does NOT mention.
# Cut-off is May 18th 2020, when Michael Stahl had finished cleaning this up.
# The script lives in a subdirectory of the repo, so the repository root is
# one level above the script's own directory (sys.path[0]).
repo_root = sys.path[0] + '/..'
git_log = subprocess.check_output(
    ['git', '--no-pager', '-C', repo_root, 'log',
     '--since=2020-05-18', '--format=%H', '--', 'schema/'],
    stderr=subprocess.STDOUT)
for commit in git_log.decode('utf-8').split('\n'):
    # split('\n') leaves a trailing empty entry; skip it, and skip
    # anything the implementer notes already document.
    if commit == '' or commit in wiki_commit_hashes:
        continue
    print('missing commit: %s' % commit)