File: twofactorauth2sql.py

Package: gnome-authenticator 4.6.2-10
#!/usr/bin/env python3
"""
YAML database to JSON converter.
"""
import json
import tempfile
from glob import glob
from os import path
from shutil import rmtree
from subprocess import run
from typing import Optional
from urllib.parse import urlparse

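# Upstream source of the providers database; the cloned checkout keeps
# one JSON file per provider under entries/.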
GIT_CLONE_URI = "https://github.com/2factorauth/twofactorauth"
TMP_FOLDER = path.join(tempfile.gettempdir(), "Authenticator")
DATA_DIR = path.join(TMP_FOLDER, "entries")

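# Baseline: the data.json shipped with the previous fill_providers
# migration; new upstream data is diffed against it below.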
LAST_DATA = path.realpath(
    path.join(
        path.dirname(path.realpath(__file__)),
        "../migrations/2019-09-02-132153_fill_providers/data.json",
    )
)

with open(LAST_DATA, "r") as f:
    current_data = json.load(f)


print("Cloning the repository...")
if path.exists(TMP_FOLDER):
    rmtree(TMP_FOLDER)
# A shallow clone is enough: only the latest snapshot of entries/ is needed.
run(["git", "clone", "--depth=1", GIT_CLONE_URI, TMP_FOLDER], check=True)


def is_valid(provider: dict) -> bool:
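    # Keep only providers that list TOTP among their supported 2FA methods.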
    return "totp" in provider.get("tfa", [])


def compare_url(website1: str, website2: str) -> bool:
    # str.lstrip("www.") strips a *set of characters*, not a prefix, so
    # drop the "www." prefix explicitly before comparing the two hosts.
    def hostname(url: str) -> str:
        host = urlparse(url).netloc
        return host[4:] if host.startswith("www.") else host

    return hostname(website1) == hostname(website2)


def find_entry(current_data: list, name: str, website: str) -> Optional[dict]:
    # Match an existing provider by exact name, or by website when both
    # sides have one.
    for entry in current_data:
        if entry["name"] == name or (
            website and entry["website"] and compare_url(website, entry["website"])
        ):
            return entry
    return None


down_query = ""
up_query = ""

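# Diff every upstream provider file against the baseline: changed entries
# become UPDATE statements (with a matching downgrade), new entries become
# INSERT/DELETE pairs.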
# "**" only recurses when recursive=True is passed to glob().
for db_file in glob(DATA_DIR + "/**/*.json", recursive=True):
    with open(db_file, "r", encoding="utf8") as file_data:
        try:
            data = json.load(file_data)
            provider = list(data.values())[0]
            # Upstream names may contain HTML-escaped ampersands.
            name = list(data.keys())[0].replace("&amp;", "&")
            if is_valid(provider):
                website = provider.get("domain", "")
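                # Bare domains from upstream get a canonical https://www. form.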
                if not website.startswith("http"):
                    website = f"https://www.{website}/"
                help_url = provider.get("documentation", "")
                old_entry = find_entry(current_data, name, website)

                if old_entry is not None:
                    update_entries = []
                    downgrade_entries = []
                    if not compare_url(website, old_entry["website"]):
                        update_entries.append(("website", website))
                        downgrade_entries.append(("website", old_entry["website"]))
                    if (
                        help_url
                        and old_entry["documentation"]
                        and not compare_url(help_url, old_entry["documentation"])
                    ):
                        update_entries.append(("help_url", help_url))
                        downgrade_entries.append(
                            ("help_url", old_entry["documentation"])
                        )

                    if name != old_entry["name"]:
                        up_condition = f'name="{old_entry["name"]}"'
                        down_condition = f'name="{name}"'
                        update_entries.append(("name", name))
                        downgrade_entries.append(("name", old_entry["name"]))
                    else:
                        up_condition = f'name="{name}"'
                        down_condition = f'name="{old_entry["name"]}"'

                    if update_entries:
                        up_columns = ", ".join(
                            f'{column}="{value}"' for column, value in update_entries
                        )
                        down_columns = ", ".join(
                            f'{column}="{value}"'
                            for column, value in downgrade_entries
                        )

                        up_query += f'UPDATE "providers" SET {up_columns} WHERE {up_condition};\n'
                        down_query += f'UPDATE "providers" SET {down_columns} WHERE {down_condition};\n'
                else:
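                    # Values are interpolated verbatim; a name containing a
                    # double quote would produce invalid SQL.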
                    up_query += f'INSERT INTO "providers" ("name", "website", "help_url") VALUES ("{name}", "{website}", "{help_url}");\n'
                    down_query += f'DELETE FROM "providers" WHERE "name"="{name}";\n'
        except (TypeError, KeyError, json.JSONDecodeError) as error:
            print(f"Skipping {db_file}: {error}")

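# Write the up/down migration pair into the current directory.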
with open("./up.sql", "w") as fo:
    fo.write(up_query)


with open("./down.sql", "w") as fo:
    fo.write(down_query)

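# Drop the temporary clone now that the queries are written.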
rmtree(TMP_FOLDER)