File: getall.go

package info (click to toggle)
golang-github-klauspost-cpuid 1.2.0%2Bdfsg1-1
  • links: PTS, VCS
  • area: main
  • in suites: buster
  • size: 504 kB
  • sloc: asm: 62; makefile: 2
file content (77 lines) | stat: -rw-r--r-- 1,330 bytes parent folder | download | duplicates (3)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
package main

import (
	"archive/zip"
	_ "bytes"
	"fmt"
	"golang.org/x/net/html"
	"io"
	"net/http"
	"os"
	"strings"
)

// Download all CPUID dumps from http://users.atw.hu/instlatx64/
func main() {
	resp, err := http.Get("http://users.atw.hu/instlatx64/?")
	if err != nil {
		panic(err)
	}

	node, err := html.Parse(resp.Body)
	if err != nil {
		panic(err)
	}

	file, err := os.Create("cpuid_data.zip")
	if err != nil {
		panic(err)
	}
	defer file.Close()
	gw := zip.NewWriter(file)

	var f func(*html.Node)
	f = func(n *html.Node) {
		if n.Type == html.ElementNode && n.Data == "a" {
			for _, a := range n.Attr {
				if a.Key == "href" {
					err := ParseURL(a.Val, gw)
					if err != nil {
						panic(err)
					}
					break
				}
			}
		}
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			f(c)
		}
	}

	f(node)
	err = gw.Close()
	if err != nil {
		panic(err)
	}
}

// ParseURL downloads the CPUID dump referenced by href s (if it looks like
// one, i.e. contains "CPUID.txt") and adds it to the zip archive gw under
// the same name. Links that do not match are silently ignored.
//
// Download failures are treated as best-effort: they are logged and skipped
// rather than aborting the whole crawl. Only archive-write errors are
// returned, since those indicate the output file itself is broken.
func ParseURL(s string, gw *zip.Writer) error {
	if !strings.Contains(s, "CPUID.txt") {
		return nil
	}
	url := "http://users.atw.hu/instlatx64/" + s
	fmt.Println("Adding", url)
	resp, err := http.Get(url)
	if err != nil {
		// BUG FIX: the original fell through here and dereferenced the
		// nil resp in the deferred Close, panicking. Log and skip instead.
		fmt.Println("Error getting ", s, ":", err)
		return nil
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		// Don't archive HTML error pages as CPUID dumps.
		fmt.Println("Error getting ", s, ": unexpected status", resp.Status)
		return nil
	}
	w, err := gw.Create(s)
	if err != nil {
		return err
	}

	// Stream the body straight into the zip entry; constant memory.
	_, err = io.Copy(w, resp.Body)
	return err
}