path: root/tools/skqp/download_model
#! /usr/bin/env python

# Copyright 2018 Google Inc.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

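"""Download the SkQP model assets.

Reads files.checksum, fetches the matching manifest from the
skia-skqp-assets bucket, then downloads every listed asset into
platform_tools/android/apps/skqp/src/main/assets in parallel.
"""
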
import hashlib
import multiprocessing
import os
import shutil
import sys
import tempfile
import urllib2

def checksum(path):
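    """Return the MD5 hex digest of the file at |path|, or None if it is missing."""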
    if not os.path.exists(path):
        return None
    m = hashlib.md5()
    with open(path, 'rb') as f:
        while True:
            buf = f.read(4096)
            if not buf:
                return m.hexdigest()
            m.update(buf)

def download(md5, path):
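    """Download the asset keyed by |md5| into |path|, skipping the
    fetch if |path| already has that checksum."""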
    if md5 != checksum(path):
        dirname = os.path.dirname(path)
        if dirname and not os.path.exists(dirname):
            try:
                os.makedirs(dirname)
            except OSError:
                # Ignore the race where another process creates the directory.
                if not os.path.exists(dirname):
                    raise
        url = 'https://storage.googleapis.com/skia-skqp-assets/' + md5
        with open(path, 'wb') as o:
            shutil.copyfileobj(urllib2.urlopen(url), o)

def tmp(prefix):
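    """Create an empty temporary file and return its path."""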
    fd, path = tempfile.mkstemp(prefix=prefix)
    os.close(fd)
    return path

def main():
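    """Download every asset listed by the files.checksum manifest."""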
    os.chdir(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
                         'platform_tools', 'android', 'apps', 'skqp', 'src',
                         'main', 'assets'))
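    # files.checksum stores the MD5 of the manifest; the manifest itself is
    # fetched like any other asset and lists one "<md5>;<path>" record per line.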
    file_list_file = tmp('files_')
    with open('files.checksum', 'r') as f:
        md5 = f.read().strip()
        assert len(md5) == 32
        download(md5, file_list_file)
    with open(file_list_file, 'r') as f:
        records = []
        for line in f:
            md5, path = line.strip().split(';', 1)
            records.append((md5, path))
    sys.stderr.write('Downloading %d files.\n' % len(records))
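    # Fetch assets in parallel; each completed download prints a progress dot.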
    pool = multiprocessing.Pool(processes=multiprocessing.cpu_count() * 2)
    for record in records:
        pool.apply_async(download, record,
                         callback=lambda x: sys.stderr.write('.'))
    pool.close()
    pool.join()
    sys.stderr.write('\n')

if __name__ == '__main__':
    main()