summaryrefslogtreecommitdiffstats
path: root/bin/reproducible_scheduler.py
blob: 85e3d60a349b80b1c5f0f187a422baba6e8ae0cc (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright © 2015 Mattia Rizzolo <mattia@mapreri.org>
# Copyright © 2015 Holger Levsen <holger@layer-acht.org>
# Based on reproducible_scheduler.sh © 2014-2015 Holger Levsen <holger@layer-acht.org>
# Licensed under GPL-2
#
# Depends: python3 python3-debian
#
# Schedule packages to be built.

import sys
import lzma
import deb822
import aptsources.sourceslist
import random
from time import sleep
from random import randint
from subprocess import call
from apt_pkg import version_compare
from urllib.request import urlopen

from reproducible_common import *
from reproducible_html_indexes import generate_schedule
from reproducible_html_packages import gen_packages_html
from reproducible_html_packages import purge_old_pages

def call_apt_update(suite):
    """Run `apt-get update` inside the source schroot for `suite`.

    Tries up to three times with a randomized pause between attempts;
    on success returns, otherwise fails the whole job via sys.exit(1).
    """
    max_tries = 3
    to_call = ['schroot', '--directory', '/root', '-u', 'root',
               '-c', 'source:jenkins-reproducible-' + suite, '--',
               'apt-get', 'update']
    for attempt in range(1, max_tries + 1):
        log.debug('calling ' + ' '.join(to_call))
        if not call(to_call):  # call() returns the exit code, 0 == success
            return
        remaining = max_tries - attempt
        # only log/sleep when a retry will actually happen (previously we
        # logged "Retrying another 0 times" and slept before giving up)
        if remaining:
            log.warning('`apt-get update` failed. Retrying another ' +
                        str(remaining) + ' times.')
            # randomized back-off so repeated failures do not hammer the mirror
            sleep(randint(1, 70) + 30)
    print_critical_message('`apt-get update` for suite ' + suite +
                           ' failed three times in a row, giving up.')
    sys.exit(1)


def update_sources_tables(suite):
    """Sync the `sources` db table with the Debian archive for `suite`.

    Downloads the Sources.xz index from the mirror and then:
      * inserts brand-new source packages (hard-coded arch 'amd64'),
      * REPLACEs rows whose version is newer in the archive,
      * deletes packages no longer in the archive (plus their `results`
        and `schedule` rows) and records them in `removed_packages`.
    Exits the whole job (sys.exit(1)) if the final row count for the
    suite does not match the number of distinct sources in the index.
    """
    # download the sources file for this suite
    mirror = 'http://ftp.de.debian.org/debian'
    remotefile = mirror + '/dists/' + suite + '/main/source/Sources.xz'
    log.info('Downloading sources file for ' + suite + ': ' + remotefile)
    sources = lzma.decompress(urlopen(remotefile).read()).decode('utf8')
    log.debug('\tdownloaded')
    # extract relevant info (package name and version) from the sources file
    new_pkgs = []
    for src in deb822.Sources.iter_paragraphs(sources.split('\n')):
        pkg = (src['Package'], src['Version'], suite)
        new_pkgs.append(pkg)
    # get the current packages in the database
    query = 'SELECT name, version, suite FROM sources ' + \
            'WHERE suite="{}"'.format(suite)
    cur_pkgs = query_db(query)
    pkgs_to_add = []
    updated_pkgs = []
    # (name, version, suite) tuples present in the archive but not in the
    # db: either brand-new packages or packages with a changed version
    different_pkgs = [x for x in new_pkgs if x not in cur_pkgs]
    log.debug('Packages different in the archive and in the db: ' +
              str(different_pkgs))
    for pkg in different_pkgs:
        # NOTE(review): queries are built with string formatting; package
        # names come from the archive index, presumed trusted — but
        # parameterized queries would be safer. Verify before changing.
        query = 'SELECT id, version, notify_maintainer FROM sources ' + \
                'WHERE name="{name}" AND suite="{suite}"'
        query = query.format(name=pkg[0], suite=pkg[2])
        try:
            result = query_db(query)[0]
        except IndexError:  # new package
            pkgs_to_add.append((pkg[0], pkg[1], pkg[2], 'amd64'))
            continue
        pkg_id = result[0]
        old_version = result[1]
        notify_maint = int(result[2])
        # only treat it as an update when the archive version is strictly
        # newer; an older archive version is silently ignored here
        if version_compare(pkg[1], old_version) > 0:
            log.debug('New version: ' + str(pkg) + ' (we had  ' +
                      old_version + ')')
            updated_pkgs.append((pkg_id, pkg[0], pkg[1], pkg[2], notify_maint))
    # Now actually update the database:
    cursor = conn_db.cursor()
    # updated packages: REPLACE keeps the original id and the
    # notify_maintainer flag that was read back above
    log.info('Pushing ' + str(len(updated_pkgs)) +
             ' updated packages to the database...')
    cursor.executemany(
        'REPLACE INTO sources ' +
        '(id, name, version, suite, architecture, notify_maintainer) ' +
        'VALUES (?, ?, ?, ?, "{arch}", ?)'.format(arch='amd64'),
        updated_pkgs)
    conn_db.commit()
    # new packages
    log.info('Now inserting ' + str(len(pkgs_to_add)) +
             ' new sources in the database: ' +
             str(pkgs_to_add))
    cursor.executemany('INSERT INTO sources ' +
                       '(name, version, suite, architecture) ' +
                       'VALUES (?, ?, ?, ?)', pkgs_to_add)
    conn_db.commit()
    # RM'ed packages: names in the db that no longer appear in the archive
    cur_pkgs_name = [x[0] for x in cur_pkgs]
    new_pkgs_name = [x[0] for x in new_pkgs]
    rmed_pkgs = [x for x in cur_pkgs_name if x not in new_pkgs_name]
    log.info('Now deleting ' + str(len(rmed_pkgs)) +
             ' removed packages: ' + str(rmed_pkgs))
    rmed_pkgs_id = []
    pkgs_to_rm = []
    for pkg in rmed_pkgs:
        result = query_db(('SELECT id FROM sources ' +
                          'WHERE name="{name}" ' +
                          'AND suite="{suite}"').format(name=pkg, suite=suite))
        rmed_pkgs_id.extend(result)
        pkgs_to_rm.append((pkg, suite, 'amd64'))
    log.debug('removed packages ID: ' + str([str(x[0]) for x in rmed_pkgs_id]))
    log.debug('removed packages: ' + str(pkgs_to_rm))
    # drop the package and every trace of it, then remember it was removed
    cursor.executemany('DELETE FROM sources ' +
                       'WHERE id=?', rmed_pkgs_id)
    cursor.executemany('DELETE FROM results ' +
                       'WHERE package_id=?', rmed_pkgs_id)
    cursor.executemany('DELETE FROM schedule ' +
                       'WHERE package_id=?', rmed_pkgs_id)
    cursor.executemany('INSERT INTO removed_packages '  +
                       '(name, suite, architecture) ' +
                       'VALUES (?, ?, ?)', pkgs_to_rm)
    conn_db.commit()
    # finally check whether the db has the correct number of packages
    pkgs_end = query_db('SELECT count(*) FROM sources WHERE suite="%s"' % suite)
    count_new_pkgs = len(set([x[0] for x in new_pkgs]))
    if int(pkgs_end[0][0]) != count_new_pkgs:
        print_critical_message('AH! The number of source in the Sources file' +
                               ' is different than the one in the DB!')
        log.critical('source in the debian archive for the ' + suite +
                     ' suite:' + str(count_new_pkgs))
        log.critical('source in the reproducible db for the ' + suite +
                     ' suite:' + str(pkgs_end[0][0]))
        sys.exit(1)
    if pkgs_to_add:
        log.info('Building pages for the new packages')
        gen_packages_html(pkgs_to_add, no_clean=True)


def print_schedule_result(suite, criteria, packages):
    """Log a summary of one scheduling round.

    `packages` is the usual list-of-tuples returned by SQL queries,
    where the first item is the id and the second one the package name.
    """
    names = ' '.join(pkg[1] for pkg in packages)
    log.info('--------------------------------------------------------------')
    log.info('Criteria: ' + criteria)
    log.info('Suite:    ' + suite)
    log.info('Amount:   ' + str(len(packages)))
    log.info('Packages: ' + names)


def schedule_packages(packages, date):
    """Insert `packages` into the schedule table, stamped with `date`.

    `packages` is a list of (id, name, ...) tuples; only the id goes
    into the db, the names are used for the log message.
    """
    when = date.strftime('%Y-%m-%d %H:%M')
    rows = [(pkg[0], when) for pkg in packages]
    log.debug('IDs about to be scheduled: ' + str([pkg[0] for pkg in packages]))
    cursor = conn_db.cursor()
    cursor.executemany('INSERT INTO schedule ' +
                       '(package_id, date_scheduled, date_build_started) ' +
                       'VALUES (?, ?, "")', rows)
    conn_db.commit()
    log.info('--------------------------------------------------------------')
    log.info('The following ' + str(len(rows)) + ' source packages have ' +
             'been scheduled at ' + when + ': ' +
             ' '.join([str(pkg[1]) for pkg in packages]))
    log.info('--------------------------------------------------------------')


def scheduler_untested_packages(suite, limit):
    """Pick up to `limit` random packages in `suite` that have never
    been built and are not already queued."""
    criteria = 'not tested before, randomly sorted'
    query = """SELECT DISTINCT sources.id, sources.name FROM sources
               WHERE sources.suite='{suite}'
               AND sources.id NOT IN
                       (SELECT schedule.package_id FROM schedule)
               AND sources.id NOT IN
                       (SELECT results.package_id FROM results)
               ORDER BY random()
               LIMIT {limit}"""
    rows = query_db(query.format(suite=suite, limit=limit))
    print_schedule_result(suite, criteria, rows)
    return rows


def scheduler_new_versions(suite, limit):
    """Pick up to `limit` already-tested packages in `suite` whose
    archive version is newer than the last tested one."""
    criteria = 'tested before, new version available, sorted by last build date'
    query = """SELECT DISTINCT s.id, s.name, s.version, r.version
               FROM sources AS s JOIN results AS r ON s.id = r.package_id
               WHERE s.suite='{suite}'
               AND s.version != r.version
               AND r.status != 'blacklisted'
               AND s.id IN (SELECT package_id FROM results)
               AND s.id NOT IN (SELECT schedule.package_id FROM schedule)
               ORDER BY r.build_date
               LIMIT {limit}"""
    candidates = query_db(query.format(suite=suite, limit=limit))
    # keep only rows where the archive version is strictly newer; this is
    # to avoid perpetual rescheduling of packages in our exp repository
    packages = []
    for row in candidates:
        if version_compare(row[2], row[3]) > 0:
            packages.append((row[0], row[1]))
    print_schedule_result(suite, criteria, packages)
    return packages


def scheduler_old_versions(suite, limit):
    """Pick up to `limit` packages in `suite` last built more than two
    weeks ago and not already queued, oldest build first."""
    criteria = 'tested at least two weeks ago, no new version available, ' + \
               'sorted by last build date'
    query = """SELECT DISTINCT s.id, s.name
                FROM sources AS s JOIN results AS r ON s.id = r.package_id
                WHERE s.suite='{suite}'
                AND r.status != 'blacklisted'
                AND r.build_date < datetime('now', '-14 day')
                AND s.id NOT IN (SELECT schedule.package_id FROM schedule)
                ORDER BY r.build_date
                LIMIT {limit}"""
    rows = query_db(query.format(suite=suite, limit=limit))
    print_schedule_result(suite, criteria, rows)
    return rows

def add_up_numbers(package_type, suites=None):
    """Return per-suite counts joined with '+', e.g. '12+3+0'.

    `package_type` maps suite name -> list of scheduled packages.
    `suites` defaults to the global SUITES; passing it explicitly makes
    the function testable and usable for any suite set.
    Returns just '0' when every suite's count is zero (the original
    implementation only recognized the all-zero case for exactly three
    suites, comparing against the literal '0+0+0').
    """
    if suites is None:
        suites = SUITES
    counts = [len(package_type[s]) for s in suites]
    if not any(counts):
        return '0'
    return '+'.join(str(c) for c in counts)

def scheduler():
    """Top up the build queue across all suites.

    Queues, in order: never-tested packages, packages with a new version
    in the archive, and packages whose last build is older than two
    weeks.  Does nothing (beyond regenerating the schedule page) when
    more than 750 packages are already queued.  Finally rebuilds the
    schedule page and notifies IRC if anything was scheduled.
    """
    query = 'SELECT count(*) ' + \
            'FROM schedule AS p JOIN sources AS s ON p.package_id=s.id '
    total = int(query_db(query)[0][0])
    log.info('Currently scheduled packages in all suites: ' + str(total))
    if total > 750:
        generate_schedule()  # from reproducible_html_indexes
        log.info(str(total) + ' packages already scheduled' +
                 ', nothing to do here.')
        return
    else:
        log.info(str(total) + ' packages already scheduled' +
                 ', scheduling some more...')
        log.info('==============================================================')
    # untested packages
    untested = {}
    for suite in SUITES:
        log.info('Requesting 444 untested packages in ' + suite + '...')
        untested[suite] = scheduler_untested_packages(suite, 444)
        total += len(untested[suite])
        log.info('Received ' + str(len(untested[suite])) + ' untested packages in ' + suite + ' to schedule.')
    log.info('==============================================================')

    # packages with new versions: the fuller the queue, the fewer we request
    new = {}
    if total <= 100:
        many_new = 250
    elif total <= 200:
        many_new = 200
    else:
        many_new = 150
    for suite in SUITES:
        # bugfix: this log line used to sit before the loop and therefore
        # always reported the stale `suite` left over from the previous loop
        log.info('Requesting ' + str(many_new) + ' new versions in ' + suite + '...')
        new[suite] = scheduler_new_versions(suite, many_new)
        total += len(new[suite])
        log.info('Received ' + str(len(new[suite])) + ' new packages in ' + suite + ' to schedule.')
    log.info('==============================================================')

    # old packages
    old = {}
    if total <= 250:
        many_old_base = 35  # multiplied by 20 or 10 or 1, see below
    elif total <= 350:
        many_old_base = 25  # also...
    else:
        many_old_base = 0   # ...
    for suite in SUITES:
        if suite == 'unstable':
            suite_many_old = int(many_old_base*20)  # unstable changes the most and is most relevant
        elif suite == 'testing':
            suite_many_old = int(many_old_base*10)  # re-schedule testing less than unstable as we care more about unstable (atm)
        else:
            suite_many_old = int(many_old_base)     # experimental is roughly one twentieth of the size of the other suites
        log.info('Requesting ' + str(suite_many_old) + ' old packages in ' + suite + '...')
        old[suite] = scheduler_old_versions(suite, suite_many_old)
        total += len(old[suite])
        log.info('Received ' + str(len(old[suite])) + ' old packages in ' + suite + ' to schedule.')
    log.info('==============================================================')

    now_queued_here = {}
    # make sure to schedule packages in unstable first
    # (but keep the view ordering everywhere else)
    prioritized_suite_order = ['unstable']
    for suite in SUITES:
        if suite not in prioritized_suite_order:
            prioritized_suite_order.append(suite)
    for suite in prioritized_suite_order:
        query = 'SELECT count(*) ' + \
                'FROM schedule AS p JOIN sources AS s ON p.package_id=s.id ' + \
                'WHERE s.suite="{suite}"'.format(suite=suite)
        now_queued_here[suite] = int(query_db(query)[0][0]) + \
            len(untested[suite]+new[suite]+old[suite])
        # stagger date_scheduled: new versions 12h in the past, untested
        # now, old rebuilds 12h in the future — presumably the queue is
        # consumed oldest-first, so this orders the work; verify in the
        # builder job before relying on it
        schedule_packages(untested[suite], datetime.datetime.now())
        schedule_packages(new[suite], datetime.datetime.now()+datetime.timedelta(minutes=-720))
        schedule_packages(old[suite], datetime.datetime.now()+datetime.timedelta(minutes=720))
        log.info('### Suite ' + suite + ' done ###')
        log.info('==============================================================')
    # update the scheduled page
    generate_schedule()  # from reproducible_html_indexes
    # build the kgb message text
    message = 'Scheduled in ' + '+'.join(SUITES) + ': ' + \
              add_up_numbers(untested) + ' new and untested packages, ' + \
              add_up_numbers(new) + ' packages with new versions and ' + \
              add_up_numbers(old) + ' old packages with the same version, ' + \
              'for ' + str(total) + ' or ' + \
              '+'.join([str(now_queued_here[x]) for x in SUITES]) + ' packages in total.'
    log.info('\n\n\n')
    log.info(message)
    # only notify irc if there were packages scheduled in any suite
    for x in SUITES:
        if len(untested[x])+len(new[x])+len(old[x]) > 0:
            irc_msg(message)
            break


if __name__ == '__main__':
    log.info('Updating schroots and sources tables for all suites.')
    for suite in SUITES:
        call_apt_update(suite)
        update_sources_tables(suite)
    purge_old_pages()
    try:
        overall = int(query_db('SELECT count(*) FROM schedule')[0][0])
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; keep the best-effort fallback but narrow it.
        # A high fake count means "queue looks full, don't pile on more
        # work" when the db cannot be read.
        overall = 9999
    if overall > 750:
        log.info(str(overall) + ' packages already scheduled, nothing to do.')
        sys.exit()
    log.info(str(overall) + ' packages already scheduled, scheduling some more...')
    scheduler()