1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright © 2015 Mattia Rizzolo <mattia@mapreri.org>
# Based on various reproducible_* files © 2014 Holger Levsen <holger@layer-acht.org>
# Licensed under GPL-2
#
# Depends: python3
#
# Track the database schema and changes to it. plus allow a simple creation
# and migration of it.
from reproducible_common import *
# Timestamp in "%Y-%m-%d-%H-%M-%S" form, recorded in rb_schema alongside
# each schema version when it is applied.
now = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
# the original schema is here
db_schema = [
{
'name': 'rb_schema',
'query': ['''CREATE TABLE rb_schema
(version INTEGER NOT NULL,
date TEXT NOT NULL,
PRIMARY KEY (version))''',
'INSERT INTO rb_schema VALUES ("1", "''' + now + '")']
},
{
'name': 'source_packages',
'query': ['''CREATE TABLE source_packages
(name TEXT NOT NULL,
version TEXT NOT NULL,
status TEXT NOT NULL
CHECK
(status IN
("blacklisted", "FTBFS", "reproducible",
"unreproducible", "404", "not for us")
),
build_date TEXT NOT NULL,
PRIMARY KEY (name))''']
},
{
'name': 'sources_scheduled',
'query': ['''CREATE TABLE sources_scheduled
(name TEXT NOT NULL,
date_scheduled TEXT NOT NULL,
date_build_started TEXT NOT NULL,
PRIMARY KEY (name))''']
},
{
'name': 'sources',
'query': ['''CREATE TABLE sources
(name TEXT NOT NULL,
version TEXT NOT NULL)''']
},
{
'name': 'stats_pkg_state',
'query': ['''CREATE TABLE stats_pkg_state
(datum TEXT NOT NULL,
suite TEXT NOT NULL,
untested INTEGER,
reproducible INTEGER,
unreproducible INTEGER,
FTBFS INTEGER,
other INTEGER,
PRIMARY KEY (datum))''']
},
{
'name': 'stats_builds_per_day',
'query': ['''CREATE TABLE stats_builds_per_day
(datum TEXT NOT NULL,
suite TEXT NOT NULL,
reproducible INTEGER,
unreproducible INTEGER,
FTBFS INTEGER,
other INTEGER,
PRIMARY KEY (datum))''']
},
{
'name': 'stats_builds_age',
'query': ['''CREATE TABLE stats_builds_age
(datum TEXT NOT NULL,
suite TEXT NOT NULL,
oldest_reproducible REAL,
oldest_unreproducible REAL,
oldest_FTBFS REAL,
PRIMARY KEY (datum))''']
},
{
'name': 'stats_bugs',
'query': ['''CREATE TABLE stats_bugs
(datum TEXT NOT NULL,
open_toolchain INTEGER,
done_toolchain INTEGER,
open_infrastructure INTEGER,
done_infrastructure INTEGER,
open_timestamps INTEGER,
done_timestamps INTEGER,
open_fileordering INTEGER,
done_fileordering INTEGER,
open_buildpath INTEGER,
done_buildpath INTEGER,
open_username INTEGER,
done_username INTEGER,
open_hostname INTEGER,
done_hostname INTEGER,
open_uname INTEGER,
done_uname INTEGER,
open_randomness INTEGER,
done_randomness INTEGER,
open_buildinfo INTEGER,
done_buildinfo INTEGER,
open_cpu INTEGER,
done_cpu INTEGER,
PRIMARY KEY (datum))''']
},
{
'name': 'stats_notes',
'query': ['''CREATE TABLE stats_notes
(datum TEXT NOT NULL,
packages_with_notes INTEGER,
PRIMARY KEY (datum))''']
},
{
'name': 'stats_issues',
'query': ['''CREATE TABLE stats_issues
(datum TEXT NOT NULL,
known_issues INTEGER,
PRIMARY KEY (datum))''']
},
{
'name': 'stats_meta_pkg_state',
'query': ['''CREATE TABLE stats_meta_pkg_state
(datum TEXT NOT NULL,
suite TEXT NOT NULL,
meta_pkg TEXT NOT NULL,
reproducible INTEGER,
unreproducible INTEGER,
FTBFS INTEGER,
other INTEGER,
PRIMARY KEY (datum, suite, meta_pkg))''']
}
]
# and here is a list of queries, grouped by schema update, that can be used
# to bring the live schema up to date
schema_updates = {
    # Version 1 only records itself: the original db_schema above *is*
    # schema version 1.
    1: ['INSERT INTO rb_schema VALUES ("1", "' + now + '")'],
    2: [
        # do a funny dance to add an id, suite and architecture values to
        # the `suites` table
        '''CREATE TABLE sources_new_tmp
               (id INTEGER PRIMARY KEY,
                name TEXT NOT NULL,
                version TEXT NOT NULL,
                suite TEXT,
                architecture TEXT,
                UNIQUE (name, suite, architecture)
                ON CONFLICT REPLACE)''',
        '''CREATE TABLE sources_new
               (id INTEGER PRIMARY KEY,
                name TEXT NOT NULL,
                version TEXT NOT NULL,
                suite TEXT NOT NULL,
                architecture TEXT NOT NULL,
                UNIQUE (name, suite, architecture)
                ON CONFLICT REPLACE)''',
        'INSERT INTO sources_new_tmp (name, version) SELECT * FROM sources',
        'UPDATE sources_new_tmp SET suite="sid", architecture="amd64"',
        'INSERT INTO sources_new SELECT * FROM sources_new_tmp',
        'DROP TABLE sources_new_tmp',
        'DROP TABLE sources',
        'ALTER TABLE sources_new RENAME TO sources',
        # now that we have an id in `sources` rework all tables to join
        # against this table, and avoid duplicating data
        # `schedule`:
        '''CREATE TABLE schedule
               (id INTEGER PRIMARY KEY,
                package_id INTEGER NOT NULL,
                date_scheduled TEXT NOT NULL,
                date_build_started TEXT NOT NULL,
                save_artifacts INTEGER DEFAULT 0,
                UNIQUE (package_id),
                FOREIGN KEY(package_id) REFERENCES sources(id))''',
        '''INSERT INTO schedule (package_id, date_scheduled, date_build_started)
               SELECT s.id, p.date_scheduled, p.date_build_started
               FROM sources AS s JOIN sources_scheduled AS p ON s.name = p.name''',
        'DROP TABLE sources_scheduled',
        # `results`:
        # FIX: a comma was missing between the UNIQUE and FOREIGN KEY table
        # constraints.  SQLite happens to accept the omission, but only as a
        # historical parser-compatibility quirk; the comma is required by
        # the documented CREATE TABLE grammar.
        '''CREATE TABLE results
               (id INTEGER PRIMARY KEY,
                package_id INTEGER NOT NULL,
                version TEXT NOT NULL,
                status TEXT,
                build_date TEXT,
                build_duration TEXT DEFAULT '0',
                UNIQUE (package_id),
                FOREIGN KEY(package_id) REFERENCES sources(id))''',
        '''INSERT INTO results (package_id, version, status, build_date)
               SELECT s.id, r.version, r.status, r.build_date
               FROM sources AS s JOIN source_packages as r ON s.name = r.name''',
        'DROP TABLE source_packages',
        # `stats_builds`: (completely new table where we save every build)
        '''CREATE TABLE stats_build
               (id INTEGER PRIMARY KEY,
                name TEXT NOT NULL,
                version TEXT NOT NULL,
                suite TEXT NOT NULL,
                architecture TEXT NOT NULL,
                status TEXT NOT NULL,
                build_date TEXT NOT NULL,
                build_duration TEXT NOT NULL,
                UNIQUE (name, version, suite, architecture, build_date))''',
        # finally, record that this schema version has been applied
        'INSERT INTO rb_schema VALUES ("2", "' + now + '")']
}
def db_create_tables():
    """
    Ensure every table listed in db_schema exists in the database.

    Presence is checked against sqlite_master, the reserved SQLite table
    that holds the database metadata; any table not found there is
    created by running its stored queries.
    """
    for entry in db_schema:
        lookup = 'SELECT name FROM sqlite_master WHERE name="{}"'.format(
            entry['name'])
        if query_db(lookup):
            continue  # table already present
        log.warning(entry['name'] + ' does not exists. Creating...')
        for statement in entry['query']:
            # log a single-line, whitespace-collapsed form of the query
            log.info('\t' + re.sub(' +', ' ', statement.replace('\n', ' ')))
            query_db(statement)
def db_update():
    """
    Bring the live database schema up to the newest defined version.

    The newest applied version is read from the rb_schema table and
    compared against the highest key of the schema_updates dictionary;
    every update in between is then applied, in ascending order.
    """
    current = query_db('SELECT MAX(version) FROM rb_schema')[0][0]
    if not current:
        log.warning('This is probably a new database, there are no ' +
                    'previous updates noted')
        current = 0
    last = max(schema_updates.keys())
    if current >= last:
        if current > last:
            # NOTE(review): this helper name looks misspelled
            # ("critiacal") — confirm it matches reproducible_common.
            print_critiacal_message('The active database schema is higher than' +
                                    ' the last update available.\nPlease check!')
            sys.exit(1)
        return  # already up to date
    log.info('Found schema updates.')
    for version in range(current + 1, last + 1):
        log.info('Applying database update #' + str(version) + '. Queries:')
        for statement in schema_updates[version]:
            log.info('\t' + statement)
            query_db(statement)
if __name__ == '__main__':
    # Bootstrap the schema if needed, then apply any pending updates.
    try:
        if not query_db('SELECT * FROM rb_schema'):
            db_create_tables()
    # FIX: this was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt.  Catch Exception instead so interrupts still
    # propagate; any error here (typically a missing rb_schema table)
    # means the schema was never created, so do a full bootstrap.
    except Exception:
        log.error('There is no rb_schema table in the database.')
        log.error('Will run a full db_create_tables().')
        db_create_tables()
    db_update()
|