Compare commits
431 commits
81 changed files with 8385 additions and 2131 deletions
.gitignore (vendored, new file, +4)
@@ -0,0 +1,4 @@
+_build
+*~
+*#
+_opam
.ocp-indent (new file, +130)
@@ -0,0 +1,130 @@
+# -*- conf -*-
+# This is an example configuration file for ocp-indent
+#
+# Copy to the root of your project with name ".ocp-indent", customise, and
+# transparently get consistent indentation on all your ocaml source files.
+
+# Starting the configuration file with a preset ensures you won't fallback to
+# definitions from "~/.ocp/ocp-indent.conf".
+# These are `normal`, `apprentice` and `JaneStreet` and set different defaults.
+normal
+
+#
+# INDENTATION VALUES
+#
+
+# Number of spaces used in all base cases, for example:
+# let foo =
+#   ^^bar
+base = 2
+
+# Indent for type definitions:
+# type t =
+#   ^^int
+type = 2
+
+# Indent after `let in` (unless followed by another `let`):
+# let foo = () in
+# ^^bar
+in = 0
+
+# Indent after `match/try with` or `function`:
+# match foo with
+# ^^| _ -> bar
+with = 0
+
+# Indent for clauses inside a pattern-match (after the arrow):
+# match foo with
+# | _ ->
+#   ^^^^bar
+# the default is 2, which aligns the pattern and the expression
match_clause = 2
+
+# Indentation for items inside extension nodes:
+# [%% id.id
+#     ^^^^contents ]
+# [@@id
+#     ^^^^foo
+# ]
+ppx_stritem_ext = 2
+
+# When nesting expressions on the same line, their indentation are in
+# some cases stacked, so that it remains correct if you close them one
+# at a line. This may lead to large indents in complex code though, so
+# this parameter can be used to set a maximum value. Note that it only
+# affects indentation after function arrows and opening parens at end
+# of line.
+#
+# for example (left: `none`; right: `4`)
+# let f = g (h (i (fun x ->     # let f = g (h (i (fun x ->
+#       x)                      #     x)
+#     )                         #   )
+#   )                           # )
+max_indent = 4
+
+
+#
+# INDENTATION TOGGLES
+#
+
+# Wether the `with` parameter should be applied even when in a sub-block.
+# Can be `always`, `never` or `auto`.
+# if `always`, there are no exceptions
+# if `auto`, the `with` parameter is superseded when seen fit (most of the time,
+# but not after `begin match` for example)
+# if `never`, `with` is only applied if the match block starts a line.
+#
+# For example, the following is not indented if set to `always`:
+# let f = function
+#   ^^| Foo -> bar
+strict_with = never
+
+# Controls indentation after the `else` keyword. `always` indents after the
+# `else` keyword normally, like after `then`.
+# If set to `never', the `else` keyword won't indent when followed by a newline.
+# `auto` indents after `else` unless in a few "unclosable" cases (`let in`,
+# `match`...).
+#
+# For example, with `strict_else=never`:
+# if cond then
+#   foo
+# else
+# bar;
+# baz
+# `never` is discouraged if you may encounter code like this example,
+# because it hides the scoping error (`baz` is always executed)
+strict_else = always
+
+# Ocp-indent will normally try to preserve your in-comment indentation, as long
+# as it respects the left-margin or starts with `(*\n`. Setting this to `true`
+# forces alignment within comments.
+strict_comments = false
+
+# Toggles preference of column-alignment over line indentation for most
+# of the common operators and after mid-line opening parentheses.
+#
+# for example (left: `false'; right: `true')
+# let f x = x          # let f x = x
+#   + y                #           + y
+align_ops = true
+
+# Function parameters are normally indented one level from the line containing
+# the function. This option can be used to have them align relative to the
+# column of the function body instead.
+# if set to `always`, always align below the function
+# if `auto`, only do that when seen fit (mainly, after arrows)
+# if `never`, no alignment whatsoever
+#
+# for example (left: `never`; right: `always or `auto)
+# match foo with          # match foo with
+# | _ -> some_fun         # | _ -> some_fun
+#   ^^parameter           #          ^^parameter
+align_params = auto
+
+
+#
+# SYNTAX EXTENSIONS
+#
+
+# You can also add syntax extensions (as per the --syntax command-line option):
+# syntax = mll lwt
CHANGES.md (new file, +27)
@@ -0,0 +1,27 @@
+## v0.2.0 (2024-09-05)
+
+A whole slew of changes. Internally, we made a lot of incremental changes and improvements without doing a release. Thus this release is rather big. There is a lot of database migrations to apply, and unfortunately they have to be applied one at a time.
+
+* Add a /failed-builds/ endpoint that lists the most recent failed builds.
+* By default don't display failed builds on the front page.
+* Times are printed with the 'Z' time zone offset indicator.
+* Link to comparisons of builds take into account whether the "input", among others the list of dependencies, is different.
+* New subcommand `builder-db extract-build` takes a build UUID and extracts the builder "full" file.
+* Add /job/<job>/build/<build>/all.tar.gz endpoint with a gzip compressed tar archive of all build artifacts.
+* Visual overhaul.
+* Add (optional) visualizations displaying package dependencies ("opam-graph") and for unikernels a "modulectomy" view of how much each OCaml module is contributing to the final binary size. The visualizations are read from a cache on disk and can be generated from a script.
+* A script hook is added on file upload. It may be used to generate visualizations or publish system packages to a repository.
+* The 404 file not found page tries to be more informative.
+* The build page for a unikernel build displays the solo5 device manifest, e.g. `with block devices "storage", and net devices "service"`.
+* URLs with trailing slash redirect to without the trailing slash.
+* Builder-web will try to be more helpful if its database doesn't exist or the database version is wrong.
+* The opam diff works for mirage 4 unikernels taking into account the opam-monorepo/duniverse packages.
+* Markdown rendering is now done using cmarkit instead of omd.
+* Builder-web doesn't display jobs older than 30 days (customizable with `--expired-jobs` command line argument) on the front page.
+* Build artifacts are stored by their content, and artifacts are automatically deduplicated. This makes builder-web much more space efficient on deployments that don't use deduplication on the filesystem level.
+* New subcommands `builder-db vacuum *` to remove older builds. Can be called from a cron job to keep disk usage bounded.
+* Lots of other improvements and bug fixes.
+
+## v0.1.0 (2021-11-12)
+
+* Initial public release
LICENSE.md (new file, +3)
@@ -0,0 +1,3 @@
+Permission to use, copy, modify, and distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
README.md (new file, +66)
@@ -0,0 +1,66 @@
+# Builder-web - a web frontend for reproducible builds
+
+Builder-web takes in submissions of builds, typically from [builder](https://github.com/robur-coop/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
+Produced binaries can be downloaded and executed.
+[builds.robur.coop](https://builds.robur.coop/) itself runs builder-web.
+
+## Overview
+
+Builder-web is a single binary web server using a sqlite3 database with versioned schemas.
+Finished builds from [builder](https://github.com/robur-coop/builder/) are uploaded to builder-web, stored and indexed in the database and presented in the web interface to the user.
+Users can:
+
+* Get an overview of *jobs* - a job is typically script or opam package that is run and builds an artifact,
+* Browse all *builds* of a job - each job may have multiple builds, that are executed periodically by builder
+* Browse individual *build* and download artifacts and build information for reproducing the same binary.
+* Compare two builds, observing the differences in used opam packages, environment variables, and system packages.
+* Search for the SHA-256 hash of a binary to view a build that resulted in that binary.
+
+## Installation
+
+Installing from source can be done with opam: `opam install builder-web`.
+
+We also provide [reproducible binary packages](https://builds.robur.coop/job/builder-web/).
+
+## Setup
+
+Builder-web consists of a binary `builder-web` that runs a web server on port 3000 listening on all interfaces by default.
+These values can be changed with the `--port` and `--host` flags respectively.
+See `builder-web --help` for more information.
+
+Service scripts for FreeBSD and systemd are provided.
+
+The web server expects a sqlite3 database in its data directory.
+An empty database can be created with `builder-db migrate`.
+
+## Database migrations
+
+The sqlite3 database builder-web uses contains versioning information.
+On every schema change the database schema version is updated, and migration and rollback scripts are provided.
+The tool for database migrations is `builder-migrations`.
+See the `builder-migrations --help` output for each migration for further details.
+
+## Less common workflows
+
+Here are listed some less common but useful workflows:
+
+### Extracting builds from one server to another
+
+This is useful for development on a separate machine that doesn't run the build jobs itself.
+
+On the source server:
+```ocaml
+builder-db extract-build <build-hash> --dest <build-hash>.full
+```
+
+After copying the file over the destination server (you need a user first,
+see `builder-db user-add --help`):
+```ocaml
+curl --data-binary @<build-hash>.full http://<user>:<passwd>@localhost:<builder-web-port>/upload
+```
+
+## JSON Responses
+Some endpoints return JSON when the headers contain `Accept: application/json`.
+- `/compare/:build_left/:build_right`
+- `/job/:job/build/latest/**`
+- `/job/:job/build/latest`
@@ -12,9 +12,9 @@ let scrypt_params ?(scrypt_n = 16384) ?(scrypt_r = 8) ?(scrypt_p = 1) () =
   { scrypt_n; scrypt_r; scrypt_p }
 
 type pbkdf2_sha256 =
-  [ `Pbkdf2_sha256 of Cstruct.t * Cstruct.t * pbkdf2_sha256_params ]
+  [ `Pbkdf2_sha256 of string * string * pbkdf2_sha256_params ]
 
-type scrypt = [ `Scrypt of Cstruct.t * Cstruct.t * scrypt_params ]
+type scrypt = [ `Scrypt of string * string * scrypt_params ]
 
 type password_hash = [ pbkdf2_sha256 | scrypt ]
 
@@ -25,10 +25,10 @@ type 'a user_info = {
 }
 
 let pbkdf2_sha256 ~params:{ pbkdf2_sha256_iter = count } ~salt ~password =
-  Pbkdf.pbkdf2 ~prf:`SHA256 ~count ~dk_len:32l ~salt ~password:(Cstruct.of_string password)
+  Pbkdf.pbkdf2 ~prf:`SHA256 ~count ~dk_len:32l ~salt ~password
 
 let scrypt ~params:{ scrypt_n = n; scrypt_r = r; scrypt_p = p } ~salt ~password =
-  Scrypt_kdf.scrypt_kdf ~n ~r ~p ~dk_len:32l ~salt ~password:(Cstruct.of_string password)
+  Scrypt.scrypt ~n ~r ~p ~dk_len:32l ~salt ~password
 
 let hash ?(scrypt_params=scrypt_params ())
     ~username ~password ~restricted () =
@@ -43,10 +43,10 @@ let hash ?(scrypt_params=scrypt_params ())
 let verify_password password user_info =
   match user_info.password_hash with
   | `Pbkdf2_sha256 (password_hash, salt, params) ->
-    Cstruct.equal
+    String.equal
       (pbkdf2_sha256 ~params ~salt ~password)
       password_hash
   | `Scrypt (password_hash, salt, params) ->
-    Cstruct.equal
+    String.equal
      (scrypt ~params ~salt ~password)
      password_hash
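To make the shape of this API concrete, here is a minimal sketch of creating and verifying a user with `Builder_web_auth` as it looks after this change. The signatures are read off the hunks above, so treat it as illustrative rather than part of the diff:

```ocaml
(* Illustrative only: the hash/verify signatures are taken from the hunks
   above; the RNG must be seeded first, since the salt is drawn from it. *)
let () =
  Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna);
  let user =
    Builder_web_auth.hash ~username:"alice" ~password:"s3cret"
      ~restricted:true ()
  in
  (* digests are plain strings now, so verification uses String.equal *)
  assert (Builder_web_auth.verify_password "s3cret" user)
```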
@@ -1,3 +1,3 @@
 (library
  (name builder_web_auth)
- (libraries pbkdf scrypt-kdf mirage-crypto-rng))
+ (libraries kdf.pbkdf kdf.scrypt mirage-crypto-rng))
(deleted file)
@@ -1,250 +0,0 @@
-open Rresult.R.Infix
-
-let or_die exit_code = function
-  | Ok r -> r
-  | Error (`Msg msg) ->
-    Format.eprintf "Error: %s" msg;
-    exit exit_code
-  | Error (#Caqti_error.t as e) ->
-    Format.eprintf "Database error: %a" Caqti_error.pp e;
-    exit exit_code
-
-let foreign_keys =
-  Caqti_request.exec
-    Caqti_type.unit
-    "PRAGMA foreign_keys = ON"
-
-let connect uri =
-  Caqti_blocking.connect uri >>= fun (module Db : Caqti_blocking.CONNECTION) ->
-  Db.exec foreign_keys () >>= fun () ->
-  Ok (module Db : Caqti_blocking.CONNECTION)
-
-let do_migrate dbpath =
-  connect (Uri.make ~scheme:"sqlite3" ~path:dbpath ())
-  >>= fun (module Db : Caqti_blocking.CONNECTION) ->
-  List.fold_left
-    (fun r migrate ->
-       r >>= fun () ->
-       Logs.debug (fun m -> m "Executing migration query: %a" Caqti_request.pp migrate);
-       Db.exec migrate ())
-    (Ok ())
-    Builder_db.migrate
-
-let migrate () dbpath =
-  or_die 1 (do_migrate dbpath)
-
-let user_mod action dbpath scrypt_n scrypt_r scrypt_p username unrestricted =
-  let scrypt_params = Builder_web_auth.scrypt_params ?scrypt_n ?scrypt_r ?scrypt_p () in
-  let r =
-    connect
-      (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
-    >>= fun (module Db : Caqti_blocking.CONNECTION) ->
-    print_string "Password: ";
-    flush stdout;
-    (* FIXME: getpass *)
-    let password = read_line () in
-    let restricted = not unrestricted in
-    let user_info = Builder_web_auth.hash ~scrypt_params ~username ~password ~restricted () in
-    match action with
-    | `Add ->
-      Db.exec Builder_db.User.add user_info
-    | `Update ->
-      Db.exec Builder_db.User.update_user user_info
-  in
-  or_die 1 r
-
-let user_add () dbpath = user_mod `Add dbpath
-
-let user_update () dbpath = user_mod `Update dbpath
-
-let user_list () dbpath =
-  let r =
-    connect
-      (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
-    >>= fun (module Db : Caqti_blocking.CONNECTION) ->
-    Db.iter_s Builder_db.User.get_all
-      (fun username -> Ok (print_endline username))
-      ()
-  in
-  or_die 1 r
-
-let user_remove () dbpath username =
-  let r =
-    connect
-      (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
-    >>= fun (module Db : Caqti_blocking.CONNECTION) ->
-    Db.exec Builder_db.Access_list.remove_all_by_username username >>= fun () ->
-    Db.exec Builder_db.User.remove_user username
-  in
-  or_die 1 r
-
-let user_disable () dbpath username =
-  let r =
-    connect
-      (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
-    >>= fun (module Db : Caqti_blocking.CONNECTION) ->
-    Db.exec Builder_db.Access_list.remove_all_by_username username >>= fun () ->
-    Db.find_opt Builder_db.User.get_user username >>= function
-    | None -> Error (`Msg "user not found")
-    | Some (_, user_info) ->
-      let password_hash = `Scrypt (Cstruct.empty, Cstruct.empty, Builder_web_auth.scrypt_params ()) in
-      let user_info = { user_info with password_hash ; restricted = true } in
-      Db.exec Builder_db.User.update_user user_info
-  in
-  or_die 1 r
-
-let access_add () dbpath username jobname =
-  let r =
-    connect
-      (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
-    >>= fun (module Db : Caqti_blocking.CONNECTION) ->
-    Db.find_opt Builder_db.User.get_user username >>=
-    Option.to_result ~none:(`Msg "unknown user") >>= fun (user_id, _) ->
-    Db.find_opt Builder_db.Job.get_id_by_name jobname >>=
-    Option.to_result ~none:(`Msg "job not found") >>= fun job_id ->
-    Db.exec Builder_db.Access_list.add (user_id, job_id)
-  in
-  or_die 1 r
-
-let access_remove () dbpath username jobname =
-  let r =
-    connect
-      (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
-    >>= fun (module Db : Caqti_blocking.CONNECTION) ->
-    Db.find_opt Builder_db.User.get_user username >>=
-    Option.to_result ~none:(`Msg "unknown user") >>= fun (user_id, _) ->
-    Db.find_opt Builder_db.Job.get_id_by_name jobname >>=
-    Option.to_result ~none:(`Msg "job not found") >>= fun job_id ->
-    Db.exec Builder_db.Access_list.remove (user_id, job_id)
-  in
-  or_die 1 r
-
-let help man_format cmds = function
-  | None -> `Help (man_format, None)
-  | Some cmd ->
-    if List.mem cmd cmds
-    then `Help (man_format, Some cmd)
-    else `Error (true, "Unknown command: " ^ cmd)
-
-let dbpath =
-  let doc = "sqlite3 database path" in
-  Cmdliner.Arg.(value &
-                opt non_dir_file "/var/db/builder-web/builder.sqlite3" &
-                info ~doc ["dbpath"])
-
-let dbpath_new =
-  let doc = "sqlite3 database path" in
-  Cmdliner.Arg.(value &
-                opt string "/var/db/builder-web/builder.sqlite3" &
-                info ~doc ["dbpath"])
-
-let username =
-  let doc = "username" in
-  Cmdliner.Arg.(required &
-                pos 0 (some string) None &
-                info ~doc ~docv:"USERNAME" [])
-
-let password_iter =
-  let doc = "password hash count" in
-  Cmdliner.Arg.(value &
-                opt (some int) None &
-                info ~doc ["hash-count"])
-
-let scrypt_n =
-  let doc = "scrypt n parameter" in
-  Cmdliner.Arg.(value &
-                opt (some int) None &
-                info ~doc ["scrypt-n"])
-
-let scrypt_r =
-  let doc = "scrypt r parameter" in
-  Cmdliner.Arg.(value &
-                opt (some int) None &
-                info ~doc ["scrypt-r"])
-
-let scrypt_p =
-  let doc = "scrypt p parameter" in
-  Cmdliner.Arg.(value &
-                opt (some int) None &
-                info ~doc ["scrypt-p"])
-
-let unrestricted =
-  let doc = "unrestricted user" in
-  Cmdliner.Arg.(value & flag & info ~doc [ "unrestricted" ])
-
-let job =
-  let doc = "job" in
-  Cmdliner.Arg.(required &
-                pos 1 (some string) None &
-                info ~doc ~docv:"JOB" [])
-
-let setup_log =
-  let setup_log level =
-    Logs.set_level level;
-    Logs.set_reporter (Logs_fmt.reporter ~dst:Format.std_formatter ());
-    Logs.debug (fun m -> m "Set log level %s" (Logs.level_to_string level))
-  in
-  Cmdliner.Term.(const setup_log $ Logs_cli.level ())
-
-let migrate_cmd =
-  let doc = "create database and add tables" in
-  Cmdliner.Term.(pure migrate $ setup_log $ dbpath_new),
-  Cmdliner.Term.info ~doc "migrate"
-
-let user_add_cmd =
-  let doc = "add a user" in
-  (Cmdliner.Term.(pure user_add $ setup_log $ dbpath $ scrypt_n $ scrypt_r $ scrypt_p $ username $ unrestricted),
-   Cmdliner.Term.info ~doc "user-add")
-
-let user_update_cmd =
-  let doc = "update a user password" in
-  (Cmdliner.Term.(pure user_update $ setup_log $ dbpath $ scrypt_n $ scrypt_r $ scrypt_p $ username $ unrestricted),
-   Cmdliner.Term.info ~doc "user-update")
-
-let user_remove_cmd =
-  let doc = "remove a user" in
-  (Cmdliner.Term.(pure user_remove $ setup_log $ dbpath $ username),
-   Cmdliner.Term.info ~doc "user-remove")
-
-let user_disable_cmd =
-  let doc = "disable a user" in
-  (Cmdliner.Term.(pure user_disable $ setup_log $ dbpath $ username),
-   Cmdliner.Term.info ~doc "user-disable")
-
-let user_list_cmd =
-  let doc = "list all users" in
-  (Cmdliner.Term.(pure user_list $ setup_log $ dbpath),
-   Cmdliner.Term.info ~doc "user-list")
-
-let access_add_cmd =
-  let doc = "grant access to user and job" in
-  (Cmdliner.Term.(pure access_add $ setup_log $ dbpath $ username $ job),
-   Cmdliner.Term.info ~doc "access-add")
-
-let access_remove_cmd =
-  let doc = "remove access to user and job" in
-  (Cmdliner.Term.(pure access_remove $ setup_log $ dbpath $ username $ job),
-   Cmdliner.Term.info ~doc "access-remove")
-
-let help_cmd =
-  let topic =
-    let doc = "Command to get help on" in
-    Cmdliner.Arg.(value & pos 0 (some string) None & info ~doc ~docv:"COMMAND" [])
-  in
-  let doc = "Builder database help" in
-  Cmdliner.Term.(ret (const help $ man_format $ choice_names $ topic)),
-  Cmdliner.Term.info ~doc "help"
-
-let default_cmd =
-  let doc = "Builder database command" in
-  Cmdliner.Term.(ret (const help $ man_format $ choice_names $ const None)),
-  Cmdliner.Term.info ~doc "builder-db"
-
-let () =
-  Mirage_crypto_rng_unix.initialize ();
-  Cmdliner.Term.eval_choice
-    default_cmd
-    [help_cmd; migrate_cmd;
-     user_add_cmd; user_update_cmd; user_remove_cmd; user_list_cmd; user_disable_cmd;
-     access_add_cmd; access_remove_cmd]
-  |> Cmdliner.Term.exit
bin/builder_db_app.ml (new file, +1131)
File diff suppressed because it is too large.
bin/builder_system.ml (new file, +17)
@@ -0,0 +1,17 @@
+let uname =
+  let cmd = Bos.Cmd.(v "uname" % "-s") in
+  lazy (match Bos.OS.Cmd.(run_out cmd |> out_string |> success) with
+    | Ok s when s = "FreeBSD" -> `FreeBSD
+    | Ok s when s = "Linux" -> `Linux
+    | Ok s -> invalid_arg (Printf.sprintf "OS %s not supported" s)
+    | Error (`Msg m) -> invalid_arg m)
+
+let default_datadir =
+  match Lazy.force uname with
+  | `FreeBSD -> "/var/db/builder-web"
+  | `Linux -> "/var/lib/builder-web"
+
+let default_configdir =
+  match Lazy.force uname with
+  | `FreeBSD -> "/usr/local/etc/builder-web"
+  | `Linux -> "/etc/builder-web"
@@ -30,7 +30,7 @@ let write_raw s buf =
       safe_close s >|= fun () ->
       Error `Exception)
   in
-  (* Logs.debug (fun m -> m "writing %a" Cstruct.hexdump_pp (Cstruct.of_bytes buf)) ; *)
+  (* Logs.debug (fun m -> m "writing %a" (Ohex.pp_hexdump ()) (Bytes.unsafe_to_string buf)) ; *)
   w 0 (Bytes.length buf)
 
 let process =
@@ -78,30 +78,101 @@ let init_influx name data =
   in
   Lwt.async report
 
-let setup_app level influx port host datadir =
+let run_batch_viz ~cachedir ~datadir ~configdir =
+  let open Rresult.R.Infix in
+  begin
+    let script = Fpath.(configdir / "batch-viz.sh")
+    and script_log = Fpath.(cachedir / "batch-viz.log")
+    and viz_script = Fpath.(configdir / "upload-hooks" / "visualizations.sh")
+    in
+    Bos.OS.File.exists script >>= fun script_exists ->
+    if not script_exists then begin
+      Logs.warn (fun m -> m "Didn't find %s" (Fpath.to_string script));
+      Ok ()
+    end else
+      let args =
+        [ "--cache-dir=" ^ Fpath.to_string cachedir;
+          "--data-dir=" ^ Fpath.to_string datadir;
+          "--viz-script=" ^ Fpath.to_string viz_script ]
+        |> List.map (fun s -> "\"" ^ String.escaped s ^ "\"")
+        |> String.concat " "
+      in
+      (*> Note: The reason for appending, is that else a new startup could
+          overwrite an existing running batch's log*)
+      (Fpath.to_string script ^ " " ^ args
+       ^ " 2>&1 >> " ^ Fpath.to_string script_log
+       ^ " &")
+      |> Sys.command
+      |> ignore
+      |> Result.ok
+  end
+  |> function
+  | Ok () -> ()
+  | Error err ->
+    Logs.warn (fun m ->
+        m "Error while starting batch-viz.sh: %a"
+          Rresult.R.pp_msg err)
+
+let setup_app level influx port host datadir cachedir configdir run_batch_viz_flag expired_jobs =
   let dbpath = Printf.sprintf "%s/builder.sqlite3" datadir in
   let datadir = Fpath.v datadir in
+  let cachedir =
+    cachedir |> Option.fold ~none:Fpath.(datadir / "_cache") ~some:Fpath.v
+  in
+  let configdir = Fpath.v configdir in
+  let () = Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna) in
   let () = init_influx "builder-web" influx in
+  let () =
+    if run_batch_viz_flag then
+      run_batch_viz ~cachedir ~datadir ~configdir
+  in
   match Builder_web.init dbpath datadir with
+  | exception Sqlite3.Error e ->
+    Format.eprintf "Error: @[@,%s.\
+                    @,Does the database file exist? Create with `builder-db migrate`.@]\n%!"
+      e;
+    exit 2
+  | Error (#Caqti_error.load as e) ->
+    Format.eprintf "Error: %a\n%!" Caqti_error.pp e;
+    exit 2
-  | Error (#Caqti_error.connect | #Caqti_error.call_or_retrieve | `Msg _ | `Wrong_version _ as e) ->
+  | Error (`Wrong_version _ as e) ->
+    Format.eprintf "Error: @[@,%a.\
+                    @,Migrate database version with `builder-migrations`,\
+                    @,or start with a fresh database with `builder-db migrate`.@]\n%!"
+      Builder_web.pp_error e;
+  | Error (
+      #Caqti_error.connect
+    | #Caqti_error.call_or_retrieve
+    | `Msg _ as e
+    ) ->
     Format.eprintf "Error: %a\n%!" Builder_web.pp_error e;
     exit 1
   | Ok () ->
-    let level = match level with None -> None | Some Logs.Debug -> Some `Debug | Some Info -> Some `Info | Some Warning -> Some `Warning | Some Error -> Some `Error | Some App -> None in
+    let level = match level with
+      | None -> None
+      | Some Logs.Debug -> Some `Debug
+      | Some Info -> Some `Info
+      | Some Warning -> Some `Warning
+      | Some Error -> Some `Error
+      | Some App -> None
+    in
+    let error_handler = Dream.error_template Builder_web.error_template in
     Dream.initialize_log ?level ();
-    Dream.run ~port ~interface:host ~https:false
+    let dream_routes = Builder_web.(
+        routes ~datadir ~cachedir ~configdir ~expired_jobs
+        |> to_dream_routes
+      )
+    in
+    Dream.run ~port ~interface:host ~tls:false ~error_handler
     @@ Dream.logger
-    @@ Dream.sql_pool ("sqlite3:" ^ dbpath)
     @@ Http_status_metrics.handle
-    @@ Builder_web.add_routes datadir
-    @@ Dream.not_found
+    @@ Builder_web.Middleware.remove_trailing_url_slash
+    @@ Dream.router dream_routes
 
 open Cmdliner
 
-let ip_port : (Ipaddr.V4.t * int) Arg.converter =
+let ip_port : (Ipaddr.V4.t * int) Arg.conv =
   let default_port = 8094 in
   let parse s =
     match
@@ -113,16 +184,40 @@ let ip_port : (Ipaddr.V4.t * int) Arg.converter =
     end
     | _ -> Error "multiple : found"
   with
-  | Error msg -> `Error msg
+  | Error msg -> Error (`Msg msg)
   | Ok (ip, port) -> match Ipaddr.V4.of_string ip with
-    | Ok ip -> `Ok (ip, port)
-    | Error `Msg msg -> `Error msg
+    | Ok ip -> Ok (ip, port)
+    | Error `Msg msg -> Error (`Msg msg)
  in
-  parse, fun ppf (ip, port) -> Format.fprintf ppf "%a:%d" Ipaddr.V4.pp ip port
+  let printer ppf (ip, port) =
+    Format.fprintf ppf "%a:%d" Ipaddr.V4.pp ip port in
+  Arg.conv (parse, printer)
 
 let datadir =
   let doc = "data directory" in
-  Arg.(value & opt dir "/var/db/builder-web/" & info [ "d"; "datadir" ] ~doc)
+  let docv = "DATA_DIR" in
+  let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
+  Arg.(
+    value &
+    opt dir Builder_system.default_datadir &
+    info ~env [ "d"; "datadir" ] ~doc ~docv
+  )
+
+let cachedir =
+  let doc = "cache directory" in
+  let docv = "CACHE_DIR" in
+  Arg.(
+    value
+    & opt (some ~none:"DATADIR/_cache" dir) None
+    & info [ "cachedir" ] ~doc ~docv)
+
+let configdir =
+  let doc = "config directory" in
+  let docv = "CONFIG_DIR" in
+  Arg.(
+    value &
+    opt dir Builder_system.default_configdir &
+    info [ "c"; "configdir" ] ~doc ~docv)
 
 let port =
   let doc = "port" in
@@ -133,13 +228,31 @@ let host =
   Arg.(value & opt string "0.0.0.0" & info [ "h"; "host" ] ~doc)
 
 let influx =
-  let doc = "IP address and port (default: 8094) to report metrics to in influx line protocol" in
-  Arg.(value & opt (some ip_port) None & info [ "influx" ] ~doc ~docv:"INFLUXHOST[:PORT]")
+  let doc = "IP address and port (default: 8094) to report metrics to \
+             influx line protocol" in
+  Arg.(
+    value &
+    opt (some ip_port) None &
+    info [ "influx" ] ~doc ~docv:"INFLUXHOST[:PORT]")
+
+let run_batch_viz =
+  let doc = "Run CONFIG_DIR/batch-viz.sh on startup. \
+             Note that this is started in the background - so the user \
+             is in charge of not running several instances of this. A \
+             log is written to CACHE_DIR/batch-viz.log" in
+  Arg.(value & flag & info [ "run-batch-viz" ] ~doc)
+
+let expired_jobs =
+  let doc = "Amount of days after which a job is considered to be inactive if \
+             no successful build has been achieved (use 0 for infinite)" in
+  Arg.(value & opt int 30 & info [ "expired-jobs" ] ~doc)
 
 let () =
-  let term = Term.(pure setup_app $ Logs_cli.level () $ influx $ port $ host $ datadir) in
-  let info = Term.info "Builder web" ~doc:"Builder web" ~man:[] in
-  match Term.eval (term, info) with
-  | `Ok () -> exit 0
-  | `Error _ -> exit 1
-  | _ -> exit 0
+  let term =
+    Term.(const setup_app $ Logs_cli.level () $ influx $ port $ host $ datadir $
+          cachedir $ configdir $ run_batch_viz $ expired_jobs)
+  in
+  let info = Cmd.info "Builder web" ~doc:"Builder web" ~man:[] in
+  Cmd.v info term
+  |> Cmd.eval
+  |> exit
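As an aside, the `@@`-chained middleware style used in the new entry point is standard Dream. A stripped-down sketch of the same pipeline shape, with a hypothetical route rather than builder-web's actual ones, looks like:

```ocaml
(* Sketch of the Dream middleware pipeline shape; the route below is
   hypothetical and only illustrates the @@ chaining used above. *)
let () =
  Dream.run ~port:3000 ~interface:"0.0.0.0" ~tls:false
  @@ Dream.logger
  @@ Dream.router [ Dream.get "/" (fun _ -> Dream.html "builder-web") ]
```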
bin/dune
@@ -1,11 +1,19 @@
+(library
+ (name builder_system)
+ (modules builder_system)
+ (libraries bos))
+
 (executable
  (public_name builder-web)
  (name builder_web_app)
  (modules builder_web_app)
- (libraries builder_web mirage-crypto-rng.unix cmdliner logs.cli metrics metrics-lwt metrics-influx metrics-rusage ipaddr ipaddr.unix http_status_metrics))
+ (libraries builder_web builder_system mirage-crypto-rng.unix cmdliner
+  logs.cli metrics metrics-lwt metrics-influx metrics-rusage ipaddr
+  ipaddr.unix http_status_metrics))
 
 (executable
  (public_name builder-db)
- (name builder_db)
- (modules builder_db)
- (libraries builder_db caqti.blocking uri bos fmt logs logs.cli logs.fmt cmdliner mirage-crypto-rng.unix))
+ (name builder_db_app)
+ (modules builder_db_app)
+ (libraries builder_web builder_db builder_system caqti.blocking uri bos fmt
+  logs logs.cli logs.fmt cmdliner mirage-crypto-rng.unix builder))
@@ -1,5 +1,3 @@
-open Rresult.R.Infix
-
 type action = Fpath.t -> Caqti_blocking.connection ->
   (unit, [ Caqti_error.call_or_retrieve | `Wrong_version of int32 * int64 | `Msg of string ]) result
 
@@ -13,6 +11,7 @@ module type MIGRATION = sig
   val rollback : action
 end
 
+
 let pp_error ppf = function
   | #Caqti_error.load_or_connect | #Caqti_error.call_or_retrieve as e ->
     Caqti_error.pp ppf e
@@ -25,10 +24,11 @@ let pp_error ppf = function
 let or_die exit_code = function
   | Ok r -> r
   | Error e ->
-    Format.eprintf "Database error: %a" pp_error e;
+    Format.eprintf "Database error: %a\n" pp_error e;
     exit exit_code
 
 let do_database_action action () datadir =
+  let ( let* ) = Result.bind in
   let datadir = Fpath.v datadir in
   let dbpath = Fpath.(datadir / "builder.sqlite3") in
   Logs.debug (fun m -> m "Connecting to database...");
@@ -39,7 +39,7 @@ let do_database_action action () datadir =
   in
   Logs.debug (fun m -> m "Connected!");
   let r =
-    Db.start () >>= fun () ->
+    let* () = Db.start () in
     Logs.debug (fun m -> m "Started database transaction");
     match action datadir conn with
    | Ok () ->
@@ -47,7 +47,7 @@ let do_database_action action () datadir =
       Db.commit ()
     | Error _ as e ->
       Logs.debug (fun m -> m "Rolling back database transaction");
-      Db.rollback () >>= fun () ->
+      let* () = Db.rollback () in
       e
   in
   or_die 2 r
@@ -61,9 +61,10 @@ let help man_format migrations = function
 
 let datadir =
   let doc = "data directory containing builder.sqlite3 and data files" in
+  let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
   Cmdliner.Arg.(value &
-    opt dir "/var/db/builder-web/" &
-    info ~doc ["datadir"])
+    opt dir Builder_system.default_datadir &
+    info ~env ~doc ["datadir"; "d"])
 
 let setup_log =
   let setup_log level =
@@ -72,23 +73,69 @@ let setup_log =
   in
   Cmdliner.Term.(const setup_log $ Logs_cli.level ())
 
+open Cmdliner
+
 let actions (module M : MIGRATION) =
   let c s = s ^ "-" ^ M.identifier in
   let v doc from_ver to_ver = Printf.sprintf "%s (DB version %Ld -> %Ld)" doc from_ver to_ver in
-  [
-    (Cmdliner.Term.(const do_database_action $ const M.migrate $ setup_log $ datadir),
-     Cmdliner.Term.info ~doc:(v M.migrate_doc M.old_version M.new_version)
-       (c "migrate"));
-    (Cmdliner.Term.(const do_database_action $ const M.rollback $ setup_log $ datadir),
-     Cmdliner.Term.info ~doc:(v M.rollback_doc M.new_version M.old_version)
-       (c "rollback"));
-  ]
+  let migrate_cmd =
+    let term = Term.(
+        const do_database_action $ const M.migrate $ setup_log $ datadir) in
+    let info = Cmd.info ~doc:(v M.migrate_doc M.old_version M.new_version)
+        (c "migrate") in
+    Cmd.v info term
+  in
+  let rollback_cmd =
+    let term = Term.(
+        const do_database_action $ const M.rollback $ setup_log $ datadir) in
+    let info = Cmd.info ~doc:(v M.rollback_doc M.new_version M.old_version)
+        (c "rollback") in
+    Cmd.v info term
+  in
+  [ migrate_cmd; rollback_cmd ]
 
 let f20210308 =
   let doc = "Remove broken builds as fixed in commit a57798f4c02eb4d528b90932ec26fb0b718f1a13. \
              Note that the files on disk have to be removed manually." in
-  Cmdliner.Term.(const do_database_action $ const M20210308.fixup $ setup_log $ datadir),
-  Cmdliner.Term.info ~doc "fixup-2021-03-08"
+  let term = Term.(const do_database_action $ const M20210308.fixup $ setup_log $ datadir) in
+  let info = Cmd.info ~doc "fixup-2021-03-08" in
+  Cmd.v info term
+
+let f20210707a =
+  let doc = "Remove orb.deb and orb.txz that ended up in the build." in
+  let term = Term.(const do_database_action $ const M20210707a.fixup $ setup_log $ datadir) in
+  let info = Cmd.info ~doc "fixup-2021-07-07a" in
+  Cmd.v info term
+
+let f20210707b =
+  let doc = "Move *.deb.debug to bin/*.deb and remove the earlier bin/*.deb. Adjust main_binary of build." in
+  let term = Term.(const do_database_action $ const M20210707b.fixup $ setup_log $ datadir) in
+  let info = Cmd.info ~doc "fixup-2021-07-07b" in
+  Cmd.v info term
+
+let f20210707c =
+  let doc = "Strip bin/*.{hvt,xen} if no *.{hvt,xen} exists. Adjust build_artifact table and main_binary of build." in
+  let term = Term.(const do_database_action $ const M20210707c.fixup $ setup_log $ datadir) in
+  let info = Cmd.info ~doc "fixup-2021-07-07c" in
+  Cmd.v info term
+
+let f20210707d =
+  let doc = "Remove ./ from filepath." in
+  let term = Term.(const do_database_action $ const M20210707d.fixup $ setup_log $ datadir) in
+  let info = Cmd.info ~doc "fixup-2021-07-07d" in
+  Cmd.v info term
+
+let f20210712b =
+  let doc = "Remove build-hashes and README from artifacts." in
+  let term = Term.(const do_database_action $ const M20210712b.fixup $ setup_log $ datadir) in
+  let info = Cmd.info ~doc "fixup-2021-07-12b" in
+  Cmd.v info term
+
+let f20210910 =
+  let doc = "Undo builds with script and console mixed up." in
+  let term = Term.(const do_database_action $ const M20210910.fixup $ setup_log $ datadir) in
+  let info = Cmd.info ~doc "fixup-2021-09-10" in
+  Cmd.v info term
 
 let help_cmd =
   let topic =
@@ -96,17 +143,16 @@ let help_cmd =
     Cmdliner.Arg.(value & pos 0 (some string) None & info ~doc ~docv:"MIGRATION" [])
   in
   let doc = "Builder migration help" in
-  Cmdliner.Term.(ret (const help $ man_format $ choice_names $ topic)),
-  Cmdliner.Term.info ~doc "help"
-
-let default_cmd =
-  let doc = "Builder migration command" in
-  Cmdliner.Term.(ret (const help $ man_format $ choice_names $ const None)),
-  Cmdliner.Term.info ~doc "builder-migrations"
+  let term = Term.(ret (const help $ Arg.man_format $ choice_names $ topic)) in
+  let info = Cmd.info ~doc "help" in
+  Cmd.v info term
 
 let () =
-  Cmdliner.Term.eval_choice
-    default_cmd
+  let doc = "Builder migration command" in
+  let default_term = Term.(ret (const help $ Arg.man_format $ choice_names $ const None)) in
+  let default_info = Cmd.info ~doc "builder-migrations" in
+  Cmd.group
+    ~default:default_term default_info
    (List.concat [
       [ help_cmd ];
       actions (module M20210126);
@@ -119,5 +165,23 @@ let () =
       actions (module M20210602);
       actions (module M20210608);
       actions (module M20210609);
+      actions (module M20210625);
+      actions (module M20210629);
+      actions (module M20210630);
+      actions (module M20210701);
+      actions (module M20210706);
+      [ f20210707a ];
+      [ f20210707b ];
+      [ f20210707c ];
+      [ f20210707d ];
+      actions (module M20210712a);
+      [ f20210712b ];
+      actions (module M20210712c);
+      [ f20210910 ];
+      actions (module M20211105);
+      actions (module M20220509);
+      actions (module M20230911);
+      actions (module M20230914);
     ])
-  |> Cmdliner.Term.exit
+  |> Cmd.eval
+  |> exit
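The recurring change in this file is the migration to the cmdliner 1.1 API: tuples of `Term.t * Term.info` evaluated with `Term.eval_choice` become `Cmd.t` values built with `Cmd.v` and grouped with `Cmd.group`. A self-contained sketch of the new shape, where the `greet` subcommand is hypothetical and not part of builder-web:

```ocaml
(* Hypothetical subcommand illustrating the cmdliner >= 1.1 API that the
   migration above targets. *)
open Cmdliner

let greet_cmd =
  let term = Term.(const print_endline $ const "hello") in
  Cmd.v (Cmd.info "greet" ~doc:"print a greeting") term

let () =
  (* without a subcommand, fall back to showing help *)
  let default = Term.(ret (const (`Help (`Pager, None)))) in
  exit (Cmd.eval (Cmd.group ~default (Cmd.info "tool") [ greet_cmd ]))
```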
@@ -1,4 +1,5 @@
 (executable
  (public_name builder-migrations)
  (name builder_migrations)
- (libraries builder_db caqti caqti-driver-sqlite3 caqti.blocking cmdliner logs logs.cli logs.fmt ))
+ (libraries builder_system builder_db caqti caqti-driver-sqlite3
+  caqti.blocking cmdliner logs logs.cli logs.fmt opam-format bos duration))
@@ -1,29 +1,47 @@
 (* Grej is utilities *)
-open Rresult.R.Infix
+module Syntax = struct
+  open Caqti_request.Infix
+  let ( let* ) = Result.bind
+  let ( let+ ) x f = Result.map f x
+  let ( ->. ) = ( ->. ) ~oneshot:true
+  let ( ->! ) = ( ->! ) ~oneshot:true
+  let ( ->? ) = ( ->? ) ~oneshot:true
+  let ( ->* ) = ( ->* ) ~oneshot:true
+end
+
+module Infix = struct
+  open Caqti_request.Infix
+  let ( >>= ) = Result.bind
+  let ( >>| ) x f = Result.map f x
+  let ( ->. ) = ( ->. ) ~oneshot:true
+  let ( ->! ) = ( ->! ) ~oneshot:true
+  let ( ->? ) = ( ->? ) ~oneshot:true
+  let ( ->* ) = ( ->* ) ~oneshot:true
+end
+
+open Syntax
 
 let set_version version =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
-    (Printf.sprintf "PRAGMA user_version = %Ld" version)
+  Caqti_type.unit ->. Caqti_type.unit @@
+  Printf.sprintf "PRAGMA user_version = %Ld" version
 
 let check_version
     ?application_id:(desired_application_id=Builder_db.application_id)
     ~user_version:desired_user_version
     (module Db : Caqti_blocking.CONNECTION) =
-  Db.find Builder_db.get_application_id () >>= fun application_id ->
-  Db.find Builder_db.get_version () >>= fun user_version ->
+  let* application_id = Db.find Builder_db.get_application_id () in
+  let* user_version = Db.find Builder_db.get_version () in
   if application_id <> desired_application_id || user_version <> desired_user_version
   then Error (`Wrong_version (application_id, user_version))
   else Ok ()
 
 let list_iter_result f xs =
   List.fold_left
-    (fun r x -> r >>= fun () -> f x)
+    (fun r x -> let* () = r in f x)
     (Ok ())
     xs
 
 let foreign_keys on =
   let on = if on then "ON" else "OFF" in
-  Caqti_request.exec
-    Caqti_type.unit
-    (Printf.sprintf "PRAGMA foreign_keys = %s" on)
+  Caqti_type.unit ->. Caqti_type.unit @@
+  Printf.sprintf "PRAGMA foreign_keys = %s" on
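These wrappers re-export Caqti's request-definition operators with `~oneshot:true`, so migration queries are not added to the prepared-statement cache. A small sketch of defining and running a query in this style; the query itself is hypothetical:

```ocaml
(* Hypothetical query written with the Grej.Infix operators defined above. *)
open Grej.Infix

let count_builds =
  Caqti_type.unit ->! Caqti_type.int @@
  "SELECT count(*) FROM build"

(* run it against a blocking connection; >>= here is Result.bind *)
let print_build_count (module Db : Caqti_blocking.CONNECTION) =
  Db.find count_builds () >>= fun n ->
  Ok (Printf.printf "%d builds\n" n)
```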
@@ -3,35 +3,30 @@ let identifier = "2021-01-26"
 let migrate_doc = "add column main_binary to build"
 let rollback_doc = "remove column main_binary from build"
 
+open Grej.Infix
+
 let set_application_id =
-  Caqti_request.exec
-    Caqti_type.unit
-    (Printf.sprintf "PRAGMA application_id = %ld" Builder_db.application_id)
+  Caqti_type.unit ->. Caqti_type.unit @@
+  Printf.sprintf "PRAGMA application_id = %ld" Builder_db.application_id
 
 let alter_build =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "ALTER TABLE build ADD COLUMN main_binary TEXT"
 
 let all_builds =
-  Caqti_request.collect ~oneshot:true
-    Caqti_type.unit
-    Caqti_type.int64
+  Caqti_type.unit ->* Caqti_type.int64 @@
   "SELECT id FROM build"
 
 let bin_artifact =
-  Caqti_request.collect ~oneshot:true
-    Caqti_type.int64
-    Caqti_type.(tup2 int64 string)
+  Caqti_type.int64 ->* Caqti_type.(t2 int64 string) @@
   "SELECT id, filepath FROM build_artifact WHERE build = ? AND filepath LIKE 'bin/%'"
 
 let set_main_binary =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.(tup2 int64 (option string))
-    "UPDATE build SET main_binary = ?2 WHERE id = ?1"
+  Caqti_type.(t2 int64 (option string)) ->. Caqti_type.unit @@
+  "UPDATE build SET main_binary = $2 WHERE id = $1"
 
 let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
-  let open Rresult.R.Infix in
+  let open Grej.Infix in
   Grej.check_version ~application_id:0l ~user_version:0L (module Db) >>= fun () ->
   Db.exec alter_build () >>= fun () ->
   Db.collect_list all_builds () >>= fun builds ->
@@ -52,13 +47,11 @@ let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
   Db.exec (Grej.set_version new_version) ()
 
 let rename_build =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "ALTER TABLE build RENAME TO __tmp_build"
 
 let create_build =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   {| CREATE TABLE build (
        id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       uuid VARCHAR(36) NOT NULL UNIQUE,
@@ -78,8 +71,7 @@ let create_build =
   |}
 
 let rollback_data =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   {| INSERT INTO build
      SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
        result_kind, result_code, result_msg, console, script, job
@@ -87,7 +79,7 @@ let rollback_data =
   |}
 
 let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
-  let open Rresult.R.Infix in
+  let open Grej.Infix in
   Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
   Db.exec rename_build () >>= fun () ->
   Db.exec create_build () >>= fun () ->
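The migration modules in this comparison all share one skeleton: check the schema version with `Grej.check_version`, apply the DDL, then bump the version with `Grej.set_version`. A hedged sketch of that skeleton; the column and version numbers are hypothetical:

```ocaml
(* Skeleton of a migration in the style above; the ALTER TABLE statement
   and version numbers are hypothetical, not part of the diff. *)
open Grej.Infix

let add_comment_column =
  Caqti_type.unit ->. Caqti_type.unit @@
  "ALTER TABLE build ADD COLUMN comment TEXT"

let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:1L (module Db) >>= fun () ->
  Db.exec add_comment_column () >>= fun () ->
  Db.exec (Grej.set_version 2L) ()
```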
@@ -3,12 +3,11 @@ let identifier = "2021-02-02"
 let migrate_doc = "add index job_build_idx on build"
 let rollback_doc = "rollback index job_build_idx on build"
 
-open Rresult.R.Infix
+open Grej.Infix
 
 let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
   let job_build_idx =
-    Caqti_request.exec ~oneshot:true
-      Caqti_type.unit
+    Caqti_type.unit ->. Caqti_type.unit @@
     "CREATE INDEX job_build_idx ON build(job)";
   in
   Grej.check_version ~user_version:1L (module Db) >>= fun () ->
@@ -16,8 +15,7 @@ let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
 
 let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
   let q =
-    Caqti_request.exec ~oneshot:true
-      Caqti_type.unit
+    Caqti_type.unit ->. Caqti_type.unit @@
     "DROP INDEX IF EXISTS job_build_idx"
   in
   Grej.check_version ~user_version:1L (module Db) >>= fun () ->
@@ -4,14 +4,14 @@ let identifier = "2021-02-16"
 let migrate_doc = "change to scrypt hashed passwords (NB: destructive!!)"
 let rollback_doc = "rollback scrypt hashed passwords (NB: destructive!!)"
 
+open Grej.Infix
+
 let drop_user =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "DROP TABLE user"
 
 let new_user =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   {| CREATE TABLE user (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       username VARCHAR(255) NOT NULL UNIQUE,
@@ -24,8 +24,7 @@ let new_user =
   |}
 
 let old_user =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   {| CREATE TABLE user (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       username VARCHAR(255) NOT NULL UNIQUE,
@@ -36,14 +35,12 @@ let old_user =
   |}
 
 let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
-  let open Rresult.R.Infix in
   Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
   Db.exec drop_user () >>= fun () ->
   Db.exec new_user () >>= fun () ->
   Db.exec (Grej.set_version new_version) ()
 
 let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
-  let open Rresult.R.Infix in
   Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
   Db.exec drop_user () >>= fun () ->
   Db.exec old_user () >>= fun () ->
@@ -4,9 +4,10 @@ let identifier = "2021-02-18"
 let migrate_doc = "add column size to build_file and build_artifact"
 let rollback_doc = "remove column size to build_file and build_artifact"

+open Grej.Infix
+
 let new_build_artifact =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
     {| CREATE TABLE new_build_artifact (
          id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
          filepath TEXT NOT NULL, -- the path as in the build
@@ -21,8 +22,7 @@ let new_build_artifact =
   |}

 let new_build_file =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
     {| CREATE TABLE new_build_file (
          id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
          filepath TEXT NOT NULL, -- the path as in the build
@@ -37,53 +37,42 @@ let new_build_file =
   |}

 let collect_build_artifact =
-  Caqti_request.collect ~oneshot:true
-    Caqti_type.unit
-    Caqti_type.(tup3 int64 (tup3 string string octets) int64)
+  Caqti_type.unit ->* Caqti_type.(t3 int64 (t3 string string octets) int64) @@
     "SELECT id, filepath, localpath, sha256, build FROM build_artifact"

 let collect_build_file =
-  Caqti_request.collect ~oneshot:true
-    Caqti_type.unit
-    Caqti_type.(tup3 int64 (tup3 string string octets) int64)
+  Caqti_type.unit ->* Caqti_type.(t3 int64 (t3 string string octets) int64) @@
     "SELECT id, filepath, localpath, sha256, build FROM build_file"

 let insert_new_build_artifact =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.(tup3 int64 (tup4 string string octets int64) int64)
+  Caqti_type.(t3 int64 (t4 string string octets int64) int64) ->. Caqti_type.unit @@
     {| INSERT INTO new_build_artifact (id, filepath, localpath, sha256, size, build)
        VALUES (?, ?, ?, ?, ?, ?)
     |}

 let insert_new_build_file =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.(tup3 int64 (tup4 string string octets int64) int64)
+  Caqti_type.(t3 int64 (t4 string string octets int64) int64) ->. Caqti_type.unit @@
     {| INSERT INTO new_build_file (id, filepath, localpath, sha256, size, build)
        VALUES (?, ?, ?, ?, ?, ?)
     |}

 let drop_build_artifact =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
     "DROP TABLE build_artifact"

 let drop_build_file =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
     "DROP TABLE build_file"

 let rename_build_artifact =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
     "ALTER TABLE new_build_artifact RENAME TO build_artifact"

 let rename_build_file =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
     "ALTER TABLE new_build_file RENAME TO build_file"

 let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
-  let open Rresult.R.Infix in
   Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
   Db.exec new_build_artifact () >>= fun () ->
   Db.rev_collect_list collect_build_artifact () >>= fun build_artifacts ->
@@ -110,8 +99,7 @@ let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
   Db.exec (Grej.set_version new_version) ()

 let old_build_artifact =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
     {| CREATE TABLE new_build_artifact (
          id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
          filepath TEXT NOT NULL, -- the path as in the build
@@ -125,8 +113,7 @@ let old_build_artifact =
   |}

 let old_build_file =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
     {| CREATE TABLE new_build_file (
          id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
          filepath TEXT NOT NULL, -- the path as in the build
@@ -140,17 +127,14 @@ let old_build_file =
   |}

 let copy_build_artifact =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
     "INSERT INTO new_build_artifact SELECT id, filepath, localpath, sha256, build FROM build_artifact"

 let copy_build_file =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
     "INSERT INTO new_build_file SELECT id, filepath, localpath, sha256, build FROM build_file"

 let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
-  let open Rresult.R.Infix in
   Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
   Db.exec old_build_artifact () >>= fun () ->
   Db.exec copy_build_artifact () >>= fun () ->
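SQLite cannot add a NOT NULL column to an existing table without a default, so this migration uses the classic rebuild recipe: create a new_ table with the extra size column, copy every row across while computing the new value, drop the old table, and rename the new one into place. The row-copy loop itself falls outside the hunk; a sketch of what it plausibly looks like, noting that localpath was still stored absolute at this point (the datadir prefix is only stripped in the 2021-05-31 migration), so the artifact can be stat'ed directly:

(* Sketch of the copy step: stat each artifact on disk to obtain its
   size, then insert into the new table. Request names mirror the ones
   defined above; the exact original loop is not shown in the hunk. *)
let copy_artifacts (module Db : Caqti_blocking.CONNECTION) rows =
  let ( >>= ) = Result.bind in
  Grej.list_iter_result
    (fun (id, (filepath, localpath, sha256), build) ->
       Bos.OS.Path.stat (Fpath.v localpath) >>= fun stats ->
       let size = Int64.of_int stats.Unix.st_size in
       Db.exec insert_new_build_artifact
         (id, (filepath, localpath, sha256, size), build))
    rows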
@@ -1,22 +1,21 @@
 module Rep = Builder_db.Rep

+open Grej.Infix
+
 let broken_builds =
-  Caqti_request.collect ~oneshot:true
-    Caqti_type.unit
-    (Caqti_type.tup3 Rep.id Rep.uuid Caqti_type.string)
+  Caqti_type.unit ->* Caqti_type.t3 (Rep.id `build) Rep.uuid Caqti_type.string @@
    {| SELECT b.id, b.uuid, job.name FROM build b, job
       WHERE result_kind = 0 AND result_code = 0 AND main_binary IS NOT NULL AND job.id = b.job AND
         (SELECT COUNT(*) FROM build_artifact a
          WHERE a.build = b.id and a.filepath = b.main_binary) = 0
    |}

-let fixup _datadir (module Db : Caqti_blocking.CONNECTION) =
-  let open Rresult.R.Infix in
+let fixup datadir (module Db : Caqti_blocking.CONNECTION) =
   Grej.check_version ~user_version:3L (module Db) >>= fun () ->
   Db.rev_collect_list broken_builds () >>= fun broken_builds ->
   Grej.list_iter_result
-    (fun ((build, uuid, job_name) : Rep.id * Uuidm.t * string) ->
-      Format.printf "Removing job %a.\nPlease clean up data files in /var/db/builder-web/%s/%a\n"
-        Uuidm.pp uuid job_name Uuidm.pp uuid;
+    (fun ((build, uuid, job_name) : [`build] Rep.id * Uuidm.t * string) ->
+      Format.printf "Removing job %a.\nPlease clean up data files in %a/%s/%a\n"
+        Uuidm.pp uuid Fpath.pp datadir job_name Uuidm.pp uuid;
      Db.exec Builder_db.Build.remove build)
     broken_builds
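The interesting part of this fixup's diff is the id type: a bare Rep.id becomes Rep.id `build on the Caqti side and [`build] Rep.id on the OCaml side, so a build id can no longer be confused with, say, a build_artifact id, while the runtime representation stays a plain integer. A sketch of how such a phantom-typed id can be defined (an assumption about Builder_db.Rep; its actual definition may differ):

(* Sketch of phantom-typed row ids. *)
module Rep : sig
  type 'a id
  val id : 'a -> 'a id Caqti_type.t
  val untyped_id : 'a id Caqti_type.t
end = struct
  type 'a id = int64
  let id (_ : 'a) : 'a id Caqti_type.t = Caqti_type.int64
  let untyped_id = Caqti_type.int64
end

(* ids of different tables no longer unify: *)
let _build_id : [ `build ] Rep.id Caqti_type.t = Rep.id `build
let _artifact_id : [ `build_artifact ] Rep.id Caqti_type.t = Rep.id `build_artifact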
@@ -3,17 +3,15 @@ let identifier = "2021-04-27"
 let migrate_doc = "add index idx_build_job_start on build"
 let rollback_doc = "rollback index idx_build_job_start on build"

-open Rresult.R.Infix
+open Grej.Infix

 let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
   let idx_build_job_start =
-    Caqti_request.exec ~oneshot:true
-      Caqti_type.unit
+    Caqti_type.unit ->. Caqti_type.unit @@
       "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)"
   in
   let rm_job_build_idx =
-    Caqti_request.exec ~oneshot:true
-      Caqti_type.unit
+    Caqti_type.unit ->. Caqti_type.unit @@
       "DROP INDEX IF EXISTS job_build_idx"
   in
   Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
@@ -22,13 +20,11 @@ let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =

 let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
   let job_build_idx =
-    Caqti_request.exec ~oneshot:true
-      Caqti_type.unit
+    Caqti_type.unit ->. Caqti_type.unit @@
       "CREATE INDEX job_build_idx ON build(job)"
   in
   let rm_idx_build_job_start =
-    Caqti_request.exec ~oneshot:true
-      Caqti_type.unit
+    Caqti_type.unit ->. Caqti_type.unit @@
       "DROP INDEX IF EXISTS idx_build_job_start"
   in
   Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
@@ -4,21 +4,19 @@ let identifier = "2021-05-31"
 let migrate_doc = "remove datadir prefix from build_artifact.localpath"
 let rollback_doc = "add datadir prefix to build_artifact.localpath"

+open Grej.Infix
+
 let build_artifacts =
-  Caqti_request.collect ~oneshot:true
-    Caqti_type.unit
-    Caqti_type.(tup2 Builder_db.Rep.id Builder_db.Rep.fpath)
+  Caqti_type.unit ->* Caqti_type.t2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath @@
    "SELECT id, localpath FROM build_artifact"

 let build_artifact_update_localpath =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.(tup2 Builder_db.Rep.id Builder_db.Rep.fpath)
-    "UPDATE build_artifact SET localpath = ?2 WHERE id = ?1"
+  Caqti_type.t2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath ->. Caqti_type.unit @@
+  "UPDATE build_artifact SET localpath = $2 WHERE id = $1"

 (* We are not migrating build_file because it is unused *)

 let migrate datadir (module Db : Caqti_blocking.CONNECTION) =
-  let open Rresult.R.Infix in
   Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
   Db.collect_list build_artifacts () >>= fun artifacts ->
   Grej.list_iter_result (fun (id, localpath) ->
@@ -29,7 +27,6 @@ let migrate datadir (module Db : Caqti_blocking.CONNECTION) =
   Db.exec (Grej.set_version new_version) ()

 let rollback datadir (module Db : Caqti_blocking.CONNECTION) =
-  let open Rresult.R.Infix in
   Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
   Db.collect_list build_artifacts () >>= fun artifacts ->
   Grej.list_iter_result (fun (id, localpath) ->
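Two details worth noting in this migration: the SQL placeholders move from the ?1/?2 style to $1/$2, and each stored localpath is rewritten relative to the datadir. The per-row rewrite is outside the hunk, but it presumably rests on Fpath prefix arithmetic along these lines (a sketch, not the elided code):

(* Sketch: strip datadir from one absolute localpath; the rollback
   direction would Fpath.append it back on. *)
let relativize ~datadir localpath =
  match Fpath.rem_prefix datadir localpath with
  | Some rel -> Ok rel
  | None ->
    Error (`Msg (Fmt.str "%a is not below %a" Fpath.pp localpath Fpath.pp datadir))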
@@ -3,13 +3,14 @@ let identifier = "2021-06-02"
 let migrate_doc = "build.main_binary foreign key"
 let rollback_doc = "build.main_binary filepath"

+open Grej.Infix
+
 let idx_build_job_start =
-  Caqti_request.exec Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)"

 let new_build =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE new_build (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         uuid VARCHAR(36) NOT NULL UNIQUE,
@@ -31,8 +32,7 @@ let new_build =
   |}

 let old_build =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE new_build (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         uuid VARCHAR(36) NOT NULL UNIQUE,
@@ -53,67 +53,64 @@ let old_build =
   |}

 let collect_old_build =
-  Caqti_request.collect ~oneshot:true
-    Caqti_type.unit
-    Caqti_type.(tup3 Builder_db.Rep.id
-      (tup3 (tup4 string int64 int64 int64) (tup4 int64 int (option int) (option string)) (tup3 octets string (option string)))
-      Builder_db.Rep.id)
+  Caqti_type.unit ->*
+  Caqti_type.(t3 Builder_db.Rep.untyped_id
+    (t3 (t4 string int64 int64 int64)
+      (t4 int64 int (option int) (option string))
+      (t3 octets string (option string)))
+    Builder_db.Rep.untyped_id) @@
  {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg,
       console, script, main_binary, job
     FROM build |}

 let insert_new_build =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.(tup3 Builder_db.Rep.id
-      (tup3 (tup4 string int64 int64 int64) (tup4 int64 int (option int) (option string)) (tup3 octets string (option Builder_db.Rep.id)))
-      Builder_db.Rep.id)
+  Caqti_type.(t3 Builder_db.Rep.untyped_id
+    (t3 (t4 string int64 int64 int64)
+      (t4 int64 int (option int) (option string))
+      (t3 octets string (option Builder_db.Rep.untyped_id)))
+    Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
  {| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind,
       result_code, result_msg, console, script, main_binary, job)
     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) |}

 let drop_build =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
    "DROP TABLE build"

 let rename_build =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
    "ALTER TABLE new_build RENAME TO build"

 let find_main_artifact_id =
-  Caqti_request.find ~oneshot:true
-    Caqti_type.(tup2 Builder_db.Rep.id string)
-    Builder_db.Rep.id
-    "SELECT id FROM build_artifact WHERE build = ?1 AND filepath = ?2"
+  Caqti_type.(t2 Builder_db.Rep.untyped_id string) ->! Builder_db.Rep.untyped_id @@
+  "SELECT id FROM build_artifact WHERE build = $1 AND filepath = $2"

 let find_main_artifact_filepath =
-  Caqti_request.find ~oneshot:true
-    Builder_db.Rep.id
-    Caqti_type.string
+  Builder_db.Rep.untyped_id ->! Caqti_type.string @@
   "SELECT filepath FROM build_artifact WHERE id = ?"

 let collect_new_build =
-  Caqti_request.collect ~oneshot:true
-    Caqti_type.unit
-    Caqti_type.(tup3 Builder_db.Rep.id
-      (tup3 (tup4 string int64 int64 int64) (tup4 int64 int (option int) (option string)) (tup3 octets string (option Builder_db.Rep.id)))
-      Builder_db.Rep.id)
+  Caqti_type.unit ->*
+  Caqti_type.(t3 Builder_db.Rep.untyped_id
+    (t3 (t4 string int64 int64 int64)
+      (t4 int64 int (option int) (option string))
+      (t3 octets string (option Builder_db.Rep.untyped_id)))
+    Builder_db.Rep.untyped_id) @@
  {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg,
       console, script, main_binary, job
     FROM build |}

 let insert_old_build =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.(tup3 Builder_db.Rep.id
-      (tup3 (tup4 string int64 int64 int64) (tup4 int64 int (option int) (option string)) (tup3 octets string (option string)))
-      Builder_db.Rep.id)
+  Caqti_type.(t3 Builder_db.Rep.untyped_id
+    (t3 (t4 string int64 int64 int64)
+      (t4 int64 int (option int) (option string))
+      (t3 octets string (option string)))
+    Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
  {| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind,
       result_code, result_msg, console, script, main_binary, job)
     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) |}

 let migrate _ (module Db : Caqti_blocking.CONNECTION) =
-  let open Rresult.R.Infix in
   Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
   Db.exec new_build () >>= fun () ->
   Db.rev_collect_list collect_old_build () >>= fun builds ->
@@ -133,7 +130,6 @@ let migrate _ (module Db : Caqti_blocking.CONNECTION) =

 let rollback _ (module Db : Caqti_blocking.CONNECTION) =
-  let open Rresult.R.Infix in
   Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
   Db.exec old_build () >>= fun () ->
   Db.rev_collect_list collect_new_build () >>= fun builds ->
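Here main_binary changes representation from a filepath string to a foreign key into build_artifact, so the migration must, for each build row, resolve the stored filepath to the matching artifact id before inserting into the new table. The loop over builds falls outside the hunk; a sketch of the per-row step, reusing the requests defined above (the row shape follows collect_old_build's Caqti type):

(* Sketch of the per-row conversion from filepath to artifact id. *)
let convert_row (module Db : Caqti_blocking.CONNECTION)
    (id, (dates, result, (console, script, main_binary)), job) =
  let ( >>= ) = Result.bind in
  match main_binary with
  | None ->
    Db.exec insert_new_build (id, (dates, result, (console, script, None)), job)
  | Some filepath ->
    Db.find find_main_artifact_id (id, filepath) >>= fun artifact_id ->
    Db.exec insert_new_build
      (id, (dates, result, (console, script, Some artifact_id)), job)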
@@ -3,9 +3,10 @@ let identifier = "2021-06-08"
 let migrate_doc = "add access list"
 let rollback_doc = "remove access list"

+open Grej.Infix
+
 let new_user =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE new_user (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         username VARCHAR(255) NOT NULL UNIQUE,
@@ -19,8 +20,7 @@ let new_user =
   |}

 let old_user =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE new_user (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         username VARCHAR(255) NOT NULL UNIQUE,
@@ -33,40 +33,35 @@ let old_user =
   |}

 let collect_old_user =
-  Caqti_request.collect
-    Caqti_type.unit
-    Caqti_type.(tup4 int64 string (tup2 octets octets) (tup3 int64 int64 int64))
+  Caqti_type.unit ->*
+  Caqti_type.(t4 int64 string (t2 octets octets) (t3 int64 int64 int64)) @@
   "SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p FROM user"

 let collect_new_user =
-  Caqti_request.collect
-    Caqti_type.unit
-    Caqti_type.(tup4 int64 string (tup2 octets octets) (tup4 int64 int64 int64 bool))
+  Caqti_type.unit ->*
+  Caqti_type.(t4 int64 string (t2 octets octets) (t4 int64 int64 int64 bool)) @@
   "SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted FROM user"

 let insert_new_user =
-  Caqti_request.exec
-    Caqti_type.(tup4 int64 string (tup2 octets octets) (tup4 int64 int64 int64 bool))
+  Caqti_type.(t4 int64 string (t2 octets octets) (t4 int64 int64 int64 bool)) ->.
+  Caqti_type.unit @@
   "INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"

 let insert_old_user =
-  Caqti_request.exec
-    Caqti_type.(tup4 int64 string (tup2 octets octets) (tup3 int64 int64 int64))
+  Caqti_type.(t4 int64 string (t2 octets octets) (t3 int64 int64 int64)) ->.
+  Caqti_type.unit @@
   "INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p) VALUES (?, ?, ?, ?, ?, ?, ?)"

 let drop_user =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "DROP TABLE user"

 let rename_new_user =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "ALTER TABLE new_user RENAME TO user"

 let access_list =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE access_list (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         user INTEGER NOT NULL,
@@ -79,12 +74,9 @@ let access_list =
   |}

 let rollback_access_list =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "DROP TABLE IF EXISTS access_list"

-open Rresult.R.Infix
-
 let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
   Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
   Db.exec new_user () >>= fun () ->
@@ -3,24 +3,23 @@ let identifier = "2021-06-09"
 let migrate_doc = "add user column to build"
 let rollback_doc = "remove user column from build"

+open Grej.Infix
+
 let idx_build_job_start =
-  Caqti_request.exec Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)"

 let nologin_user =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "INSERT INTO user (username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) \
    VALUES ('nologin', x'', x'', 16384, 8, 1, true)"

 let remove_nologin_user =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "DELETE FROM user WHERE username = 'nologin'"

 let new_build =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE new_build (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         uuid VARCHAR(36) NOT NULL UNIQUE,
@@ -44,8 +43,7 @@ let new_build =
   |}

 let old_build =
-  Caqti_request.exec
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE new_build (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         uuid VARCHAR(36) NOT NULL UNIQUE,
@@ -67,8 +65,7 @@ let old_build =
   |}

 let insert_from_old_build =
-  Caqti_request.exec ~oneshot:true
-    Builder_db.Rep.id
+  Builder_db.Rep.id (`user : [`user]) ->. Caqti_type.unit @@
    {| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps,
         result_kind, result_code, result_msg, console,
         script, main_binary, job, user)
@@ -77,8 +74,7 @@ let insert_from_old_build =
       FROM build |}

 let insert_from_new_build =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
    {| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps,
         result_kind, result_code, result_msg, console,
         script, main_binary, job)
@@ -87,17 +83,13 @@ let insert_from_new_build =
       FROM build |}

 let drop_build =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "DROP TABLE build"

 let rename_build =
-  Caqti_request.exec ~oneshot:true
-    Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
   "ALTER TABLE new_build RENAME TO build"

-open Rresult.R.Infix
-
 let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
   Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
   Db.exec nologin_user () >>= fun () ->
bin/migrations/m20210625.ml (new file, 35 lines)
@@ -0,0 +1,35 @@
let new_version = 8L and old_version = 7L
let identifier = "2021-06-25"
let migrate_doc = "drop build_file table"
let rollback_doc = "recreate build_file table"

open Grej.Infix

let build_file =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE build_file (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       filepath TEXT NOT NULL, -- the path as in the build
       localpath TEXT NOT NULL, -- local path to the file on disk
       sha256 BLOB NOT NULL,
       size INTEGER NOT NULL,
       build INTEGER NOT NULL,

       FOREIGN KEY(build) REFERENCES build(id),
       UNIQUE(build, filepath)
     )
  |}

let drop_build_file =
  Caqti_type.unit ->. Caqti_type.unit @@
  "DROP TABLE build_file"

let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
  Db.exec drop_build_file () >>= fun () ->
  Db.exec (Grej.set_version new_version) ()

let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
  Db.exec build_file () >>= fun () ->
  Db.exec (Grej.set_version old_version) ()
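Every migration and rollback in the series brackets its statements between Grej.check_version and Grej.set_version, so a half-applied database is never mistaken for either version. Those helpers presumably wrap SQLite's user_version pragma roughly as follows (names and SQL are assumptions about Grej; pragmas cannot take bound parameters, hence the sprintf):

(* Sketch of the user_version plumbing; the real Grej may differ. *)
open Caqti_request.Infix

let get_version =
  Caqti_type.unit ->! Caqti_type.int64 @@
  "PRAGMA user_version"

let set_version version =
  Caqti_type.unit ->. Caqti_type.unit @@
  Printf.sprintf "PRAGMA user_version = %Ld" version

let check_version ~user_version (module Db : Caqti_blocking.CONNECTION) =
  Result.bind (Db.find get_version ()) (fun v ->
      if Int64.equal v user_version
      then Ok ()
      else Error (`Msg (Printf.sprintf "expected user_version %Ld, found %Ld"
                          user_version v)))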
bin/migrations/m20210629.ml (new file, 153 lines)
@@ -0,0 +1,153 @@
let new_version = 9L and old_version = 8L
let identifier = "2021-06-29"
let migrate_doc = "add tag and job_tag table"
let rollback_doc = "remove tag and job tag table"

open Grej.Infix

let tag =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE tag (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       tag VARCHAR(255) NOT NULL UNIQUE
     )
  |}

let job_tag =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE job_tag (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       tag INTEGER NOT NULL,
       value TEXT NOT NULL,
       job INTEGER NOT NULL,

       FOREIGN KEY(job) REFERENCES job(id),
       FOREIGN KEY(tag) REFERENCES tag(id),
       UNIQUE(tag, job)
     )
  |}

let jobs =
  Caqti_type.unit ->* Builder_db.Rep.untyped_id @@
  "SELECT id FROM job"

let latest_successful_build =
  Builder_db.Rep.untyped_id ->? Builder_db.Rep.untyped_id @@
  {| SELECT b.id
     FROM build b
     WHERE b.job = ? AND b.result_kind = 0 AND b.result_code = 0
     ORDER BY b.start_d DESC, b.start_ps DESC
     LIMIT 1
  |}

let build_artifacts =
  Builder_db.Rep.untyped_id ->*
  Caqti_type.t2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
  {| SELECT a.filepath, a.localpath
     FROM build_artifact a
     WHERE a.build = ?
  |}

let infer_section_and_synopsis artifacts =
  let opam_switch =
    match List.find_opt (fun (p, _) -> String.equal (Fpath.basename p) "opam-switch") artifacts with
    | None -> None
    | Some (_, data) -> Some (OpamFile.SwitchExport.read_from_string data)
  in
  let infer_synopsis_and_descr switch =
    let root = switch.OpamFile.SwitchExport.selections.OpamTypes.sel_roots in
    if OpamPackage.Set.cardinal root <> 1 then
      None, None
    else
      let root = OpamPackage.Set.choose root in
      match OpamPackage.Name.Map.find_opt root.OpamPackage.name switch.OpamFile.SwitchExport.overlays with
      | None -> None, None
      | Some opam -> OpamFile.OPAM.synopsis opam, OpamFile.OPAM.descr_body opam
  in
  let infer_section_from_packages switch =
    let influx = OpamPackage.Name.of_string "metrics-influx" in
    if OpamPackage.Set.exists (fun p -> OpamPackage.Name.equal p.OpamPackage.name influx)
         switch.OpamFile.SwitchExport.selections.OpamTypes.sel_installed
    then
      "Unikernel (with metrics reported to Influx)"
    else
      "Unikernel"
  in
  let infer_section_from_extension =
    match List.find_opt (fun (p, _) -> Fpath.(is_prefix (v "bin/") p)) artifacts with
    | None -> None
    | Some (p, _) ->
      match Fpath.get_ext p with
      | ".deb" -> Some "Debian Package"
      | ".txz" -> Some "FreeBSD Package"
      | _ -> None
  in
  match opam_switch with
  | None -> None, (None, None)
  | Some opam_switch ->
    let section =
      match infer_section_from_extension with
      | Some x -> x
      | None -> infer_section_from_packages opam_switch
    in
    Some section, infer_synopsis_and_descr opam_switch

let remove_tag =
  Caqti_type.unit ->. Caqti_type.unit @@
  "DROP TABLE tag"

let remove_job_tag =
  Caqti_type.unit ->. Caqti_type.unit @@
  "DROP TABLE job_tag"

let insert_tag =
  Caqti_type.string ->. Caqti_type.unit @@
  "INSERT INTO tag (tag) VALUES (?)"

let insert_job_tag =
  Caqti_type.(t3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
  Caqti_type.unit @@
  "INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)"

let find_tag =
  Caqti_type.string ->! Builder_db.Rep.untyped_id @@
  "SELECT id FROM tag where tag = ?"

let migrate datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
  Db.exec tag () >>= fun () ->
  Db.exec job_tag () >>= fun () ->
  Db.exec insert_tag "section" >>= fun () ->
  Db.exec insert_tag "synopsis" >>= fun () ->
  Db.exec insert_tag "description" >>= fun () ->
  Db.find find_tag "section" >>= fun section_id ->
  Db.find find_tag "synopsis" >>= fun synopsis_id ->
  Db.find find_tag "description" >>= fun descr_id ->
  Db.collect_list jobs () >>= fun jobs ->
  Grej.list_iter_result (fun job ->
      Db.find_opt latest_successful_build job >>= function
      | None ->
        Ok ()
      | Some build ->
        Db.collect_list build_artifacts build >>= fun artifacts ->
        List.fold_left (fun acc (fpath, lpath) ->
            acc >>= fun acc ->
            Bos.OS.File.read Fpath.(append datadir lpath) >>= fun data ->
            Ok ((fpath, data) :: acc))
          (Ok [])
          artifacts >>= fun files ->
        let sec_syn = infer_section_and_synopsis files in
        (match fst sec_syn with None -> Ok () | Some s -> Db.exec insert_job_tag (section_id, s, job)) >>= fun () ->
        (match snd sec_syn with None, _ -> Ok () | Some s, _ -> Db.exec insert_job_tag (synopsis_id, s, job)) >>= fun () ->
        (match snd sec_syn with _, None -> Ok () | _, Some s -> Db.exec insert_job_tag (descr_id, s, job)))
    jobs >>= fun () ->
  Db.exec (Grej.set_version new_version) ()

let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
  Db.exec remove_tag () >>= fun () ->
  Db.exec remove_job_tag () >>= fun () ->
  Db.exec (Grej.set_version old_version) ()
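Grej.list_iter_result, used in every data-rewriting migration of this series, threads the result monad through a list and stops at the first error. A plausible definition (an assumption; the real helper may differ):

(* Sketch: run f on each element, short-circuiting on the first Error. *)
let list_iter_result f xs =
  List.fold_left
    (fun acc x -> Result.bind acc (fun () -> f x))
    (Ok ()) xs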
bin/migrations/m20210630.ml (new file, 87 lines)
@@ -0,0 +1,87 @@
let new_version = 10L and old_version = 9L
let identifier = "2021-06-30"
let migrate_doc = "add readme.md tag"
let rollback_doc = "remove readme.md tag"

open Grej.Infix

let jobs =
  Caqti_type.unit ->* Builder_db.Rep.untyped_id @@
  "SELECT id FROM job"

let latest_successful_build =
  Builder_db.Rep.untyped_id ->? Builder_db.Rep.untyped_id @@
  {| SELECT b.id
     FROM build b
     WHERE b.job = ? AND b.result_kind = 0 AND b.result_code = 0
     ORDER BY b.start_d DESC, b.start_ps DESC
     LIMIT 1
  |}

let build_artifacts =
  Builder_db.Rep.untyped_id ->*
  Caqti_type.t2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
  {| SELECT a.filepath, a.localpath
     FROM build_artifact a
     WHERE a.build = ?
  |}

let insert_tag =
  Caqti_type.string ->. Caqti_type.unit @@
  "INSERT INTO tag (tag) VALUES (?)"

let insert_job_tag =
  Caqti_type.(t3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
  Caqti_type.unit @@
  "INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)"

let find_tag =
  Caqti_type.string ->! Builder_db.Rep.untyped_id @@
  "SELECT id FROM tag where tag = ?"

let remove_job_tag =
  Builder_db.Rep.untyped_id ->. Caqti_type.unit @@
  "DELETE FROM job_tag where tag = ?"

let remove_tag =
  Builder_db.Rep.untyped_id ->. Caqti_type.unit @@
  "DELETE FROM tag where id = ?"

let migrate datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
  Db.exec insert_tag "readme.md" >>= fun () ->
  Db.find find_tag "readme.md" >>= fun readme_id ->
  Db.collect_list jobs () >>= fun jobs ->
  Grej.list_iter_result (fun job ->
      Db.find_opt latest_successful_build job >>= function
      | None -> Ok ()
      | Some build ->
        Db.collect_list build_artifacts build >>= fun artifacts ->
        List.fold_left (fun acc (fpath, lpath) ->
            acc >>= fun acc ->
            Bos.OS.File.read Fpath.(append datadir lpath) >>= fun data ->
            Ok ((fpath, data) :: acc))
          (Ok [])
          artifacts >>= fun files ->
        let readme =
          List.find_opt (fun (p, _) -> Fpath.(equal (v "README.md") p)) files
        in
        let readme_anywhere =
          List.find_opt (fun (p, _) -> String.equal "README.md" (Fpath.basename p)) files
        in
        (match readme, readme_anywhere with
         | None, None -> Ok ()
         | Some (_, data), _ | None, Some (_, data) ->
           Db.exec insert_job_tag (readme_id, data, job)))
    jobs >>= fun () ->
  Db.exec (Grej.set_version new_version) ()

let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
  Db.find find_tag "readme.md" >>= fun readme_id ->
  Db.exec remove_job_tag readme_id >>= fun () ->
  Db.exec remove_tag readme_id >>= fun () ->
  Db.exec (Grej.set_version old_version) ()
bin/migrations/m20210701.ml (new file, 88 lines)
@@ -0,0 +1,88 @@
let new_version = 11L and old_version = 10L
let identifier = "2021-07-01"
let migrate_doc = "build.main_binary deferred foreign key constraint"
let rollback_doc = "build.main_binary immediate foreign key constraint"

open Grej.Infix

let idx_build_job_start =
  Caqti_type.unit ->. Caqti_type.unit @@
  "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)"

let new_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       uuid VARCHAR(36) NOT NULL UNIQUE,
       start_d INTEGER NOT NULL,
       start_ps INTEGER NOT NULL,
       finish_d INTEGER NOT NULL,
       finish_ps INTEGER NOT NULL,
       result_kind TINYINT NOT NULL,
       result_code INTEGER,
       result_msg TEXT,
       console BLOB NOT NULL,
       script TEXT NOT NULL,
       main_binary INTEGER,
       user INTEGER NOT NULL,
       job INTEGER NOT NULL,

       FOREIGN KEY(main_binary) REFERENCES build_artifact(id) DEFERRABLE INITIALLY DEFERRED,
       FOREIGN KEY(user) REFERENCES user(id),
       FOREIGN KEY(job) REFERENCES job(id)
     )
  |}

let old_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       uuid VARCHAR(36) NOT NULL UNIQUE,
       start_d INTEGER NOT NULL,
       start_ps INTEGER NOT NULL,
       finish_d INTEGER NOT NULL,
       finish_ps INTEGER NOT NULL,
       result_kind TINYINT NOT NULL,
       result_code INTEGER,
       result_msg TEXT,
       console BLOB NOT NULL,
       script TEXT NOT NULL,
       main_binary INTEGER,
       user INTEGER NOT NULL,
       job INTEGER NOT NULL,

       FOREIGN KEY(main_binary) REFERENCES build_artifact(id),
       FOREIGN KEY(user) REFERENCES user(id),
       FOREIGN KEY(job) REFERENCES job(id)
     )
  |}

let copy_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "INSERT INTO new_build SELECT * from build"

let drop_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "DROP TABLE build"

let rename_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "ALTER TABLE new_build RENAME TO build"

let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
  Db.exec new_build () >>= fun () ->
  Db.exec copy_build () >>= fun () ->
  Db.exec drop_build () >>= fun () ->
  Db.exec rename_build () >>= fun () ->
  Db.exec idx_build_job_start () >>= fun () ->
  Db.exec (Grej.set_version new_version) ()

let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
  Db.exec old_build () >>= fun () ->
  Db.exec copy_build () >>= fun () ->
  Db.exec drop_build () >>= fun () ->
  Db.exec rename_build () >>= fun () ->
  Db.exec idx_build_job_start () >>= fun () ->
  Db.exec (Grej.set_version old_version) ()
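The point of DEFERRABLE INITIALLY DEFERRED is that the foreign-key check moves from each statement to COMMIT: the build row can be written before the artifact it will eventually reference as main_binary, as long as both exist by the end of the transaction. A toy demonstration of the semantics (illustrative table names, not the real schema):

(* Toy demo: inside a transaction the child row may precede its parent;
   with an immediate FK the first INSERT would fail instead. *)
open Caqti_request.Infix

let demo (module Db : Caqti_blocking.CONNECTION) =
  let ( >>= ) = Result.bind in
  let exec sql = Db.exec (Caqti_type.unit ->. Caqti_type.unit @@ sql) () in
  exec "PRAGMA foreign_keys = ON" >>= fun () ->
  exec "CREATE TABLE parent (id INTEGER PRIMARY KEY)" >>= fun () ->
  exec {| CREATE TABLE child (
            id INTEGER PRIMARY KEY,
            parent INTEGER NOT NULL
              REFERENCES parent(id) DEFERRABLE INITIALLY DEFERRED
          ) |} >>= fun () ->
  Db.start () >>= fun () ->
  exec "INSERT INTO child VALUES (1, 1)" >>= fun () ->  (* parent row missing *)
  exec "INSERT INTO parent VALUES (1)" >>= fun () ->
  Db.commit ()  (* the constraint is checked here and passes *)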
bin/migrations/m20210706.ml (new file, 91 lines)
@@ -0,0 +1,91 @@
let new_version = 12L and old_version = 11L
let identifier = "2021-07-06"
let migrate_doc = "add a input_id column to the build table"
let rollback_doc = "remove the input_id column from the build table"

open Grej.Infix

let add_input_id_to_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "ALTER TABLE build ADD COLUMN input_id BLOB"

let idx_build_job_start =
  Caqti_type.unit ->. Caqti_type.unit @@
  "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)"

let old_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       uuid VARCHAR(36) NOT NULL UNIQUE,
       start_d INTEGER NOT NULL,
       start_ps INTEGER NOT NULL,
       finish_d INTEGER NOT NULL,
       finish_ps INTEGER NOT NULL,
       result_kind TINYINT NOT NULL,
       result_code INTEGER,
       result_msg TEXT,
       console BLOB NOT NULL,
       script TEXT NOT NULL,
       main_binary INTEGER,
       user INTEGER NOT NULL,
       job INTEGER NOT NULL,

       FOREIGN KEY(main_binary) REFERENCES build_artifact(id) DEFERRABLE INITIALLY DEFERRED,
       FOREIGN KEY(user) REFERENCES user(id),
       FOREIGN KEY(job) REFERENCES job(id)
     )
  |}

let copy_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "INSERT INTO new_build SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg, console, script, main_binary, user, job FROM build"

let drop_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "DROP TABLE build"

let rename_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "ALTER TABLE new_build RENAME TO build"

let drop_input_id_from_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| ALTER TABLE build DROP COLUMN input_id |}

let builds =
  Caqti_type.unit ->*
  Caqti_type.t4
    Builder_db.Rep.untyped_id
    Caqti_type.octets
    Caqti_type.octets
    Caqti_type.octets @@
  {| SELECT b.id, opam.sha256, env.sha256, system.sha256
     FROM build b, build_artifact opam, build_artifact env, build_artifact system
     WHERE opam.filepath = 'opam-switch' AND env.filepath = 'build-environment'
       AND system.filepath = 'system-packages'
       AND opam.build = b.id AND env.build = b.id AND system.build = b.id
  |}

let set_input_id =
  Caqti_type.t2 Builder_db.Rep.untyped_id Caqti_type.octets ->. Caqti_type.unit @@
  "UPDATE build SET input_id = $2 WHERE id = $1"

let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
  Db.exec add_input_id_to_build () >>= fun () ->
  Db.collect_list builds () >>= fun builds ->
  Grej.list_iter_result (fun (id, opam_sha, env_sha, pkg_sha) ->
      let input_id = Digestif.SHA256.(to_raw_string (digestv_string [ opam_sha ; env_sha ; pkg_sha ])) in
      Db.exec set_input_id (id, input_id))
    builds >>= fun () ->
  Db.exec (Grej.set_version new_version) ()

let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
  Db.exec old_build () >>= fun () ->
  Db.exec copy_build () >>= fun () ->
  Db.exec drop_build () >>= fun () ->
  Db.exec rename_build () >>= fun () ->
  Db.exec idx_build_job_start () >>= fun () ->
  Db.exec (Grej.set_version old_version) ()
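The input_id identifies what went into a build: one SHA-256 over the raw digests of the opam-switch, build-environment and system-packages artifacts, so two builds with equal input_id consumed identical inputs. The derivation, factored out of the migrate body above:

(* Matches the digestv_string call in migrate above. *)
let input_id ~opam_sha ~env_sha ~pkg_sha =
  Digestif.SHA256.(to_raw_string (digestv_string [ opam_sha; env_sha; pkg_sha ]))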
bin/migrations/m20210707a.ml (new file, 17 lines)
@@ -0,0 +1,17 @@
open Grej.Infix

let orb_left_in_builds =
  Caqti_type.unit ->*
  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
  {| SELECT id, localpath FROM build_artifact
     WHERE filepath = 'orb.deb' OR filepath = 'orb.txz'
  |}

let fixup datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:12L (module Db) >>= fun () ->
  Db.rev_collect_list orb_left_in_builds () >>= fun leftover_orb ->
  Grej.list_iter_result
    (fun (id, path) ->
       Bos.OS.File.delete (Fpath.append datadir path) >>= fun () ->
       Db.exec Builder_db.Build_artifact.remove id)
    leftover_orb
bin/migrations/m20210707b.ml (new file, 49 lines)
@@ -0,0 +1,49 @@
open Grej.Infix

let deb_debug_left_in_builds =
  Caqti_type.unit ->*
  Caqti_type.t4 (Builder_db.Rep.id `build_artifact) (Builder_db.Rep.id `build)
    Builder_db.Rep.fpath Builder_db.Rep.fpath @@
  {| SELECT id, build, localpath, filepath FROM build_artifact
     WHERE filepath LIKE '%.deb.debug'
  |}

let get_main_binary =
  Builder_db.Rep.id `build ->? Builder_db.Rep.id `build_artifact @@
  "SELECT main_binary FROM build WHERE id = ?"

let get_localpath =
  Builder_db.Rep.id `build_artifact ->! Builder_db.Rep.fpath @@
  "SELECT localpath FROM build_artifact WHERE id = ?"

let update_paths =
  Caqti_type.t3 (Builder_db.Rep.id `build_artifact)
    Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
  Caqti_type.unit @@
  "UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1"

let fixup datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:12L (module Db) >>= fun () ->
  Db.rev_collect_list deb_debug_left_in_builds () >>= fun leftover_debug ->
  Grej.list_iter_result
    (fun (id, build_id, path, fpath) ->
       (Db.find_opt get_main_binary build_id >>= function
         | None -> Ok (fun () -> Ok ())
         | Some main_id ->
           Db.find get_localpath main_id >>= fun lpath ->
           Logs.info (fun m -> m "deleting %a" Fpath.pp lpath);
           Bos.OS.File.delete (Fpath.append datadir lpath) >>= fun () ->
           Ok (fun () -> Db.exec Builder_db.Build_artifact.remove main_id)) >>= fun later ->
       Db.exec Builder_db.Build.set_main_binary (build_id, id) >>= fun () ->
       let new_path p =
         let fname = Fpath.(filename (rem_ext p)) in
         let dir = Fpath.(parent p) in
         Fpath.(dir / "bin" / fname)
       in
       Db.exec update_paths (id, new_path path, new_path fpath) >>= fun () ->
       let o = Fpath.append datadir path and n = Fpath.append datadir (new_path path) in
       Logs.info (fun m -> m "renaming %a to %a" Fpath.pp o Fpath.pp n);
       Result.map_error (fun e -> `Msg (Fmt.str "%a" Bos.OS.U.pp_error e))
         (Bos.OS.U.rename o n) >>= fun () ->
       later ())
    leftover_debug
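One subtlety above: the old main binary's row must not be removed before set_main_binary repoints the build, so the fixup first decides what to do, performs the file deletion immediately, and captures the database removal in a closure (later) that only runs after the update. The shape of that idiom:

(* Shape of the deferred-action idiom: 'decide' performs any immediate
   side effects and returns the action to run after 'work' succeeds. *)
let deferred decide work =
  let ( >>= ) = Result.bind in
  decide () >>= fun later ->
  work () >>= fun () ->
  later ()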
bin/migrations/m20210707c.ml (new file, 60 lines)
@@ -0,0 +1,60 @@
open Grej.Infix

let all_builds_with_binary : (unit, [`build] Builder_db.Rep.id * [`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
  Caqti_type.unit ->*
  Caqti_type.t4 (Builder_db.Rep.id `build) (Builder_db.Rep.id `build_artifact)
    Builder_db.Rep.fpath Builder_db.Rep.fpath @@
  "SELECT b.id, b.main_binary, a.filepath, a.localpath FROM build b, build_artifact a WHERE b.main_binary = a.id AND b.main_binary IS NOT NULL"

let build_not_stripped : ([`build] Builder_db.Rep.id, [`build_artifact] Builder_db.Rep.id, [ `Zero | `One ]) Caqti_request.t =
  Builder_db.Rep.id `build ->? Builder_db.Rep.id `build_artifact @@
  "SELECT id FROM build_artifact WHERE build = ? AND filepath LIKE '%.debug'"

let update_paths : ([`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, unit, [ `Zero ]) Caqti_request.t =
  Caqti_type.t3 (Builder_db.Rep.id `build_artifact)
    Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
  Caqti_type.unit @@
  "UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1"

let add_artifact : ((Fpath.t * Fpath.t * string) * (int64 * [`build] Builder_db.Rep.id), unit, [ `Zero]) Caqti_request.t =
  Caqti_type.(t2 (t3 Builder_db.Rep.fpath Builder_db.Rep.fpath Caqti_type.octets)
    (t2 Caqti_type.int64 (Builder_db.Rep.id `build))) ->.
  Caqti_type.unit @@
  "INSERT INTO build_artifact (filepath, localpath, sha256, size, build) VALUES (?, ?, ?, ?, ?)"

let fixup datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:12L (module Db) >>= fun () ->
  Db.rev_collect_list all_builds_with_binary () >>= fun builds ->
  Grej.list_iter_result
    (fun (build_id, artifact_id, artifact_fpath, artifact_lpath) ->
       if Fpath.has_ext ".hvt" artifact_fpath || Fpath.has_ext ".xen" artifact_fpath then
         Db.find_opt build_not_stripped build_id >>= fun stripped_id ->
         if stripped_id = None then begin
           Logs.info (fun m -> m "artifact (not stripped) %a" Fpath.pp artifact_lpath);
           let path p =
             let fname = Fpath.(filename p) in
             let dir = Fpath.(parent (parent p)) in
             Fpath.(dir / fname + "debug")
           in
           let new_artifact_lpath = path artifact_lpath in
           let r =
             Sys.command (Printf.sprintf "cp %s %s"
                            (Fpath.to_string (Fpath.append datadir artifact_lpath))
                            (Fpath.to_string (Fpath.append datadir new_artifact_lpath)))
           in
           assert (r = 0);
           let r =
             Sys.command (Printf.sprintf "strip %s" (Fpath.to_string (Fpath.append datadir artifact_lpath)))
           in
           assert (r = 0);
           Bos.OS.File.read (Fpath.append datadir artifact_lpath) >>= fun data ->
           let size = Int64.of_int (String.length data)
           and sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
           Db.exec update_paths (artifact_id, new_artifact_lpath, path artifact_fpath) >>= fun () ->
           Db.exec add_artifact ((artifact_fpath, artifact_lpath, sha256), (size, build_id)) >>= fun () ->
           Db.find Builder_db.last_insert_rowid () >>= fun new_build_artifact_id ->
           Db.exec Builder_db.Build.set_main_binary (build_id, new_build_artifact_id)
         end else
           Ok ()
       else Ok ())
    builds
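This fixup shells out through Sys.command with paths spliced into the command string, asserting on the exit code. A Bos-based variant (a sketch of an alternative, not what the code above does) would pass arguments without a shell, avoiding quoting issues, and report failures through the result monad instead of assertions:

(* Sketch: the same copy + strip steps via Bos.OS.Cmd. *)
let copy_and_strip ~src ~dst =
  let ( >>= ) = Result.bind in
  Bos.OS.Cmd.run Bos.Cmd.(v "cp" % p src % p dst) >>= fun () ->
  Bos.OS.Cmd.run Bos.Cmd.(v "strip" % p src)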
bin/migrations/m20210707d.ml (new file, 27 lines)
@@ -0,0 +1,27 @@
open Grej.Infix

let all_build_artifacts_with_dot_slash : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
  Caqti_type.unit ->*
  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
  "SELECT id, filepath FROM build_artifact WHERE filepath LIKE './%'"

let update_path : ([`build_artifact] Builder_db.Rep.id * Fpath.t, unit, [< `Zero | `One | `Many > `Zero ]) Caqti_request.t =
  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath ->.
  Caqti_type.unit @@
  "UPDATE build_artifact SET filepath = $2 WHERE id = $1"

let fixup _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:12L (module Db) >>= fun () ->
  Db.rev_collect_list all_build_artifacts_with_dot_slash () >>= fun build_artifacts ->
  Grej.list_iter_result
    (fun (id, fpath) ->
       let segs = match Fpath.segs fpath with
         | "." :: tl -> tl
         | x -> x
       in
       let fpath' = Fpath.v (String.concat "/" segs) in
       if Fpath.equal fpath fpath' then
         Ok ()
       else
         Db.exec update_path (id, fpath'))
    build_artifacts
bin/migrations/m20210712a.ml (new file, 166 lines)
@@ -0,0 +1,166 @@
let new_version = 13L and old_version = 12L
and identifier = "2021-07-12a"
and migrate_doc = "remove result_kind from build, add indexes idx_build_failed and idx_build_artifact_sha256"
and rollback_doc = "add result_kind to build, remove indexes idx_build_failed and idx_build_artifact_sha256"

open Grej.Infix

let new_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       uuid VARCHAR(36) NOT NULL UNIQUE,
       start_d INTEGER NOT NULL,
       start_ps INTEGER NOT NULL,
       finish_d INTEGER NOT NULL,
       finish_ps INTEGER NOT NULL,
       result_code INTEGER NOT NULL,
       result_msg TEXT,
       console BLOB NOT NULL,
       script TEXT NOT NULL,
       main_binary INTEGER,
       user INTEGER NOT NULL,
       job INTEGER NOT NULL,
       input_id BLOB, -- sha256 (sha256<opam-switch> || sha256<build-environment> || sha256<system-packages>)

       FOREIGN KEY(main_binary) REFERENCES build_artifact(id) DEFERRABLE INITIALLY DEFERRED,
       FOREIGN KEY(user) REFERENCES user(id),
       FOREIGN KEY(job) REFERENCES job(id)
     )
  |}

let copy_old_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| INSERT INTO new_build(id, uuid, start_d, start_ps, finish_d, finish_ps,
       result_code, result_msg, console, script, main_binary, user, job, input_id)
     SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, 0, result_msg,
       console, script, main_binary, user, job, input_id
     FROM build
  |}

let old_build_execution_result =
  Caqti_type.unit ->*
  Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) @@
  "SELECT id, result_kind, result_code FROM build"

let update_new_build_execution_result =
  Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int) ->. Caqti_type.unit @@
  "UPDATE new_build SET result_code = $2 WHERE id = $1"

let old_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       uuid VARCHAR(36) NOT NULL UNIQUE,
       start_d INTEGER NOT NULL,
       start_ps INTEGER NOT NULL,
       finish_d INTEGER NOT NULL,
       finish_ps INTEGER NOT NULL,
       result_kind INTEGER NOT NULL,
       result_code INTEGER,
       result_msg TEXT,
       console BLOB NOT NULL,
       script TEXT NOT NULL,
       main_binary INTEGER,
       user INTEGER NOT NULL,
       job INTEGER NOT NULL,
       input_id BLOB, -- sha256 (sha256<opam-switch> || sha256<build-environment> || sha256<system-packages>)

       FOREIGN KEY(main_binary) REFERENCES build_artifact(id) DEFERRABLE INITIALLY DEFERRED,
       FOREIGN KEY(user) REFERENCES user(id),
       FOREIGN KEY(job) REFERENCES job(id)
     )
  |}

let copy_new_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| INSERT INTO new_build(id, uuid, start_d, start_ps, finish_d, finish_ps,
       result_kind, result_msg, console, script, main_binary, user, job, input_id)
     SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, 0, result_msg,
       console, script, main_binary, user, job, input_id
     FROM build
  |}

let new_build_execution_result =
  Caqti_type.unit ->*
  Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int) @@
  "SELECT id, result_code FROM build"

let update_old_build_execution_result =
  Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) ->.
  Caqti_type.unit @@
  "UPDATE new_build SET result_kind = $2, result_code = $3 WHERE id = $1"

let drop_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "DROP TABLE build"

let rename_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "ALTER TABLE new_build RENAME TO build"

let execution_new_of_old kind code =
  match kind, code with
  | 0, Some v -> Ok v
  | 1, Some v -> Ok (v lsl 8)
  | 2, Some v -> Ok (v lsl 16)
  | 3, None -> Ok 65536
  | _ -> Error (`Msg "bad encoding")

let execution_old_of_new code =
  if code <= 0xFF
  then Ok (0, Some code)
  else if code <= 0xFFFF
  then Ok (1, Some (code lsr 8))
  else if code <= 0xFFFFFF
  then Ok (2, Some (code lsr 16))
  else if code = 65536
  then Ok (3, None)
  else Error (`Msg "bad encoding")

let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
  Db.exec new_build () >>= fun () ->
  Db.exec copy_old_build () >>= fun () ->
  Db.collect_list old_build_execution_result () >>= fun results ->
  Grej.list_iter_result (fun (id, kind, code) ->
      execution_new_of_old kind code >>= fun code' ->
      Db.exec update_new_build_execution_result (id, code'))
    results >>= fun () ->
  Db.exec drop_build () >>= fun () ->
  Db.exec rename_build () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)")
    () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
            WHERE result_code <> 0")
    () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_input_id ON build(input_id)")
    () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_main_binary ON build(main_binary)")
    () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)")
    () >>= fun () ->
  Db.exec (Grej.set_version new_version) ()

let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
  Db.exec old_build () >>= fun () ->
  Db.exec copy_new_build () >>= fun () ->
  Db.collect_list new_build_execution_result () >>= fun results ->
  Grej.list_iter_result (fun (id, code) ->
      execution_old_of_new code >>= fun (kind, code') ->
      Db.exec update_old_build_execution_result (id, kind, code'))
    results >>= fun () ->
  Db.exec drop_build () >>= fun () ->
  Db.exec rename_build () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "DROP INDEX idx_build_artifact_sha256") () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)")
    () >>= fun () ->
  Db.exec (Grej.set_version old_version) ()
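The two conversion functions pack the old (result_kind, result_code) pair into a single integer: kind 0 keeps its code in the low byte, kind 1 shifts it left by 8, kind 2 by 16, and kind 3 (no code) becomes 65536. Worked examples using the functions defined in this migration:

(* Worked examples of the packed encoding. *)
let () =
  assert (execution_new_of_old 0 (Some 2) = Ok 2);       (* kind 0: low byte *)
  assert (execution_new_of_old 1 (Some 11) = Ok 2816);   (* 11 lsl 8 *)
  assert (execution_new_of_old 3 None = Ok 65536);
  assert (execution_old_of_new 2816 = Ok (1, Some 11))   (* round-trips *)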
bin/migrations/m20210712b.ml (new file, 21 lines)
@@ -0,0 +1,21 @@
open Grej.Infix

let all_build_artifacts_like_hashes : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
  Caqti_type.unit ->*
  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
  "SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%.build-hashes'"

let all_build_artifacts_like_readme : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
  Caqti_type.unit ->*
  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
  "SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%README.md'"

let fixup datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:13L (module Db) >>= fun () ->
  Db.rev_collect_list all_build_artifacts_like_hashes () >>= fun build_artifacts_build_hashes ->
  Db.rev_collect_list all_build_artifacts_like_readme () >>= fun build_artifacts_readme ->
  Grej.list_iter_result
    (fun (artifact_id, artifact_lpath) ->
       Bos.OS.File.delete (Fpath.append datadir artifact_lpath) >>= fun () ->
       Db.exec Builder_db.Build_artifact.remove artifact_id)
    (build_artifacts_build_hashes @ build_artifacts_readme)
223
bin/migrations/m20210712c.ml
Normal file
223
bin/migrations/m20210712c.ml
Normal file
|
@ -0,0 +1,223 @@
let new_version = 14L and old_version = 13L
and identifier = "2021-07-12c"
and migrate_doc = "store script, console on disk"
and rollback_doc = "store script, console in database"

open Grej.Infix

module Asn = struct
  let decode_strict codec cs =
    match Asn.decode codec cs with
    | Ok (a, rest) ->
      if String.length rest = 0
      then Ok a
      else Error "trailing bytes"
    | Error (`Parse msg) -> Error ("parse error: " ^ msg)

  let projections_of asn =
    let c = Asn.codec Asn.der asn in
    (decode_strict c, Asn.encode c)

  let console =
    Asn.S.(sequence_of
             (sequence2
                (required ~label:"delta" int)
                (required ~label:"data" utf8_string)))

  let console_of_cs, console_to_cs = projections_of console
end

let new_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       uuid VARCHAR(36) NOT NULL UNIQUE,
       start_d INTEGER NOT NULL,
       start_ps INTEGER NOT NULL,
       finish_d INTEGER NOT NULL,
       finish_ps INTEGER NOT NULL,
       result_code INTEGER NOT NULL,
       result_msg TEXT,
       console TEXT NOT NULL,
       script TEXT NOT NULL,
       main_binary INTEGER,
       user INTEGER NOT NULL,
       job INTEGER NOT NULL,
       input_id BLOB, -- sha256 (sha256<opam-switch> || sha256<build-environment> || sha256<system-packages>)

       FOREIGN KEY(main_binary) REFERENCES build_artifact(id) DEFERRABLE INITIALLY DEFERRED,
       FOREIGN KEY(user) REFERENCES user(id),
       FOREIGN KEY(job) REFERENCES job(id)
     )
  |}

let old_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       uuid VARCHAR(36) NOT NULL UNIQUE,
       start_d INTEGER NOT NULL,
       start_ps INTEGER NOT NULL,
       finish_d INTEGER NOT NULL,
       finish_ps INTEGER NOT NULL,
       result_code INTEGER NOT NULL,
       result_msg TEXT,
       console BLOB NOT NULL,
       script TEXT NOT NULL,
       main_binary INTEGER,
       user INTEGER NOT NULL,
       job INTEGER NOT NULL,
       input_id BLOB, -- sha256 (sha256<opam-switch> || sha256<build-environment> || sha256<system-packages>)

       FOREIGN KEY(main_binary) REFERENCES build_artifact(id) DEFERRABLE INITIALLY DEFERRED,
       FOREIGN KEY(user) REFERENCES user(id),
       FOREIGN KEY(job) REFERENCES job(id)
     )
  |}

let copy_from_old_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| INSERT INTO new_build(id, uuid, start_d, start_ps, finish_d, finish_ps,
       result_code, result_msg, console, script, main_binary, user, job, input_id)
     SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_code, result_msg,
       '', '', main_binary, user, job, input_id
     FROM build
  |}

(* Note: the forward copy above uses '' (empty TEXT) for the new console
   column, while the rollback copy below uses x'', SQLite's empty-BLOB
   literal, because the old schema declares console BLOB NOT NULL. *)
let copy_from_new_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| INSERT INTO new_build(id, uuid, start_d, start_ps, finish_d, finish_ps,
       result_code, result_msg, console, script, main_binary, user, job, input_id)
     SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_code, result_msg,
       x'', '', main_binary, user, job, input_id
     FROM build
  |}

let old_build_console_script =
  Caqti_type.unit ->*
  Caqti_type.(t4 (Builder_db.Rep.id (`build : [ `build ]))
                (t2 string Builder_db.Rep.uuid) octets string) @@
  "SELECT b.id, job.name, b.uuid, b.console, b.script FROM build b, job WHERE b.job = job.id"

let update_new_build_console_script =
  Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ]))
                Builder_db.Rep.fpath Builder_db.Rep.fpath) ->.
  Caqti_type.unit @@
  "UPDATE new_build SET console = $2, script = $3 WHERE id = $1"

let new_build_console_script =
  Caqti_type.unit ->*
  Caqti_type.t3 (Builder_db.Rep.id (`build : [ `build ]))
    Builder_db.Rep.fpath Builder_db.Rep.fpath @@
  "SELECT id, console, script FROM build"

let update_old_build_console_script =
  Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) octets string) ->.
  Caqti_type.unit @@
  "UPDATE new_build SET console = $2, script = $3 WHERE id = $1"

let drop_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "DROP TABLE build"

let rename_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "ALTER TABLE new_build RENAME TO build"

let console_to_string console =
  Asn.console_of_cs console
  |> Result.map_error (fun s -> `Msg s) >>| fun console ->
  List.rev_map (fun (delta, data) ->
      Printf.sprintf "%.3fs:%s\n" (Duration.to_f (Int64.of_int delta)) data)
    console
  |> String.concat ""

let console_of_string data =
  let lines = String.split_on_char '\n' data in
  (* remove last empty line *)
  let lines =
    match List.rev lines with
    | "" :: lines -> List.rev lines
    | _ -> lines
  in
  let console = List.map (fun line ->
      match String.split_on_char ':' line with
      | ts :: tail ->
        let delta = float_of_string (String.sub ts 0 (String.length ts - 1)) in
        Int64.to_int (Duration.of_f delta), String.concat ":" tail
      | _ -> assert false)
      lines
  in
  Asn.console_to_cs console
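console_to_string and console_of_string fix the line format used on disk: one "<seconds, three decimals>s:<data>" line per console event. A self-contained round-trip sketch of just that format, with plain floats instead of the migration's Duration/ASN.1 plumbing (which is elided here):

    (* Serialize one (delta in seconds, data) event to a line and back. *)
    let to_line (delta_s, data) = Printf.sprintf "%.3fs:%s\n" delta_s data

    let of_line line =
      match String.index_opt line ':' with
      | None -> None
      | Some i ->
        let ts = String.sub line 0 i in
        (* drop the trailing 's' before parsing the seconds *)
        let delta_s = float_of_string (String.sub ts 0 (String.length ts - 1)) in
        Some (delta_s, String.sub line (i + 1) (String.length line - i - 1))

    let () =
      let line = to_line (1.234, "hello: world") in
      (* the data may itself contain colons; only the first is a separator *)
      assert (of_line (String.trim line) = Some (1.234, "hello: world"))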

let save_console_and_script datadir job_name uuid console script =
  let out name = Fpath.(v job_name / Uuidm.to_string uuid / name + "txt") in
  let script_out = out "script" in
  Bos.OS.File.write Fpath.(datadir // script_out) script >>= fun () ->
  let console_out = out "console" in
  console_to_string console >>= fun console_data ->
  Bos.OS.File.write Fpath.(datadir // console_out) console_data >>= fun () ->
  Ok (console_out, script_out)

let read_console_and_script datadir console_file script_file =
  let console_file = Fpath.append datadir console_file
  and script_file = Fpath.append datadir script_file
  in
  Bos.OS.File.read console_file >>= fun console ->
  Bos.OS.File.read script_file >>= fun script ->
  let console = console_of_string console in
  Bos.OS.File.delete console_file >>= fun () ->
  Bos.OS.File.delete script_file >>= fun () ->
  Ok (console, script)

let migrate datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
  Db.exec new_build () >>= fun () ->
  Db.exec copy_from_old_build () >>= fun () ->
  Db.collect_list old_build_console_script () >>= fun console_scripts ->
  Grej.list_iter_result (fun (id, (job_name, uuid), console, script) ->
      save_console_and_script datadir job_name uuid console script >>= fun (console_file, script_file) ->
      Db.exec update_new_build_console_script (id, console_file, script_file))
    console_scripts >>= fun () ->
  Db.exec drop_build () >>= fun () ->
  Db.exec rename_build () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)")
    () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
            WHERE result_code <> 0")
    () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_input_id ON build(input_id)")
    () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_main_binary ON build(main_binary)")
    () >>= fun () ->
  Db.exec (Grej.set_version new_version) ()

let rollback datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
  Db.exec old_build () >>= fun () ->
  Db.exec copy_from_new_build () >>= fun () ->
  Db.collect_list new_build_console_script () >>= fun console_scripts ->
  Grej.list_iter_result (fun (id, console_file, script_file) ->
      read_console_and_script datadir console_file script_file >>= fun (console, script) ->
      Db.exec update_old_build_console_script (id, console, script))
    console_scripts >>= fun () ->
  Db.exec drop_build () >>= fun () ->
  Db.exec rename_build () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)")
    () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
            WHERE result_code <> 0")
    () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_input_id ON build(input_id)")
    () >>= fun () ->
  Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
           "CREATE INDEX idx_build_main_binary ON build(main_binary)")
    () >>= fun () ->
  Db.exec (Grej.set_version old_version) ()
21  bin/migrations/m20210910.ml  Normal file
@ -0,0 +1,21 @@
open Grej.Infix

let mixups =
  Caqti_type.unit ->*
  Caqti_type.t3 (Builder_db.Rep.id (`build : [`build]))
    Builder_db.Rep.fpath Builder_db.Rep.fpath @@
  "SELECT id, console, script FROM build \
   WHERE console LIKE '%/script.txt' AND script LIKE '%/console.txt'"

let fixup =
  Caqti_type.t3 (Builder_db.Rep.id (`build : [`build]))
    Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
  Caqti_type.unit @@
  "UPDATE build SET console = $2, script = $3 WHERE id = $1"

let fixup _datadir (module Db : Caqti_blocking.CONNECTION) =
  Grej.check_version ~user_version:14L (module Db) >>= fun () ->
  Db.collect_list mixups () >>= fun mixups ->
  Grej.list_iter_result (fun (id, console, script) ->
      (* the console and script paths were stored swapped; swap them back *)
      Db.exec fixup (id, script, console))
    mixups
154  bin/migrations/m20211105.ml  Normal file
@ -0,0 +1,154 @@
let new_version = 15L and old_version = 14L
and identifier = "2021-11-05"
and migrate_doc = "add platform to build"
and rollback_doc = "remove platform from build"

open Grej.Syntax

let new_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       uuid VARCHAR(36) NOT NULL UNIQUE,
       start_d INTEGER NOT NULL,
       start_ps INTEGER NOT NULL,
       finish_d INTEGER NOT NULL,
       finish_ps INTEGER NOT NULL,
       result_code INTEGER NOT NULL,
       result_msg TEXT,
       console TEXT NOT NULL,
       script TEXT NOT NULL,
       platform TEXT NOT NULL,
       main_binary INTEGER,
       user INTEGER NOT NULL,
       job INTEGER NOT NULL,
       input_id BLOB, -- sha256 (sha256<opam-switch> || sha256<build-environment> || sha256<system-packages>)

       FOREIGN KEY(main_binary) REFERENCES build_artifact(id) DEFERRABLE INITIALLY DEFERRED,
       FOREIGN KEY(user) REFERENCES user(id),
       FOREIGN KEY(job) REFERENCES job(id)
     )
  |}

let old_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       uuid VARCHAR(36) NOT NULL UNIQUE,
       start_d INTEGER NOT NULL,
       start_ps INTEGER NOT NULL,
       finish_d INTEGER NOT NULL,
       finish_ps INTEGER NOT NULL,
       result_code INTEGER NOT NULL,
       result_msg TEXT,
       console TEXT NOT NULL,
       script TEXT NOT NULL,
       main_binary INTEGER,
       user INTEGER NOT NULL,
       job INTEGER NOT NULL,
       input_id BLOB, -- sha256 (sha256<opam-switch> || sha256<build-environment> || sha256<system-packages>)

       FOREIGN KEY(main_binary) REFERENCES build_artifact(id) DEFERRABLE INITIALLY DEFERRED,
       FOREIGN KEY(user) REFERENCES user(id),
       FOREIGN KEY(job) REFERENCES job(id)
     )
  |}

let copy_from_old_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| INSERT INTO new_build(id, uuid, start_d, start_ps, finish_d, finish_ps,
       result_code, result_msg, console, script, platform, main_binary, user, job, input_id)
     SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_code, result_msg,
       console, script, 'PLACEHOLDER-PLATFORM', main_binary, user, job, input_id
     FROM build
  |}

let copy_from_new_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| INSERT INTO new_build(id, uuid, start_d, start_ps, finish_d, finish_ps,
       result_code, result_msg, console, script, main_binary, user, job, input_id)
     SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_code, result_msg,
       console, script, main_binary, user, job, input_id
     FROM build
  |}

let build_id_and_user =
  Caqti_type.unit ->* Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int64) @@
  "SELECT id, user FROM build"

let update_new_build_platform =
  Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) string) ->. Caqti_type.unit @@
  "UPDATE new_build SET platform = $2 WHERE id = $1"

let drop_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "DROP TABLE build"

let rename_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "ALTER TABLE new_build RENAME TO build"

(*
  1|reynir
  2|freebsd-builder
  3|ubuntu-builder
  5|nologin
  6|reynir-solsort
  7|reynir-spurv
*)
let platform_of_user_id = function
  | 1L -> assert false
  | 2L -> "freebsd-12"
  | 3L -> "ubuntu-20.04"
  | 5L -> assert false
  | 6L -> "debian-10"
  | 7L -> "debian-11"
  | _ -> assert false

let idx_build_job_start =
  Caqti_type.unit ->. Caqti_type.unit @@
  "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)"

let idx_build_failed =
  Caqti_type.unit ->. Caqti_type.unit @@
  "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) WHERE result_code <> 0"

let idx_build_input_id =
  Caqti_type.unit ->. Caqti_type.unit @@
  "CREATE INDEX idx_build_input_id ON build(input_id)"

let idx_build_main_binary =
  Caqti_type.unit ->. Caqti_type.unit @@
  "CREATE INDEX idx_build_main_binary ON build(main_binary)"

let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:old_version (module Db) in
  let* () = Db.exec new_build () in
  let* () = Db.exec copy_from_old_build () in
  let* builds = Db.collect_list build_id_and_user () in
  let* () =
    Grej.list_iter_result (fun (id, user) ->
        let platform = platform_of_user_id user in
        Db.exec update_new_build_platform (id, platform))
      builds
  in
  let* () = Db.exec drop_build () in
  let* () = Db.exec rename_build () in
  let* () = Db.exec idx_build_job_start () in
  let* () = Db.exec idx_build_failed () in
  let* () = Db.exec idx_build_input_id () in
  let* () = Db.exec idx_build_main_binary () in
  Db.exec (Grej.set_version new_version) ()

let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:new_version (module Db) in
  let* () = Db.exec old_build () in
  let* () = Db.exec copy_from_new_build () in
  let* () = Db.exec drop_build () in
  let* () = Db.exec rename_build () in
  let* () = Db.exec idx_build_job_start () in
  let* () = Db.exec idx_build_failed () in
  let* () = Db.exec idx_build_input_id () in
  let* () = Db.exec idx_build_main_binary () in
  Db.exec (Grej.set_version old_version) ()
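This migration is the first in the series to use open Grej.Syntax with let* instead of the >>= chains of the earlier files. A sketch of the binding operator such a syntax module is assumed to provide (a standard result binder; the actual definition lives in Grej):

    (* Sequence result-returning steps, stopping at the first Error. *)
    let ( let* ) r f = match r with Ok x -> f x | Error _ as e -> e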
56  bin/migrations/m20220509.ml  Normal file
@ -0,0 +1,56 @@
let new_version = 16L and old_version = 15L
and identifier = "2022-05-09"
and migrate_doc = "switch uuid encoding to hex"
and rollback_doc = "switch uuid encoding back to binary"

open Grej.Infix

let old_uuid_rep =
  let encode uuid = Ok (Uuidm.to_binary_string uuid) in
  let decode s =
    Uuidm.of_binary_string s
    |> Option.to_result ~none:"failed to decode uuid"
  in
  Caqti_type.custom ~encode ~decode Caqti_type.string

let new_uuid_rep =
  let encode uuid = Ok (Uuidm.to_string uuid) in
  let decode s =
    Uuidm.of_string s
    |> Option.to_result ~none:"failed to decode uuid"
  in
  Caqti_type.custom ~encode ~decode Caqti_type.string
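The two representations differ only in how a Uuidm.t is rendered: 16 raw bytes versus the 36-character hyphenated hex form. A small sketch of that invariant, using only the Uuidm functions already referenced above:

    let () =
      match Uuidm.of_string "123e4567-e89b-12d3-a456-426614174000" with
      | None -> assert false
      | Some u ->
        (* binary: 16 raw bytes; hex: 36 chars including hyphens *)
        assert (String.length (Uuidm.to_binary_string u) = 16);
        assert (String.length (Uuidm.to_string u) = 36)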
let uuids_byte_encoded_q =
  Caqti_type.unit ->*
  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep @@
  "SELECT id, uuid FROM build"

let uuids_hex_encoded_q =
  Caqti_type.unit ->*
  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep @@
  "SELECT id, uuid FROM build"

let migrate_q =
  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep ->.
  Caqti_type.unit @@
  "UPDATE build SET uuid = $2 WHERE id = $1"

let rollback_q =
  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep ->.
  Caqti_type.unit @@
  "UPDATE build SET uuid = $2 WHERE id = $1"

let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
  let open Grej.Infix in
  Grej.check_version ~user_version:old_version (module Db) >>= fun () ->
  Db.collect_list uuids_byte_encoded_q () >>= fun old_uuids ->
  Grej.list_iter_result (Db.exec migrate_q) old_uuids >>= fun () ->
  Db.exec (Grej.set_version new_version) ()

let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
  let open Grej.Infix in
  Grej.check_version ~user_version:new_version (module Db) >>= fun () ->
  Db.collect_list uuids_hex_encoded_q () >>= fun new_uuids ->
  Grej.list_iter_result (Db.exec rollback_q) new_uuids >>= fun () ->
  Db.exec (Grej.set_version old_version) ()
32  bin/migrations/m20230911.ml  Normal file
@ -0,0 +1,32 @@
let new_version = 17L and old_version = 16L
and identifier = "2023-09-11"
and migrate_doc = "index failed builds on main binary is null"
and rollback_doc = "index failed builds on exit code"

open Grej.Syntax

let drop_idx_build_failed =
  Caqti_type.(unit ->. unit) @@
  "DROP INDEX idx_build_failed"

let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:old_version (module Db) in
  let* () = Db.exec drop_idx_build_failed () in
  let* () =
    Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
             "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
              WHERE main_binary IS NULL")
      ()
  in
  Db.exec (Grej.set_version new_version) ()

let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:new_version (module Db) in
  let* () = Db.exec drop_idx_build_failed () in
  let* () =
    Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
             "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
              WHERE result_code <> 0")
      ()
  in
  Db.exec (Grej.set_version old_version) ()
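Each file in bin/migrations follows the same shape: version constants, doc strings, and migrate/rollback functions over a blocking Caqti connection. A hedged sketch of a module type a migration driver could expect — this signature is an assumption, not taken from the repository, and in particular the error type of the real functions also covers Caqti errors:

    module type MIGRATION = sig
      val new_version : int64
      val old_version : int64
      val identifier : string
      val migrate_doc : string
      val rollback_doc : string
      (* the error type below is a simplification *)
      val migrate : Fpath.t -> (module Caqti_blocking.CONNECTION) ->
        (unit, [ `Msg of string ]) result
      val rollback : Fpath.t -> (module Caqti_blocking.CONNECTION) ->
        (unit, [ `Msg of string ]) result
    end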
162  bin/migrations/m20230914.ml  Normal file
@ -0,0 +1,162 @@
let new_version = 18L and old_version = 17L
and identifier = "2023-09-14"
and migrate_doc = "Artifacts are stored content-addressed in the filesystem"
and rollback_doc = "Artifacts are stored under their build's job name and uuid"

open Grej.Syntax

let new_build_artifact =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build_artifact (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       filepath TEXT NOT NULL,
       sha256 BLOB NOT NULL,
       size INTEGER NOT NULL,
       build INTEGER NOT NULL,

       FOREIGN KEY(build) REFERENCES build(id),
       UNIQUE(build, filepath)
     )
  |}

let old_build_artifact =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build_artifact (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       filepath TEXT NOT NULL, -- the path as in the build
       localpath TEXT NOT NULL, -- local path to the file on disk
       sha256 BLOB NOT NULL,
       size INTEGER NOT NULL,
       build INTEGER NOT NULL,

       FOREIGN KEY(build) REFERENCES build(id),
       UNIQUE(build, filepath)
     )
  |}

let idx_build_artifact_sha256 =
  Caqti_type.unit ->. Caqti_type.unit @@
  "CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)"

let idx_build_artifact_build =
  Caqti_type.unit ->. Caqti_type.unit @@
  "CREATE INDEX idx_build_artifact_build ON build_artifact(build)"

let copy_new_build_artifact =
  Caqti_type.(unit ->. unit) @@
  {| INSERT INTO new_build_artifact(id, filepath, sha256, size, build)
     SELECT id, filepath, sha256, size, build
     FROM build_artifact
  |}

let copy_old_build_artifact =
  Caqti_type.(unit ->. unit) @@
  {| INSERT INTO new_build_artifact(id, filepath, localpath, sha256, size, build)
     SELECT a.id, a.filepath,
       j.name || '/' || b.uuid || '/output/' || a.filepath,
       a.sha256, a.size, a.build
     FROM build_artifact a, job j, build b
     WHERE b.id = a.build AND j.id = b.job
  |}

let new_build_artifact_paths =
  Caqti_type.unit ->* Caqti_type.(t2 string string) @@
  {| SELECT localpath, '_artifacts/' || substr(lower(hex(sha256)), 1, 2) || '/' || lower(hex(sha256))
     FROM build_artifact
  |}

let old_build_artifact_paths =
  Caqti_type.unit ->* Caqti_type.(t2 string string) @@
  {| SELECT '_artifacts/' || substr(lower(hex(a.sha256)), 1, 2) || '/' || lower(hex(a.sha256)),
       j.name || '/' || b.uuid || '/output/' || a.filepath
     FROM build_artifact a, job j, build b
     WHERE b.id = a.build AND j.id = b.job
  |}
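The two queries above compute, in SQL, the content-addressed location "_artifacts/<first two hex digits>/<full sha256 hex>". The same path can be derived in OCaml; a sketch where to_hex is a stand-in written against the standard library (the project itself presumably uses ohex for this):

    let to_hex s =
      String.concat ""
        (List.init (String.length s)
           (fun i -> Printf.sprintf "%02x" (Char.code s.[i])))

    let artifact_path sha256 =
      let hex = to_hex sha256 in
      (* shard by the first two hex digits, as substr(..., 1, 2) does above *)
      Printf.sprintf "_artifacts/%s/%s" (String.sub hex 0 2) hex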
let drop_build_artifact =
  Caqti_type.(unit ->. unit) @@
  "DROP TABLE build_artifact"

let rename_build_artifact =
  Caqti_type.(unit ->. unit) @@
  "ALTER TABLE new_build_artifact RENAME TO build_artifact"

let move_paths ?force datadir (old_path, new_path) =
  let old_path = Fpath.(datadir // v old_path) and new_path = Fpath.(datadir // v new_path) in
  let* _created = Bos.OS.Dir.create (Fpath.parent new_path) in
  Bos.OS.Path.move ?force old_path new_path

let copy_paths datadir (old_path, new_path) =
  let old_path = Fpath.(datadir // v old_path) and new_path = Fpath.(datadir // v new_path) in
  let new_path_tmp = Fpath.(new_path + "tmp") in
  let* _created = Bos.OS.Dir.create (Fpath.parent new_path) in
  let cmd = Bos.Cmd.(v "cp" % p old_path % p new_path_tmp) in
  let* () =
    match Bos.OS.Cmd.run_status cmd with
    | Ok (`Exited 0) ->
      Ok ()
    | Ok status ->
      let _ = Bos.OS.Path.delete new_path_tmp in
      Error (`Msg (Fmt.str "cp failed: %a" Bos.OS.Cmd.pp_status status))
    | Error _ as e ->
      let _ = Bos.OS.Path.delete new_path_tmp in
      e
  in
  (* copy to a temporary name first, then rename into place *)
  Bos.OS.Path.move ~force:true new_path_tmp new_path

let migrate datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:old_version (module Db) in
  let* () = Db.exec new_build_artifact () in
  let* () = Db.exec copy_new_build_artifact () in
  let* () = Db.iter_s new_build_artifact_paths (move_paths ~force:true datadir) () in
  let* () = Db.exec drop_build_artifact () in
  let* () = Db.exec rename_build_artifact () in
  let* () = Db.exec idx_build_artifact_sha256 () in
  let* () = Db.exec idx_build_artifact_build () in
  Db.exec (Grej.set_version new_version) ()

let rollback datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:new_version (module Db) in
  let* () = Db.exec old_build_artifact () in
  let* () = Db.exec copy_old_build_artifact () in
  let* () = Db.iter_s old_build_artifact_paths (copy_paths datadir) () in
  let* () =
    Db.iter_s old_build_artifact_paths
      (fun (old_path, _new_path) ->
         Bos.OS.Path.delete Fpath.(datadir // v old_path))
      ()
  in
  let* () = Db.exec drop_build_artifact () in
  let* () = Db.exec rename_build_artifact () in
  let* () = Db.exec idx_build_artifact_sha256 () in
  Db.exec (Grej.set_version old_version) ()

(* migration failed but managed to move *some* files *)
let fixup_migrate datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:old_version (module Db) in
  let* () =
    Db.iter_s new_build_artifact_paths
      (fun (old_path, new_path) ->
         let* old_exists = Bos.OS.Path.exists Fpath.(datadir // v old_path) in
         let* new_exists = Bos.OS.Path.exists Fpath.(datadir // v new_path) in
         if new_exists && not old_exists then
           copy_paths datadir (new_path, old_path)
         else Ok ())
      ()
  in
  Db.iter_s new_build_artifact_paths
    (fun (_old_path, new_path) ->
       Bos.OS.Path.delete Fpath.(datadir // v new_path))
    ()

(* rollback failed but some or all artifacts were copied *)
let fixup_rollback datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:new_version (module Db) in
  Db.iter_s old_build_artifact_paths
    (fun (old_path, new_path) ->
       let* old_exists = Bos.OS.Path.exists Fpath.(datadir // v old_path) in
       if old_exists then
         Bos.OS.Path.delete Fpath.(datadir // v new_path)
       else
         move_paths datadir (new_path, old_path))
    ()
@ -1,55 +1,65 @@
opam-version: "2.0"
maintainer: "Reynir Björnsson <reynir@reynir.dk>"
authors: ["Reynir Björnsson <reynir@reynir.dk>"]
homepage: "https://github.com/roburio/builder-web"
dev-repo: "git+https://github.com/roburio/builder-web.git"
bug-reports: "https://github.com/roburio/builder-web/issues"
homepage: "https://github.com/robur-coop/builder-web"
dev-repo: "git+https://github.com/robur-coop/builder-web.git"
bug-reports: "https://github.com/robur-coop/builder-web/issues"
license: "ISC"

build: [
  ["dune" "subst"] {dev}
  ["dune" "build" "-p" name "-j" jobs]
  ["dune" "runtest" "-p" name "-j" jobs] {with-test}
  ["sh" "packaging/FreeBSD/create_package.sh"] {os = "freebsd"}
]

install: [
  ["cp" "builder_web.txz" "%{bin}%/"] {os = "freebsd"}
  ["sh" "-ex" "packaging/FreeBSD/create_package.sh"] {os = "freebsd"}
  ["sh" "-ex" "packaging/debian/create_package.sh"] {os-family = "debian"}
]

depends: [
  "builder"
  "dream"
  "ocaml" {>= "4.13.0"}
  "dune" {>= "2.7.0"}
  "builder" {>= "0.4.0"}
  "dream" {>= "1.0.0~alpha7"}
  "bos"
  "hex"
  "caqti"
  "ohex" {>= "0.2.0"}
  "lwt" {>= "5.7.0"}
  "caqti" {>= "2.1.2"}
  "caqti-lwt"
  "caqti-driver-sqlite3"
  "pbkdf"
  "mirage-crypto-rng"
  "scrypt-kdf"
  "alcotest" {with-test}
  "mirage-crypto-rng" {>= "0.11.0"}
  "kdf"
  "opam-core"
  "opam-format"
  "metrics"
  "metrics-lwt"
  "metrics-influx"
  "metrics-rusage"
  "opam-format" {>= "2.1.0"}
  "metrics" {>= "0.3.0"}
  "metrics-lwt" {>= "0.3.0"}
  "metrics-influx" {>= "0.3.0"}
  "metrics-rusage" {>= "0.3.0"}
  "ipaddr"
  "tyxml"
  "tyxml" {>= "4.3.0"}
  "ptime"
  "duration"
  "rresult"
  "mirage-crypto"
  "asn1-combinators"
  "asn1-combinators" {>= "0.3.0"}
  "logs"
  "cmdliner"
  "cmdliner" {>= "1.1.0"}
  "uri"
  "fmt"
  "fmt" {>= "0.8.7"}
  "cmarkit" {>= "0.3.0"}
  "tar" {>= "3.0.0"}
  "tar-unix" {>= "3.0.0"}
  "owee"
  "solo5-elftool" {>= "0.3.0"}
  "decompress" {>= "1.5.0"}
  "digestif" {>= "1.2.0"}
  "uuidm" {>= "0.9.9"}
  "yojson"
  "alcotest" {>= "1.2.0" & with-test}
  "ppx_deriving" {with-test}
  "ppx_deriving_yojson" {with-test}
]

synopsis: "Web interface for builder"

pin-depends: [
  ["builder.dev" "git+https://github.com/roburio/builder.git"]
]
description: """
Builder-web takes in submissions of builds, typically from [builder](https://github.com/robur-coop/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
Produced binaries can be downloaded and executed.
[builds.robur.coop](https://builds.robur.coop/) itself runs builder-web.
"""
x-maintenance-intent: [ "(latest)" ]
743  db/builder_db.ml
@ -1,53 +1,42 @@
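Throughout this file, the diff rewrites every query from the Caqti_request.exec/find/find_opt/collect constructors to the Caqti_request.Infix arrows opened at the top. For reference, the four arrows and their row multiplicities, with a small example (count_jobs is illustrative, not part of the file):

    (* ->.  exec     : no result rows      (replaces Caqti_request.exec)
       ->!  find     : exactly one row     (replaces Caqti_request.find)
       ->?  find_opt : zero or one row     (replaces Caqti_request.find_opt)
       ->*  collect  : zero or more rows   (replaces Caqti_request.collect) *)
    let count_jobs =
      let open Caqti_request.Infix in
      Caqti_type.unit ->! Caqti_type.int @@
      "SELECT count(*) FROM job"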
module Rep = Representation
open Rep
open Caqti_request.Infix

let application_id = 1234839235l

(* Please update this when making changes! *)
let current_version = 7L
(* Please update this when making changes! And also update
   packaging/batch-viz.sh and packaging/visualizations.sh. *)
let current_version = 18L

type id = Rep.id
type 'a id = 'a Rep.id

type file = Rep.file = {
  filepath : Fpath.t;
  localpath : Fpath.t;
  sha256 : Cstruct.t;
  sha256 : string;
  size : int;
}

let last_insert_rowid =
  Caqti_request.find
    Caqti_type.unit
    id
    "SELECT last_insert_rowid()"

let last_insert_rowid = Rep.last_insert_rowid

let get_application_id =
  Caqti_request.find
    Caqti_type.unit
    Caqti_type.int32
  Caqti_type.unit ->! Caqti_type.int32 @@
  "PRAGMA application_id"

let get_version =
  Caqti_request.find
    Caqti_type.unit
    Caqti_type.int64
  Caqti_type.unit ->! Caqti_type.int64 @@
  "PRAGMA user_version"

let set_application_id =
  Caqti_request.exec
    Caqti_type.unit
    (Printf.sprintf "PRAGMA application_id = %ld" application_id)
  Caqti_type.unit ->. Caqti_type.unit @@
  Printf.sprintf "PRAGMA application_id = %ld" application_id

let set_current_version =
  Caqti_request.exec
    Caqti_type.unit
    (Printf.sprintf "PRAGMA user_version = %Ld" current_version)
  Caqti_type.unit ->. Caqti_type.unit @@
  Printf.sprintf "PRAGMA user_version = %Ld" current_version

module Job = struct
  let migrate =
    Caqti_request.exec
      Caqti_type.unit
    Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE job (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         name VARCHAR(255) NOT NULL UNIQUE
@ -55,47 +44,102 @@ module Job = struct
    |}

  let rollback =
    Caqti_request.exec
      Caqti_type.unit
      {| DROP TABLE IF EXISTS job |}
    Caqti_type.unit ->. Caqti_type.unit @@
    "DROP TABLE IF EXISTS job"

  let get =
    Caqti_request.find
      id
      Caqti_type.string
    id `job ->! Caqti_type.string @@
    "SELECT name FROM job WHERE id = ?"

  let get_id_by_name =
    Caqti_request.find_opt
      Caqti_type.string
      id
    Caqti_type.string ->? id `job @@
    "SELECT id FROM job WHERE name = ?"

  let get_all =
    Caqti_request.collect
      Caqti_type.unit
      Caqti_type.(tup2 id string)
      "SELECT id, name FROM job ORDER BY name ASC"
  let get_all_with_section_synopsis =
    Caqti_type.unit ->*
    Caqti_type.(t4 (id `job) string (option string) (option string)) @@
    {| SELECT j.id, j.name, section.value, synopsis.value
       FROM job j, tag section_tag, tag synopsis_tag
       LEFT JOIN job_tag section ON section.job = j.id AND section.tag = section_tag.id
       LEFT JOIN job_tag synopsis ON synopsis.job = j.id AND synopsis.tag = synopsis_tag.id
       WHERE section_tag.tag = 'section' AND synopsis_tag.tag = 'synopsis'
       ORDER BY section.value, j.name ASC
    |}

  let try_add =
    Caqti_request.exec
      Caqti_type.string
    Caqti_type.string ->. Caqti_type.unit @@
    "INSERT OR IGNORE INTO job (name) VALUES (?)"

  let remove =
    Caqti_request.exec
      id
    id `job ->. Caqti_type.unit @@
    "DELETE FROM job WHERE id = ?"
end

module Tag = struct
  let migrate =
    Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE tag (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         tag VARCHAR(255) NOT NULL UNIQUE
       )
    |}

  let rollback =
    Caqti_type.unit ->. Caqti_type.unit @@
    "DROP TABLE IF EXISTS tag"

  let get_id_by_name =
    Caqti_type.string ->! id `tag @@
    "SELECT id FROM tag WHERE tag = ?"

  let try_add =
    Caqti_type.string ->. Caqti_type.unit @@
    "INSERT OR IGNORE INTO tag (tag) VALUES (?)"
end

module Job_tag = struct
  let migrate =
    Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE job_tag (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         tag INTEGER NOT NULL,
         value TEXT NOT NULL,
         job INTEGER NOT NULL,

         FOREIGN KEY(job) REFERENCES job(id),
         FOREIGN KEY(tag) REFERENCES tag(id),
         UNIQUE(tag, job)
       )
    |}

  let rollback =
    Caqti_type.unit ->. Caqti_type.unit @@
    "DROP TABLE IF EXISTS job_tag"

  let add =
    Caqti_type.(t3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
    "INSERT INTO job_tag (tag, value, job) VALUES ($1, $2, $3)"

  let update =
    Caqti_type.(t3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
    "UPDATE job_tag SET value = $2 WHERE tag = $1 AND job = $3"

  let get_value =
    Caqti_type.(t2 (id `tag) (id `job)) ->? Caqti_type.string @@
    "SELECT value FROM job_tag WHERE tag = ? AND job = ?"

  let remove_by_job =
    id `job ->. Caqti_type.unit @@
    "DELETE FROM job_tag WHERE job = ?"

end

module Build_artifact = struct
  let migrate =
    Caqti_request.exec
      Caqti_type.unit
    Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE build_artifact (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         filepath TEXT NOT NULL, -- the path as in the build
         localpath TEXT NOT NULL, -- local path to the file on disk
         sha256 BLOB NOT NULL,
         size INTEGER NOT NULL,
         build INTEGER NOT NULL,
@ -106,101 +150,43 @@ module Build_artifact = struct
    |}

  let rollback =
    Caqti_request.exec
      Caqti_type.unit
    Caqti_type.unit ->. Caqti_type.unit @@
    "DROP TABLE IF EXISTS build_artifact"

  let get_by_build =
    Caqti_request.find
      (Caqti_type.tup2 id fpath)
      (Caqti_type.tup2 id file)
      {| SELECT id, filepath, localpath, sha256, size
         FROM build_artifact
         WHERE build = ? AND filepath = ?
      |}
  let get =
    id `build_artifact ->! file @@
    {| SELECT filepath, sha256, size
       FROM build_artifact WHERE id = ? |}

  let get_by_build_uuid =
    Caqti_request.find_opt
      (Caqti_type.tup2 uuid fpath)
      (Caqti_type.tup2 id file)
    Caqti_type.t2 uuid fpath ->? Caqti_type.t2 (id `build_artifact) file @@
    {| SELECT build_artifact.id, build_artifact.filepath,
         build_artifact.localpath, build_artifact.sha256, build_artifact.size
         build_artifact.sha256, build_artifact.size
       FROM build_artifact
       INNER JOIN build ON build.id = build_artifact.build
       WHERE build.uuid = ? AND build_artifact.filepath = ?
    |}

  let get_all_by_build =
    Caqti_request.collect
      id
      Caqti_type.(tup2
        id
        file)
      "SELECT id, filepath, localpath, sha256, size FROM build_artifact WHERE build = ?"
    id `build ->* Caqti_type.(t2 (id `build_artifact) file) @@
    "SELECT id, filepath, sha256, size FROM build_artifact WHERE build = ?"

  let exists =
    Caqti_type.octets ->! Caqti_type.bool @@
    "SELECT EXISTS(SELECT 1 FROM build_artifact WHERE sha256 = ?)"

  let add =
    Caqti_request.exec
      Caqti_type.(tup2 file id)
      "INSERT INTO build_artifact (filepath, localpath, sha256, size, build)
       VALUES (?, ?, ?, ?, ?)"
    Caqti_type.(t2 file (id `build)) ->. Caqti_type.unit @@
    "INSERT INTO build_artifact (filepath, sha256, size, build) \
     VALUES (?, ?, ?, ?)"

  let remove_by_build =
    Caqti_request.exec
      id
    id `build ->. Caqti_type.unit @@
    "DELETE FROM build_artifact WHERE build = ?"
end

module Build_file = struct
  let migrate =
    Caqti_request.exec
      Caqti_type.unit
      {| CREATE TABLE build_file (
           id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
           filepath TEXT NOT NULL, -- the path as in the build
           localpath TEXT NOT NULL, -- local path to the file on disk
           sha256 BLOB NOT NULL,
           size INTEGER NOT NULL,
           build INTEGER NOT NULL,

           FOREIGN KEY(build) REFERENCES build(id),
           UNIQUE(build, filepath)
         )
      |}

  let rollback =
    Caqti_request.exec
      Caqti_type.unit
      "DROP TABLE IF EXISTS build_file"

  let get_by_build_uuid =
    Caqti_request.find_opt
      (Caqti_type.tup2 uuid fpath)
      (Caqti_type.tup2 id file)
      {| SELECT build_file.id, build_file.localpath,
           build_file.localpath, build_file.sha256, build_file.size
         FROM build_file
         INNER JOIN build ON build.id = build_file.build
         WHERE build.uuid = ? AND build_file.filepath = ?
      |}

  let get_all_by_build =
    Caqti_request.collect
      id
      Caqti_type.(tup2
        id
        file)
      "SELECT id, filepath, localpath, sha256, size FROM build_file WHERE build = ?"

  let add =
    Caqti_request.exec
      Caqti_type.(tup2 file id)
      "INSERT INTO build_file (filepath, localpath, sha256, size, build)
       VALUES (?, ?, ?, ?, ?)"

  let remove_by_build =
    Caqti_request.exec
      id
      "DELETE FROM build_file WHERE build = ?"
  let remove =
    id `build_artifact ->. Caqti_type.unit @@
    "DELETE FROM build_artifact WHERE id = ?"
end

module Build = struct
@ -209,74 +195,65 @@ module Build = struct
    start : Ptime.t;
    finish : Ptime.t;
    result : Builder.execution_result;
    console : (int * string) list;
    script : string;
    main_binary : id option;
    user_id : id;
    job_id : id;
    console : Fpath.t;
    script : Fpath.t;
    platform : string;
    main_binary : [`build_artifact] id option;
    input_id : string option;
    user_id : [`user] id;
    job_id : [`job] id;
  }

  let pp ppf t =
    Fmt.pf ppf "@[<hov>{ uuid=@ %a;@ \
                start=@ %a;@ \
                finish=@ %a;@ \
                result=@ @[<hov>%a@];@ \
                console=@ %a;@ \
                script=@ %a;@ \
                platform=@ %S;@ \
                main_binary=@ @[<hov>%a@];@ \
                input_id=@ @[<hov>%a@];@ \
                user_id=@ %Lx;@ \
                job_id=@ %Lx;@ }@]"
      Uuidm.pp t.uuid
      Ptime.pp t.start
      Ptime.pp t.finish
      Builder.pp_execution_result t.result
      Fpath.pp t.console
      Fpath.pp t.script
      t.platform
      Fmt.(Dump.option int64) t.main_binary
      Fmt.(Dump.option string) t.input_id
      t.user_id
      t.job_id

  let t =
    let rep =
      Caqti_type.(tup3
        (tup4
      Caqti_type.(t11
          uuid
          (tup2
            Rep.ptime
            Rep.ptime)
          (tup2
          Rep.ptime
          execution_result
          console)
          (tup2
          fpath
          fpath
          string
            (option Rep.id)))
          id
          id)
          (option (Rep.id `build_artifact))
          (option octets)
          (id `user)
          (id `job))
    in
    let encode { uuid; start; finish; result; console; script; main_binary; user_id; job_id } =
      Ok ((uuid, (start, finish), (result, console), (script, main_binary)), user_id, job_id)
    let encode { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id } =
      Ok (uuid, start, finish, result, console, script, platform, main_binary, input_id, user_id, job_id)
    in
    let decode ((uuid, (start, finish), (result, console), (script, main_binary)), user_id, job_id) =
      Ok { uuid; start; finish; result; console; script; main_binary; user_id; job_id }
    let decode (uuid, start, finish, result, console, script, platform, main_binary, input_id, user_id, job_id) =
      Ok { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id }
    in
    Caqti_type.custom ~encode ~decode rep

  module Meta = struct
    type t = {
      uuid : Uuidm.t;
      start : Ptime.t;
      finish : Ptime.t;
      result : Builder.execution_result;
      main_binary : id option;
      user_id : id;
      job_id : id;
    }

    let t =
      let rep =
        Caqti_type.(tup3
          (tup4
            uuid
            (tup2
              Rep.ptime
              Rep.ptime)
            execution_result
            (option Rep.id))
          id
          id)
      in
      let encode { uuid; start; finish; result; main_binary; user_id; job_id } =
        Ok ((uuid, (start, finish), result, main_binary), user_id, job_id)
      in
      let decode ((uuid, (start, finish), result, main_binary), user_id, job_id) =
        Ok { uuid; start; finish; result; main_binary; user_id; job_id }
      in
      Caqti_type.custom ~encode ~decode rep
  end
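Build.t and Meta.t above both follow the Caqti_type.custom pattern: pick a tuple representation, then provide encode/decode between it and the record. A minimal self-contained instance of the same pattern (the point type is illustrative only):

    type point = { x : int; y : int }

    let point : point Caqti_type.t =
      let encode { x; y } = Ok (x, y) in
      let decode (x, y) = Ok { x; y } in
      Caqti_type.(custom ~encode ~decode (t2 int int))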
  let migrate =
    Caqti_request.exec
      Caqti_type.unit
    Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE build (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         uuid VARCHAR(36) NOT NULL UNIQUE,
@ -284,155 +261,263 @@
         start_ps INTEGER NOT NULL,
         finish_d INTEGER NOT NULL,
         finish_ps INTEGER NOT NULL,
         result_kind TINYINT NOT NULL,
         result_code INTEGER,
         result_code INTEGER NOT NULL,
         result_msg TEXT,
         console BLOB NOT NULL,
         console TEXT NOT NULL,
         script TEXT NOT NULL,
         platform TEXT NOT NULL,
         main_binary INTEGER,
         user INTEGER NOT NULL,
         job INTEGER NOT NULL,
         input_id BLOB, -- sha256 (sha256<opam-switch> || sha256<build-environment> || sha256<system-packages>)

         FOREIGN KEY(main_binary) REFERENCES build_artifact(id),
         FOREIGN KEY(main_binary) REFERENCES build_artifact(id) DEFERRABLE INITIALLY DEFERRED,
         FOREIGN KEY(user) REFERENCES user(id),
         FOREIGN KEY(job) REFERENCES job(id)
       )
    |}

  let rollback =
    Caqti_request.exec
      Caqti_type.unit
      {| DROP TABLE IF EXISTS build |}

  let get_opt =
    Caqti_request.find_opt
      Caqti_type.int64
      t
      {| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
           result_kind, result_code, result_msg,
           console, script, main_binary, user, job
         FROM build
         WHERE id = ?
      |}
    Caqti_type.unit ->. Caqti_type.unit @@
    "DROP TABLE IF EXISTS build"

  let get_by_uuid =
    Caqti_request.find_opt
      Rep.uuid
      (Caqti_type.tup2 id t)
    Rep.uuid ->? Caqti_type.t2 (id `build) t @@
    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
         result_kind, result_code, result_msg,
         console, script, main_binary, user, job
         result_code, result_msg,
         console, script, platform, main_binary, input_id, user, job
       FROM build
       WHERE uuid = ?
    |}

  let get_all =
    Caqti_request.collect
      Caqti_type.int64
      (Caqti_type.tup2 id t)
    id `job ->* Caqti_type.t2 (id `build) t @@
    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
         result_kind, result_code, result_msg, console,
         script, main_binary, user, job
         result_code, result_msg, console,
         script, platform, main_binary, input_id, user, job
       FROM build
       WHERE job = ?
       ORDER BY start_d DESC, start_ps DESC
    |}

  let get_all_meta =
    Caqti_request.collect
      Caqti_type.int64
      (Caqti_type.tup3
        id Meta.t file_opt)
      {| SELECT build.id, build.uuid,
           build.start_d, build.start_ps, build.finish_d, build.finish_ps,
           build.result_kind, build.result_code, build.result_msg,
           build.main_binary, build.user, build.job,
           build_artifact.filepath, build_artifact.localpath, build_artifact.sha256, build_artifact.size
         FROM build, job
         LEFT JOIN build_artifact ON
           build.main_binary = build_artifact.id
         WHERE job.id = ? AND build.job = job.id
  let get_all_failed =
    Caqti_type.(t3 int int (option string)) ->* Caqti_type.t2 Caqti_type.string t @@
    {| SELECT job.name, b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_code, b.result_msg, b.console, b.script, b.platform,
         b.main_binary, b.input_id, b.user, b.job
       FROM build b
       INNER JOIN job ON job.id = b.job
       WHERE b.main_binary IS NULL AND ($3 IS NULL OR b.platform = $3)
       ORDER BY start_d DESC, start_ps DESC
       LIMIT $2
       OFFSET $1
    |}

  let get_all_artifact_sha =
    Caqti_type.(t2 (id `job) (option string)) ->* Caqti_type.octets @@
    {| SELECT DISTINCT a.sha256
       FROM build_artifact a, build b
       WHERE b.job = $1 AND b.main_binary = a.id
         AND ($2 IS NULL OR b.platform = $2)
       ORDER BY b.start_d DESC, b.start_ps DESC
    |}

  let get_failed_builds =
    Caqti_type.(t2 (id `job) (option string)) ->* t @@
    {| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
         result_code, result_msg, console, script,
         platform, main_binary, input_id, user, job
       FROM build
       WHERE job = $1
         AND main_binary IS NULL
         AND ($2 IS NULL OR platform = $2)
       ORDER BY start_d DESC, start_ps DESC
    |}

  let get_latest =
    Caqti_request.find_opt
      id
      Caqti_type.(tup3
        id
        Meta.t
        file_opt)
  let get_latest_successful_with_binary =
    Caqti_type.(t2 (id `job) string) ->? Caqti_type.t3 (id `build) t file @@
    {| SELECT b.id,
         b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_kind, b.result_code, b.result_msg,
         b.main_binary, b.user, b.job,
         a.filepath, a.localpath, a.sha256, a.size
       FROM build b
       LEFT JOIN build_artifact a ON
         b.main_binary = a.id
       WHERE b.job = ?
       ORDER BY start_d DESC, start_ps DESC
         b.result_code, b.result_msg, b.console, b.script,
         b.platform, b.main_binary, b.input_id, b.user, b.job,
         a.filepath, a.sha256, a.size
       FROM build b, build_artifact a
       WHERE b.main_binary = a.id AND b.job = $1 AND b.platform = $2
         AND b.main_binary IS NOT NULL
       ORDER BY b.start_d DESC, b.start_ps DESC
       LIMIT 1
    |}

  let get_latest_uuid =
    Caqti_request.find_opt
      id
      Caqti_type.(tup2 id Rep.uuid)
      {| SELECT b.id, b.uuid
         FROM build b
         WHERE b.job = ?
  let get_builds_older_than =
    Caqti_type.(t3 (id `job) (option string) Rep.ptime) ->* Caqti_type.t2 (id `build) t @@
    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
         result_code, result_msg, console, script,
         platform, main_binary, input_id, user, job
       FROM build
       WHERE job = $1
         AND ($2 IS NULL OR platform = $2)
         AND (finish_d < $3 OR (finish_d = $3 AND finish_ps <= $4))
       ORDER BY start_d DESC, start_ps DESC
       LIMIT 1
    |}

  let get_latest_successful_uuid =
    Caqti_request.find_opt
      id
      Rep.uuid
      {| SELECT b.uuid
         FROM build b
         WHERE b.job = ? AND b.result_kind = 0 AND b.result_code = 0
  let get_builds_excluding_latest_n =
    Caqti_type.(t3 (id `job) (option string) int) ->* Caqti_type.t2 (id `build) t @@
    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
         result_code, result_msg, console, script,
         platform, main_binary, input_id, user, job
       FROM build
       WHERE job = $1
         AND ($2 IS NULL OR platform = $2)
       ORDER BY start_d DESC, start_ps DESC
       LIMIT 1
       LIMIT -1 OFFSET $3
    |}
  (* "LIMIT -1 OFFSET n" is all rows except the first n *)

  let get_nth_latest_successful =
    Caqti_type.(t3 (id `job) (option string) int) ->? Caqti_type.t2 (id `build) t @@
    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
         result_code, result_msg, console, script,
         platform, main_binary, input_id, user, job
       FROM build
       WHERE job = $1
         AND ($2 IS NULL OR platform = $2)
         AND main_binary IS NOT NULL
       ORDER BY start_d DESC, start_ps DESC
       LIMIT 1 OFFSET $3
    |}

  let get_previous_successful =
    Caqti_request.find_opt
      id
      Caqti_type.(tup2 id Meta.t)
      {| SELECT b.id,
  let get_latest_successful =
    Caqti_type.(t2 (id `job) (option string)) ->? t @@
    {| SELECT
         b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_kind, b.result_code, b.result_msg,
         b.main_binary, b.user, b.job
       FROM build b, build b0
         b.result_code, b.result_msg, b.console, b.script,
         b.platform, b.main_binary, b.input_id, b.user, b.job
       FROM build b
       WHERE b.job = $1
         AND ($2 IS NULL OR b.platform = $2)
         AND b.main_binary IS NOT NULL
       ORDER BY b.start_d DESC, b.start_ps DESC
       LIMIT 1
    |}

  let get_previous_successful_different_output =
    id `build ->? t @@
    {| SELECT
         b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_code, b.result_msg, b.console, b.script,
         b.platform, b.main_binary, b.input_id, b.user, b.job
       FROM build b, build b0, build_artifact a, build_artifact a0
       WHERE b0.id = ? AND b0.job = b.job AND
         b.result_kind = 0 AND b.result_code = 0 AND
         b.platform = b0.platform AND
         b.main_binary IS NOT NULL AND
         a.id = b.main_binary AND a0.id = b0.main_binary AND
         a.sha256 <> a0.sha256 AND
         (b0.start_d > b.start_d OR b0.start_d = b.start_d AND b0.start_ps > b.start_ps)
       ORDER BY b.start_d DESC, b.start_ps DESC
       LIMIT 1
    |}

  let get_next_successful_different_output =
    id `build ->? t @@
    {| SELECT
         b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_code, b.result_msg, b.console, b.script,
         b.platform, b.main_binary, b.input_id, b.user, b.job
       FROM build b, build b0, build_artifact a, build_artifact a0
       WHERE b0.id = ? AND b0.job = b.job AND
         b.platform = b0.platform AND
         b.main_binary IS NOT NULL AND
         a.id = b.main_binary AND a0.id = b0.main_binary AND
         a.sha256 <> a0.sha256 AND
         (b0.start_d < b.start_d OR b0.start_d = b.start_d AND b0.start_ps < b.start_ps)
       ORDER BY b.start_d ASC, b.start_ps ASC
       LIMIT 1
    |}

  let get_same_input_same_output_builds =
    id `build ->* t @@
    {| SELECT b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_code, b.result_msg, b.console, b.script,
         b.platform, b.main_binary, b.input_id, b.user, b.job
       FROM build b0, build_artifact a0, build b, build_artifact a
       WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 = a.sha256
         AND b.main_binary = a.id AND b.id <> b0.id AND b0.input_id = b.input_id
       ORDER BY b.start_d DESC, b.start_ps DESC
    |}

  let get_same_input_different_output_hashes =
    id `build ->* Caqti_type.octets @@
    {| SELECT DISTINCT a.sha256
       FROM build b0, build_artifact a0, build b, build_artifact a
       WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 <> a.sha256
         AND b.main_binary = a.id AND b.id <> b0.id AND b0.input_id = b.input_id
       ORDER BY b.start_d DESC, b.start_ps DESC
    |}

  let get_different_input_same_output_input_ids =
    id `build ->* Caqti_type.octets @@
    {| SELECT DISTINCT b.input_id
       FROM build b0, build_artifact a0, build b, build_artifact a
       WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 = a.sha256
         AND b.main_binary = a.id AND b0.input_id <> b.input_id
    |}

  let get_one_by_input_id =
    Caqti_type.octets ->! t @@
    {| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
         result_code, result_msg, console, script,
         platform, main_binary, input_id, user, job
       FROM build
       WHERE input_id = ?
       ORDER BY start_d DESC, start_ps DESC
       LIMIT 1
    |}

  let get_platforms_for_job =
    id `job ->* Caqti_type.string @@
    "SELECT DISTINCT platform FROM build WHERE job = ? ORDER BY platform"

  let add =
    Caqti_request.exec
      t
    t ->. Caqti_type.unit @@
    {| INSERT INTO build
         (uuid, start_d, start_ps, finish_d, finish_ps,
          result_kind, result_code, result_msg, console, script, main_binary, user, job)
          result_code, result_msg, console, script, platform, main_binary, input_id, user, job)
       VALUES
         (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
         (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    |}

  let get_by_hash =
    Caqti_request.find_opt
      Rep.cstruct
      (Caqti_type.tup2
        Caqti_type.string
        t)
    Caqti_type.octets ->! t @@
    {| SELECT
         b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_code, b.result_msg, b.console, b.script,
         b.platform, b.main_binary, b.input_id, b.user, b.job
       FROM build_artifact a
       INNER JOIN build b ON b.id = a.build
       WHERE a.sha256 = ?
       ORDER BY b.start_d DESC, b.start_ps DESC
       LIMIT 1
    |}

  let get_with_main_binary_by_hash =
    Caqti_type.octets ->! Caqti_type.t2 t file_opt @@
    {| SELECT b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_code, b.result_msg, b.console, b.script,
         b.platform, b.main_binary, b.input_id, b.user, b.job,
         a.filepath, a.sha256, a.size
       FROM build_artifact a
       INNER JOIN build b ON b.id = a.build
       WHERE a.sha256 = ?
       ORDER BY b.start_d DESC, b.start_ps DESC
       LIMIT 1
    |}

  let get_with_jobname_by_hash =
    Caqti_type.octets ->? Caqti_type.t2 Caqti_type.string t @@
    {| SELECT job.name,
         b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_kind, b.result_code, b.result_msg,
         b.console, b.script, b.main_binary, b.user, b.job
         b.result_code, b.result_msg,
         b.console, b.script, b.platform, b.main_binary, b.input_id, b.user, b.job
       FROM build_artifact a
       INNER JOIN build b ON b.id = a.build
       INNER JOIN job ON job.id = b.job
@ -442,20 +527,17 @@ module Build = struct
    |}

  let set_main_binary =
    Caqti_request.exec
      (Caqti_type.tup2 id id)
      "UPDATE build SET main_binary = ?2 WHERE id = ?1"
    Caqti_type.t2 (id `build) (id `build_artifact) ->. Caqti_type.unit @@
    "UPDATE build SET main_binary = $2 WHERE id = $1"

  let remove =
    Caqti_request.exec
      id
    id `build ->. Caqti_type.unit @@
    "DELETE FROM build WHERE id = ?"
end

module User = struct
  let migrate =
    Caqti_request.exec
      Caqti_type.unit
    Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE user (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         username VARCHAR(255) NOT NULL UNIQUE,
@ -469,14 +551,11 @@ module User = struct
    |}

  let rollback =
    Caqti_request.exec
      Caqti_type.unit
    Caqti_type.unit ->. Caqti_type.unit @@
    "DROP TABLE IF EXISTS user"

  let get_user =
    Caqti_request.find_opt
      Caqti_type.string
      (Caqti_type.tup2 id user_info)
    Caqti_type.string ->? Caqti_type.t2 (id `user) user_info @@
    {| SELECT id, username, password_hash, password_salt,
         scrypt_n, scrypt_r, scrypt_p, restricted
       FROM user
@ -484,47 +563,36 @@ module User = struct
    |}

  let get_all =
    Caqti_request.collect
      Caqti_type.unit
      Caqti_type.string
    Caqti_type.unit ->* Caqti_type.string @@
    "SELECT username FROM user"

  let add =
    Caqti_request.exec
      user_info
    user_info ->. Caqti_type.unit @@
    {| INSERT INTO user (username, password_hash, password_salt,
         scrypt_n, scrypt_r, scrypt_p, restricted)
       VALUES (?, ?, ?, ?, ?, ?, ?)
    |}

  let remove =
    Caqti_request.exec
      id
      "DELETE FROM user WHERE id = ?"

  let remove_user =
    Caqti_request.exec
      Caqti_type.string
    Caqti_type.string ->. Caqti_type.unit @@
    "DELETE FROM user WHERE username = ?"

  let update_user =
    Caqti_request.exec
      user_info
    user_info ->. Caqti_type.unit @@
    {| UPDATE user
       SET password_hash = ?2,
           password_salt = ?3,
           scrypt_n = ?4,
           scrypt_r = ?5,
           scrypt_p = ?6,
           restricted = ?7
       WHERE username = ?1
       SET password_hash = $2,
           password_salt = $3,
           scrypt_n = $4,
           scrypt_r = $5,
           scrypt_p = $6,
           restricted = $7
       WHERE username = $1
    |}
end

module Access_list = struct
  let migrate =
    Caqti_request.exec
      Caqti_type.unit
    Caqti_type.unit ->. Caqti_type.unit @@
    {| CREATE TABLE access_list (
         id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
         user INTEGER NOT NULL,
@ -537,30 +605,35 @@ module Access_list = struct
|
|||
|}
|
||||
|
||||
let rollback =
|
||||
Caqti_request.exec
|
||||
Caqti_type.unit
|
||||
Caqti_type.unit ->. Caqti_type.unit @@
|
||||
"DROP TABLE IF EXISTS access_list"
|
||||
|
||||
let get =
|
||||
Caqti_request.find
|
||||
Caqti_type.(tup2 Rep.id Rep.id)
|
||||
Rep.id
|
||||
Caqti_type.t2 (id `user) (id `job) ->! id `access_list @@
|
||||
"SELECT id FROM access_list WHERE user = ? AND job = ?"
|
||||
|
||||
let add =
|
||||
Caqti_request.exec
|
||||
Caqti_type.(tup2 Rep.id Rep.id)
|
||||
Caqti_type.t2 (id `user) (id `job) ->. Caqti_type.unit @@
|
||||
"INSERT INTO access_list (user, job) VALUES (?, ?)"
|
||||
|
||||
let remove =
|
||||
Caqti_request.exec
|
||||
Caqti_type.(tup2 Rep.id Rep.id)
|
||||
Caqti_type.t2 (id `user) (id `job) ->. Caqti_type.unit @@
|
||||
"DELETE FROM access_list WHERE user = ? AND job = ?"
|
||||
|
||||
let remove_by_job =
|
||||
id `job ->. Caqti_type.unit @@
|
||||
"DELETE FROM access_list WHERE job = ?"
|
||||
|
||||
let remove_all_by_username =
|
||||
Caqti_request.exec
|
||||
Caqti_type.string
|
||||
"DELETE FROM access_list, user WHERE access_list.user = user.id AND user.username = ?"
|
||||
Caqti_type.string ->. Caqti_type.unit @@
|
||||
{| DELETE FROM access_list
|
||||
WHERE access_list.id IN (
|
||||
SELECT access_list.id
|
||||
FROM access_list
|
||||
INNER JOIN user ON access_list.user = user.id
|
||||
WHERE user.username = ?
|
||||
)
|
||||
|}
|
||||
|
||||
end
|
||||
|
||||
|
@ -568,26 +641,48 @@ let migrate = [
|
|||
Job.migrate;
|
||||
Build.migrate;
|
||||
Build_artifact.migrate;
|
||||
Build_file.migrate;
|
||||
User.migrate;
|
||||
Access_list.migrate;
|
||||
Caqti_request.exec Caqti_type.unit
|
||||
Tag.migrate;
|
||||
Job_tag.migrate;
|
||||
Caqti_type.unit ->. Caqti_type.unit @@
|
||||
"CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)";
|
||||
Caqti_type.unit ->. Caqti_type.unit @@
|
||||
"CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) WHERE main_binary IS NULL";
|
||||
Caqti_type.unit ->. Caqti_type.unit @@
|
||||
"CREATE INDEX idx_build_input_id ON build(input_id)";
|
||||
Caqti_type.unit ->. Caqti_type.unit @@
|
||||
"CREATE INDEX idx_build_main_binary ON build(main_binary)";
|
||||
Caqti_type.unit ->. Caqti_type.unit @@
|
||||
"CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)";
|
||||
Caqti_type.unit ->. Caqti_type.unit @@
|
||||
"CREATE INDEX idx_build_artifact_build ON build_artifact(build)";
|
||||
set_current_version;
|
||||
set_application_id;
|
||||
]
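
Each entry in the migrate list is a (unit, unit, [ `Zero ]) Caqti_request.t, so the whole list can be folded over a single connection. A sketch under that assumption (illustration only, not part of the module; it stops at the first error):

let run_migrations (module Db : Caqti_lwt.CONNECTION) =
  Lwt_list.fold_left_s
    (fun r req ->
       match r with
       | Error _ as e -> Lwt.return e
       | Ok () -> Db.exec req ())
    (Ok ()) migrate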

let rollback = [
+  Job_tag.rollback;
+  Tag.rollback;
  Access_list.rollback;
  User.rollback;
  Build_file.rollback;
  Build_artifact.rollback;
  Build.rollback;
  Job.rollback;
-  Caqti_request.exec Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
+  "DROP INDEX IF EXISTS idx_build_artifact_build";
+  Caqti_type.unit ->. Caqti_type.unit @@
+  "DROP INDEX IF EXISTS idx_build_artifact_sha256";
+  Caqti_type.unit ->. Caqti_type.unit @@
+  "DROP INDEX IF EXISTS idx_build_failed";
+  Caqti_type.unit ->. Caqti_type.unit @@
+  "DROP INDEX IF EXISTS idx_build_input_id";
+  Caqti_type.unit ->. Caqti_type.unit @@
+  "DROP INDEX IF EXISTS idx_build_main_binary";
+  Caqti_type.unit ->. Caqti_type.unit @@
+  "DROP INDEX IF EXISTS idx_build_job_start";
-  Caqti_request.exec Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
  "PRAGMA user_version = 0";
-  Caqti_request.exec Caqti_type.unit
+  Caqti_type.unit ->. Caqti_type.unit @@
  "PRAGMA application_id = 0";
]

@@ -1,27 +1,27 @@
module Rep : sig
-  type id
+  type untyped_id
+  type 'a id
  type file = {
    filepath : Fpath.t;
-    localpath : Fpath.t;
-    sha256 : Cstruct.t;
+    sha256 : string;
    size : int;
  }

-  val id : id Caqti_type.t
+  val id_to_int64 : 'a id -> int64
+  val untyped_id : untyped_id Caqti_type.t
+  val id : 'a -> 'a id Caqti_type.t
  val uuid : Uuidm.t Caqti_type.t
  val ptime : Ptime.t Caqti_type.t
  val fpath : Fpath.t Caqti_type.t
-  val cstruct : Cstruct.t Caqti_type.t
  val file : file Caqti_type.t
  val execution_result : Builder.execution_result Caqti_type.t
  val console : (int * string) list Caqti_type.t
end
-type id = Rep.id
+type 'a id = 'a Rep.id

type file = Rep.file = {
  filepath : Fpath.t;
-  localpath : Fpath.t;
-  sha256 : Cstruct.t;
+  sha256 : string;
  size : int;
}

@@ -30,77 +30,67 @@ val application_id : int32
val current_version : int64

val get_application_id :
-  (unit, int32, [< `Many | `One | `Zero > `One ]) Caqti_request.t
+  (unit, int32, [ `One ]) Caqti_request.t

val set_application_id :
-  (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+  (unit, unit, [ `Zero ]) Caqti_request.t

val get_version :
-  (unit, int64, [< `Many | `One | `Zero > `One ]) Caqti_request.t
+  (unit, int64, [ `One ]) Caqti_request.t

val set_current_version :
-  (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+  (unit, unit, [ `Zero ]) Caqti_request.t

val last_insert_rowid :
-  (unit, id, [< `Many | `One | `Zero > `One ]) Caqti_request.t
+  (unit, 'a id, [ `One ]) Caqti_request.t

module Job : sig
  val migrate :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
  val rollback :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t

  val get :
-    (id, string, [< `Many | `One | `Zero > `One ])
+    ([`job] id, string, [ `One ])
    Caqti_request.t
  val get_id_by_name :
-    (string, id, [< `Many | `One | `Zero > `One `Zero ]) Caqti_request.t
-  val get_all :
-    (unit, id * string, [ `Many | `One | `Zero ]) Caqti_request.t
+    (string, [`job] id, [ `One | `Zero ]) Caqti_request.t
+  val get_all_with_section_synopsis :
+    (unit, [`job] id * string * string option * string option, [ `Many | `One | `Zero ]) Caqti_request.t
  val try_add :
-    (string, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+    (string, unit, [ `Zero ]) Caqti_request.t
  val remove :
-    (id, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+    ([`job] id, unit, [ `Zero ]) Caqti_request.t
end

+module Tag : sig
+  val get_id_by_name :
+    (string, [`tag] id, [ `One ]) Caqti_request.t
+  val try_add :
+    (string, unit, [ `Zero ]) Caqti_request.t
+end
+
+module Job_tag : sig
+  val add :
+    ([`tag] id * string * [`job] id, unit, [ `Zero ]) Caqti_request.t
+  val update :
+    ([`tag] id * string * [`job] id, unit, [ `Zero ]) Caqti_request.t
+  val get_value :
+    ([`tag] id * [`job] id, string, [ `One | `Zero ]) Caqti_request.t
+  val remove_by_job :
+    ([`job] id, unit, [ `Zero ]) Caqti_request.t
+end

module Build_artifact : sig
  val migrate :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
  val rollback :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t

-  val get_by_build :
-    (id * Fpath.t, id * file,
-     [< `Many | `One | `Zero > `One ]) Caqti_request.t
-
+  val get : ([`build_artifact] id, file, [ `One ]) Caqti_request.t
  val get_by_build_uuid :
-    (Uuidm.t * Fpath.t, id * file,
-     [< `Many | `One | `Zero > `One `Zero ])
+    (Uuidm.t * Fpath.t, [`build_artifact] id * file,
+     [ `One | `Zero ])
    Caqti_request.t
  val get_all_by_build :
-    (id, id * file, [ `Many | `One | `Zero ]) Caqti_request.t
+    ([`build] id, [`build_artifact] id * file, [ `Many | `One | `Zero ]) Caqti_request.t
+  val exists : (string, bool, [ `One ]) Caqti_request.t
  val add :
-    (file * id, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+    (file * [`build] id, unit, [ `Zero ]) Caqti_request.t
  val remove_by_build :
    (id, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
end

module Build_file : sig
  val migrate :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
  val rollback :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t

-  val get_by_build_uuid :
-    (Uuidm.t * Fpath.t, id * file,
-     [< `Many | `One | `Zero > `One `Zero ])
-    Caqti_request.t
-  val get_all_by_build :
-    (id, id * file, [ `Many | `One | `Zero ]) Caqti_request.t
-  val add :
-    (file * id, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
  val remove_by_build :
-    (id, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+    ([`build] id, unit, [ `Zero ]) Caqti_request.t
+  val remove :
+    ([`build_artifact] id, unit, [ `Zero ]) Caqti_request.t
end

module Build :

@@ -110,96 +100,98 @@ sig
    start : Ptime.t;
    finish : Ptime.t;
    result : Builder.execution_result;
-    console : (int * string) list;
-    script : string;
-    main_binary : id option;
-    user_id : id;
-    job_id : id;
+    console : Fpath.t;
+    script : Fpath.t;
+    platform : string;
+    main_binary : [`build_artifact] id option;
+    input_id : string option;
+    user_id : [`user] id;
+    job_id : [`job] id;
  }
-  module Meta :
-  sig
-    type t = {
-      uuid : Uuidm.t;
-      start : Ptime.t;
-      finish : Ptime.t;
-      result : Builder.execution_result;
-      main_binary : id option;
-      user_id : id;
-      job_id : id;
-    }
-  end

  val migrate :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
  val rollback :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+  val pp : t Fmt.t

-  val get_opt :
-    (id, t, [< `Many | `One | `Zero > `One `Zero ]) Caqti_request.t
  val get_by_uuid :
-    (Uuidm.t, id * t, [< `Many | `One | `Zero > `One `Zero ])
+    (Uuidm.t, [`build] id * t, [ `One | `Zero ])
    Caqti_request.t
  val get_all :
-    (id, id * t, [ `Many | `One | `Zero ]) Caqti_request.t
-  val get_all_meta :
-    (id, id * Meta.t * file option, [ `Many | `One | `Zero ]) Caqti_request.t
-  val get_latest :
-    (id, id * Meta.t * file option, [< `Many | `One | `Zero > `One `Zero ])
+    ([`job] id, [`build] id * t, [ `Many | `One | `Zero ]) Caqti_request.t
+  val get_all_failed :
+    (int * int * string option, string * t, [ `Many | `One | `Zero ]) Caqti_request.t
+  val get_all_artifact_sha :
+    ([`job] id * string option, string, [ `Many | `One | `Zero ]) Caqti_request.t
+  val get_latest_successful_with_binary :
+    ([`job] id * string, [`build] id * t * file, [ `One | `Zero ])
    Caqti_request.t
-  val get_latest_uuid :
-    (id, id * Uuidm.t, [< `Many | `One | `Zero > `One `Zero ])
+  val get_failed_builds :
+    ([`job] id * string option, t, [ `Many | `One | `Zero ]) Caqti_request.t
+  val get_latest_successful :
+    ([`job] id * string option, t, [ `One | `Zero ])
    Caqti_request.t
-  val get_latest_successful_uuid :
-    (id, Uuidm.t, [< `Many | `One | `Zero > `One `Zero ])
+  val get_builds_older_than :
+    ([`job] id * string option * Ptime.t, [`build] id * t, [ `Many | `One | `Zero ]) Caqti_request.t
+  val get_builds_excluding_latest_n :
+    ([`job] id * string option * int, [`build] id * t, [ `Many | `One | `Zero ]) Caqti_request.t
+  val get_nth_latest_successful :
+    ([`job] id * string option * int, [`build] id * t, [ `One | `Zero ]) Caqti_request.t
+  val get_previous_successful_different_output :
+    ([`build] id, t, [ `One | `Zero ])
+    Caqti_request.t
-  val get_previous_successful :
-    (id, id * Meta.t, [< `Many | `One | `Zero > `One `Zero ])
+  val get_next_successful_different_output :
+    ([`build] id, t, [ `One | `Zero ])
    Caqti_request.t
-  val add : (t, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+  val get_same_input_same_output_builds :
+    ([`build] id, t, [ `Many | `One | `Zero ]) Caqti_request.t
+  val get_same_input_different_output_hashes :
+    ([`build] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
+  val get_different_input_same_output_input_ids :
+    ([`build] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
+  val get_one_by_input_id :
+    (string, t, [ `One ]) Caqti_request.t
+  val get_platforms_for_job :
+    ([`job] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
+  val add : (t, unit, [ `Zero ]) Caqti_request.t
  val get_by_hash :
-    (Cstruct.t, string * t, [< `Many | `One | `Zero > `One `Zero ]) Caqti_request.t
-  val set_main_binary : (id * id, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
-  val remove : (id, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+    (string, t, [ `One ]) Caqti_request.t
+  val get_with_main_binary_by_hash :
+    (string, t * file option, [ `One ]) Caqti_request.t
+  val get_with_jobname_by_hash :
+    (string, string * t, [ `One | `Zero ]) Caqti_request.t
+  val set_main_binary : ([`build] id * [`build_artifact] id, unit, [ `Zero ]) Caqti_request.t
+  val remove : ([`build] id, unit, [ `Zero ]) Caqti_request.t
end

module User : sig
  val migrate :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
  val rollback :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
  val get_user :
-    (string, id * Builder_web_auth.scrypt Builder_web_auth.user_info,
-     [< `Many | `One | `Zero > `One `Zero ])
+    (string, [`user] id * Builder_web_auth.scrypt Builder_web_auth.user_info,
+     [ `One | `Zero ])
    Caqti_request.t
  val get_all :
    (unit, string, [ `Many | `One | `Zero ]) Caqti_request.t
  val add :
-    (Builder_web_auth.scrypt Builder_web_auth.user_info, unit, [< `Many | `One | `Zero > `Zero ])
+    (Builder_web_auth.scrypt Builder_web_auth.user_info, unit, [ `Zero ])
    Caqti_request.t
-  val remove : (id, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
  val remove_user :
-    (string, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+    (string, unit, [ `Zero ]) Caqti_request.t
  val update_user :
-    (Builder_web_auth.scrypt Builder_web_auth.user_info, unit, [< `Many | `One | `Zero > `Zero ])
+    (Builder_web_auth.scrypt Builder_web_auth.user_info, unit, [ `Zero ])
    Caqti_request.t
end

module Access_list : sig
  val migrate :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
  val rollback :
    (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
  val get :
-    (id * id, id, [< `Many | `One | `Zero > `One ]) Caqti_request.t
+    ([`user] id * [`job] id, [`access_list] id, [ `One ]) Caqti_request.t
  val add :
-    (id * id, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+    ([`user] id * [`job] id, unit, [ `Zero ]) Caqti_request.t
  val remove :
-    (id * id, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+    ([`user] id * [`job] id, unit, [ `Zero ]) Caqti_request.t
+  val remove_by_job :
+    ([`job] id, unit, [ `Zero ]) Caqti_request.t
  val remove_all_by_username :
-    (string, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t
+    (string, unit, [ `Zero ]) Caqti_request.t
end

val migrate :
-  (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t list
+  (unit, unit, [ `Zero ]) Caqti_request.t list
val rollback :
-  (unit, unit, [< `Many | `One | `Zero > `Zero ]) Caqti_request.t list
+  (unit, unit, [ `Zero ]) Caqti_request.t list

db/dune
@@ -1,3 +1,3 @@
(library
 (name builder_db)
- (libraries builder caqti caqti-driver-sqlite3 asn1-combinators mirage-crypto builder_web_auth))
+ (libraries builder caqti caqti-driver-sqlite3 asn1-combinators builder_web_auth))

@@ -1,8 +1,8 @@
module Asn = struct
  let decode_strict codec cs =
    match Asn.decode codec cs with
-    | Ok (a, cs) ->
-      if Cstruct.len cs = 0
+    | Ok (a, rest) ->
+      if String.length rest = 0
      then Ok a
      else Error "trailing bytes"
    | Error (`Parse msg) -> Error ("parse error: " ^ msg)

@@ -17,23 +17,27 @@ module Asn = struct
          (required ~label:"delta" int)
          (required ~label:"data" utf8_string)))

-  let console_of_cs, console_to_cs = projections_of console
+  let console_of_str, console_to_str = projections_of console
end

-type id = int64
-let id = Caqti_type.int64
+type untyped_id = int64
+let untyped_id = Caqti_type.int64
+type 'a id = untyped_id
+let id (_ : 'a) : 'a id Caqti_type.t = untyped_id
+let any_id : 'a id Caqti_type.t = untyped_id
+
+let id_to_int64 (id : 'a id) : int64 = id
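
The 'a id definition threads a phantom type through every table id, so confusing, say, a build id with a user id becomes a compile-time error while the runtime representation stays a plain int64. A self-contained sketch of the same trick (the `user and `build tags mirror the ones used in this diff):

module Id : sig
  type 'a t
  val of_int64 : 'a -> int64 -> 'a t
  val to_int64 : 'a t -> int64
end = struct
  type 'a t = int64
  let of_int64 _ i = i
  let to_int64 i = i
end

let user_id : [`user] Id.t = Id.of_int64 `user 42L
let build_id : [`build] Id.t = Id.of_int64 `build 42L
(* comparing user_id with build_id is rejected by the type checker,
   even though both are 42L underneath *)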

type file = {
  filepath : Fpath.t;
-  localpath : Fpath.t;
-  sha256 : Cstruct.t;
+  sha256 : string;
  size : int;
}

let uuid =
-  let encode uuid = Ok (Uuidm.to_bytes uuid) in
+  let encode uuid = Ok (Uuidm.to_string uuid) in
  let decode s =
-    Uuidm.of_bytes s
+    Uuidm.of_string s
    |> Option.to_result ~none:"failed to decode uuid"
  in
  Caqti_type.custom ~encode ~decode Caqti_type.string
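
Uuidm.to_bytes/of_bytes handled the raw 16-byte form; the new code stores the 36-character textual form, so the column is human-readable in the database. A round-trip sketch, assuming Uuidm's v4_gen generator:

let () =
  let uuid = Uuidm.v4_gen (Random.State.make_self_init ()) () in
  let s = Uuidm.to_string uuid in   (* "xxxxxxxx-xxxx-4xxx-...." *)
  assert (Uuidm.of_string s = Some uuid)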

@@ -43,7 +47,7 @@ let ptime =
  let encode t = Ok (Ptime.Span.to_d_ps (Ptime.to_span t)) in
  let decode (d, ps) = Ok (Ptime.v (d, ps))
  in
-  let rep = Caqti_type.(tup2 int int64) in
+  let rep = Caqti_type.(t2 int int64) in
  Caqti_type.custom ~encode ~decode rep

let fpath =

@@ -52,30 +56,25 @@ let fpath =
    |> Result.map_error (fun (`Msg s) -> s) in
  Caqti_type.custom ~encode ~decode Caqti_type.string

-let cstruct =
-  let encode t = Ok (Cstruct.to_string t) in
-  let decode s = Ok (Cstruct.of_string s) in
-  Caqti_type.custom ~encode ~decode Caqti_type.octets

let file =
-  let encode { filepath; localpath; sha256; size } =
-    Ok (filepath, localpath, sha256, size) in
-  let decode (filepath, localpath, sha256, size) =
-    Ok { filepath; localpath; sha256; size } in
-  Caqti_type.custom ~encode ~decode Caqti_type.(tup4 fpath fpath cstruct int)
+  let encode { filepath; sha256; size } =
+    Ok (filepath, sha256, size) in
+  let decode (filepath, sha256, size) =
+    Ok { filepath; sha256; size } in
+  Caqti_type.custom ~encode ~decode Caqti_type.(t3 fpath octets int)

let file_opt =
-  let rep = Caqti_type.(tup4 (option fpath) (option fpath) (option cstruct) (option int)) in
+  let rep = Caqti_type.(t3 (option fpath) (option octets) (option int)) in
  let encode = function
-    | Some { filepath; localpath; sha256; size } ->
-      Ok (Some filepath, Some localpath, Some sha256, Some size)
+    | Some { filepath; sha256; size } ->
+      Ok (Some filepath, Some sha256, Some size)
    | None ->
-      Ok (None, None, None, None)
+      Ok (None, None, None)
  in
  let decode = function
-    | (Some filepath, Some localpath, Some sha256, Some size) ->
-      Ok (Some { filepath; localpath; sha256; size })
-    | (None, None, None, None) ->
+    | (Some filepath, Some sha256, Some size) ->
+      Ok (Some { filepath; sha256; size })
+    | (None, None, None) ->
      Ok None
    | _ ->
      (* This should not happen if the database is well-formed *)

@@ -85,38 +84,44 @@ let file_opt =

let execution_result =
  let encode = function
-    | Builder.Exited v -> Ok (0, Some v, None)
-    | Builder.Signalled v -> Ok (1, Some v, None)
-    | Builder.Stopped v -> Ok (2, Some v, None)
-    | Builder.Msg msg -> Ok (3, None, Some msg)
+    | Builder.Exited v -> Ok (v, None)
+    | Builder.Signalled v -> Ok (v lsl 8, None)
+    | Builder.Stopped v -> Ok (v lsl 16, None)
+    | Builder.Msg msg -> Ok (65536, Some msg)
  in
-  let decode (kind, code, msg) =
-    match kind, code, msg with
-    | 0, Some v, None -> Ok (Builder.Exited v)
-    | 1, Some v, None -> Ok (Builder.Signalled v)
-    | 2, Some v, None -> Ok (Builder.Stopped v)
-    | 3, None, Some msg -> Ok (Builder.Msg msg)
-    | _ -> Error "bad encoding"
+  let decode (code, msg) =
+    match msg with
+    | Some m -> Ok (Builder.Msg m)
+    | None ->
+      if code <= 0xFF then
+        Ok (Builder.Exited code)
+      else if code <= 0xFFFF then
+        Ok (Builder.Signalled (code lsr 8))
+      else if code <= 0xFFFFFF then
+        Ok (Builder.Stopped (code lsr 16))
+      else
+        Error "bad encoding (unknown number)"
  in
-  let rep = Caqti_type.(tup3 int (option int) (option string)) in
+  let rep = Caqti_type.(t2 int (option string)) in
  Caqti_type.custom ~encode ~decode rep
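
The new encoding packs all three numeric outcomes into one integer column: exit codes occupy the low byte, signal numbers the second byte, stop codes the third, while Builder.Msg is recognised by its non-NULL msg column. Worked examples of the arithmetic:

let () =
  assert (9 lsl 8 = 2304);       (* Signalled 9 -> (2304, None); 2304 lsr 8 = 9 *)
  assert (3 lsl 16 = 196608);    (* Stopped 3 -> (196608, None); 196608 lsr 16 = 3 *)
  assert (196608 <= 0xFFFFFF)    (* so the Stopped branch of decode fires *)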

let console =
-  let encode console = Ok (Asn.console_to_cs console) in
-  let decode data = Asn.console_of_cs data in
-  Caqti_type.custom ~encode ~decode cstruct
+  let encode console = Ok (Asn.console_to_str console) in
+  let decode data = Asn.console_of_str data in
+  Caqti_type.(custom ~encode ~decode octets)

let user_info =
-  let rep = Caqti_type.(tup4 string cstruct cstruct (tup4 int int int bool)) in
+  let rep = Caqti_type.(t7 string octets octets int int int bool) in
  let encode { Builder_web_auth.username;
               password_hash = `Scrypt (password_hash, password_salt, {
                   Builder_web_auth.scrypt_n; scrypt_r; scrypt_p
                 });
               restricted; }
    =
-    Ok (username, password_hash, password_salt, (scrypt_n, scrypt_r, scrypt_p, restricted))
+    Ok (username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted)
  in
-  let decode (username, password_hash, password_salt, (scrypt_n, scrypt_r, scrypt_p, restricted)) =
+  let decode (username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) =
    Ok { Builder_web_auth.username;
         password_hash =
           `Scrypt (password_hash, password_salt,

@@ -124,3 +129,9 @@ let user_info =
                    scrypt_r; scrypt_p });
         restricted; } in
  Caqti_type.custom ~encode ~decode rep

+(* this doesn't really belong in this module, but we need access to the type of [id] *)
+let last_insert_rowid =
+  let open Caqti_request.Infix in
+  Caqti_type.unit ->! any_id @@
+  "SELECT last_insert_rowid()"
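
Since last_insert_rowid is polymorphic in the phantom tag, it can follow any INSERT and yield an id at the right type. A sketch of the pairing on one connection, where ins and x stand for any exec request and its parameter:

let insert_and_get_id (module Db : Caqti_lwt.CONNECTION) ins x =
  let open Lwt_result.Infix in
  Db.exec ins x >>= fun () ->
  Db.find last_insert_rowid ()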

@@ -1,2 +1,3 @@
(lang dune 2.7)
(name builder-web)
+(formatting disabled)

@@ -6,29 +6,14 @@ open Lwt.Syntax

let realm = "builder-web"

-let user_info_local = Dream.new_local ~name:"user_info" ()
+let user_info_field = Dream.new_field ~name:"user_info" ()

let authenticate handler = fun req ->
  let unauthorized () =
    let headers = ["WWW-Authenticate", Printf.sprintf "Basic realm=\"%s\"" realm] in
    Dream.respond ~headers ~status:`Unauthorized "Forbidden!"
  in
-  match Dream.session "username" req with
-  | Some username ->
-    let* user_info = Dream.sql req (Model.user username) in
-    begin match user_info with
-      | Ok (Some (user_id, user_info)) ->
-        handler (Dream.with_local user_info_local (user_id, user_info) req)
-      | Ok None ->
-        Log.warn (fun m -> m "User %S from session doesn't exist" username);
-        let* () = Dream.invalidate_session req in
-        Dream.respond ~status:`Internal_Server_Error "Internal server error"
-      | Error e ->
-        Log.warn (fun m -> m "Error getting user: %a" Model.pp_error e);
-        Dream.respond ~status:`Internal_Server_Error "Internal server error"
-    end
-  | None ->
-    match Dream.header "Authorization" req with
+  match Dream.header req "Authorization" with
  | None -> unauthorized ()
  | Some data -> match String.split_on_char ' ' data with
    | [ "Basic" ; user_pass ] ->

@@ -46,7 +31,7 @@ let authenticate handler = fun req ->
      match user_info with
      | Ok (Some (id, user_info)) ->
        if Builder_web_auth.verify_password pass user_info
-        then handler (Dream.with_local user_info_local (id, user_info) req)
+        then (Dream.set_field req user_info_field (id, user_info); handler req)
        else unauthorized ()
      | Ok None ->
        let _ : _ Builder_web_auth.user_info =

@@ -60,7 +45,7 @@ let authenticate handler = fun req ->
      Dream.respond ~status:`Bad_Request "Couldn't decode authorization"

let authorized req job_name =
-  match Dream.local user_info_local req with
+  match Dream.field req user_info_field with
  | None -> Lwt.return (Error (`Msg "not authenticated"))
  | Some (id, user) ->
    if user.restricted then
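
The change swaps Dream's immutable per-request "locals" for mutable "fields". A minimal sketch of the field API assumed here (declared once, then set and read per request):

let counter_field : int Dream.field = Dream.new_field ~name:"counter" ()

let with_counter handler req =
  Dream.set_field req counter_field 0;   (* attach a value to this request *)
  handler req

let read_counter req =
  Dream.field req counter_field          (* : int option *)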

@@ -9,21 +9,24 @@ let pp_error ppf = function
  | #Model.error as e -> Model.pp_error ppf e
  | `Wrong_version (application_id, version) ->
    if application_id = Builder_db.application_id
-    then Format.fprintf ppf "Wrong database version: %Ld" version
-    else Format.fprintf ppf "Wrong database application id: %ld" application_id
+    then Format.fprintf ppf "Wrong database version: %Ld, expected %Ld" version Builder_db.current_version
+    else Format.fprintf ppf "Wrong database application id: %ld, expected %ld" application_id Builder_db.application_id

let init_datadir datadir =
-  let open Rresult.R.Infix in
-  Bos.OS.Dir.exists datadir >>= (fun exists ->
+  let ( let* ) = Result.bind and ( let+ ) x f = Result.map f x in
+  let* exists = Bos.OS.Dir.exists datadir in
+  let* () =
    if exists
    then Ok ()
-    else Error (`Msg "Datadir does not exist")) >>= fun () ->
-  Bos.OS.Dir.create ~path:false (Model.staging datadir) >>| fun _ -> ()
+    else Error (`Msg "Datadir does not exist")
+  in
+  let+ _ = Bos.OS.Dir.create ~path:false (Model.staging datadir) in
+  ()
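
The Rresult combinators are replaced by locally defined binding operators over the standard Result type. The two operators in isolation, with a small made-up example: let* sequences Results, let+ maps over the final one.

let ( let* ) = Result.bind
let ( let+ ) x f = Result.map f x

let parse_pair s1 s2 : (int * int, string) result =
  let* a = int_of_string_opt s1 |> Option.to_result ~none:"not a number" in
  let+ b = int_of_string_opt s2 |> Option.to_result ~none:"not a number" in
  (a, b)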

let init dbpath datadir =
-  Rresult.R.bind (init_datadir datadir) @@ fun () ->
+  Result.bind (init_datadir datadir) @@ fun () ->
  Lwt_main.run (
-    Caqti_lwt.connect
+    Caqti_lwt_unix.connect
      (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
    >>= fun (module Db : Caqti_lwt.CONNECTION) ->
    Db.find Builder_db.get_application_id () >>= fun application_id ->

@@ -34,17 +37,27 @@ let init dbpath datadir =
    >>= fun () ->
    Model.cleanup_staging datadir (module Db))

-let pp_exec ppf (job, uuid, _, _, _, _, _) =
+let pp_exec ppf ((job : Builder.script_job), uuid, _, _, _, _, _) =
  Format.fprintf ppf "%s(%a)" job.Builder.name Uuidm.pp uuid

let safe_seg path =
  if Fpath.is_seg path && not (Fpath.is_rel_seg path)
  then Ok (Fpath.v path)
-  else Rresult.R.error_msgf "unsafe path %S" path
+  else Fmt.kstr (fun s -> Error (`Msg s)) "unsafe path %S" path

(* mime lookup with orb knowledge *)
+let append_charset = function
+  (* mime types from nginx:
+     http://nginx.org/en/docs/http/ngx_http_charset_module.html#charset_types *)
+  | "text/html" | "text/xml" | "text/plain" | "text/vnd.wap.wml"
+  | "application/javascript" | "application/rss+xml" | "application/atom+xml"
+    as content_type ->
+    content_type ^ "; charset=utf-8" (* default to utf-8 *)
+  | content_type -> content_type

let mime_lookup path =
-  match Fpath.to_string path with
+  append_charset
+    (match Fpath.to_string path with
     | "build-environment" | "opam-switch" | "system-packages" ->
       "text/plain"
     | _ ->

@@ -52,15 +65,35 @@ let mime_lookup path =
       then "text/plain"
       else if Fpath.is_prefix Fpath.(v "bin/") path
       then "application/octet-stream"
-      else Magic_mime.lookup (Fpath.to_string path)
+      else Magic_mime.lookup (Fpath.to_string path))

-let or_error_response r =
+let string_of_html =
+  Format.asprintf "%a" (Tyxml.Html.pp ())
+
+let is_accept_json req =
+  match Dream.header req "Accept" with
+  | Some accept when String.starts_with ~prefix:"application/json" accept ->
+    true
+  | _ -> false
+
+let or_error_response req r =
  let* r = r in
  match r with
  | Ok response -> Lwt.return response
-  | Error (text, status) -> Dream.respond ~status text
+  | Error (text, status) ->
+    if is_accept_json req then
+      let json_response = Yojson.Basic.to_string (`Assoc [ "error", `String text ]) in
+      Dream.json ~status json_response
+    else
+      Dream.respond ~status text
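
With this change, errors are content-negotiated on the Accept header: application/json clients get a JSON body, everyone else plain text. A sketch of the same negotiation factored out, reusing is_accept_json from above:

let error_body req text =
  if is_accept_json req then
    Yojson.Basic.to_string (`Assoc [ "error", `String text ])
  else
    text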

-let if_error ?(status = `Internal_Server_Error) ?(log=(fun e -> Log.warn (fun m -> m "%s: %a" (Dream.status_to_string status) pp_error e))) message r =
+let default_log_warn ~status e =
+  Log.warn (fun m -> m "%s: %a" (Dream.status_to_string status) pp_error e)
+
+let if_error
+    ?(status = `Internal_Server_Error)
+    ?(log = default_log_warn ~status)
+    message r =
  let* r = r in
  match r with
  | Error `Not_found ->

@@ -70,8 +103,12 @@ let if_error
    Lwt_result.fail (message, status)
  | Ok _ as r -> Lwt.return r

-let string_of_html =
-  Format.asprintf "%a" (Tyxml.Html.pp ())
+let not_found_error r =
+  let* r = r in
+  match r with
+  | Error `Not_found ->
+    Lwt_result.fail ("Resource not found", `Not_Found)
+  | Ok _ as r -> Lwt.return r

let get_uuid s =
  Lwt.return

@@ -81,93 +118,345 @@ let get_uuid s =
       | None -> Error ("Bad uuid", `Bad_Request)
     else Error ("Bad uuid", `Bad_Request))

-let add_routes datadir =
-  let datadir_global = Dream.new_global ~name:"datadir" (fun () -> datadir) in
-
-  let builder req =
-    Dream.sql req Model.jobs
+let main_binary_of_uuid uuid db =
+  Model.build uuid db
  |> if_error "Error getting job build"
    ~log:(fun e -> Log.warn (fun m -> m "Error getting job build: %a" pp_error e))
+  >>= fun (_id, build) ->
+  Model.not_found build.Builder_db.Build.main_binary
+  |> not_found_error
+  >>= fun main_binary ->
+  Model.build_artifact_by_id main_binary db
+  |> if_error "Error getting main binary"

+module Viz_aux = struct
+
+  let viz_type_to_string = function
+    | `Treemap -> "treemap"
+    | `Dependencies -> "dependencies"
+
+  let viz_dir ~cachedir ~viz_typ ~version =
+    let typ_str = viz_type_to_string viz_typ in
+    Fpath.(cachedir / Fmt.str "%s_%d" typ_str version)
+
+  let viz_path ~cachedir ~viz_typ ~version ~input_hash =
+    Fpath.(
+      viz_dir ~cachedir ~viz_typ ~version
+      / input_hash + "html"
+    )
+
+  let choose_versioned_viz_path
+      ~cachedir
+      ~viz_typ
+      ~viz_input_hash
+      ~current_version =
+    let ( >>= ) = Result.bind in
+    let rec aux current_version =
+      let path =
+        viz_path ~cachedir
+          ~viz_typ
+          ~version:current_version
+          ~input_hash:viz_input_hash in
+      Bos.OS.File.exists path >>= fun path_exists ->
+      if path_exists then Ok path else (
+        if current_version = 1 then
+          Error (`Msg (Fmt.str "viz '%s': there exists no version of the requested \
+                                visualization"
+                         (viz_type_to_string viz_typ)))
+        else
+          aux @@ pred current_version
+      )
+    in
+    aux current_version
+
+  let get_viz_version_from_dirs ~cachedir ~viz_typ =
+    let ( >>= ) = Result.bind in
+    Bos.OS.Dir.contents cachedir >>= fun versioned_dirs ->
+    let max_cached_version =
+      let viz_typ_str = viz_type_to_string viz_typ ^ "_" in
+      versioned_dirs
+      |> List.filter_map (fun versioned_dir ->
+          match Bos.OS.Dir.exists versioned_dir with
+          | Error (`Msg err) ->
+            Logs.warn (fun m -> m "%s" err);
+            None
+          | Ok false -> None
+          | Ok true ->
+            let dir_str = Fpath.filename versioned_dir in
+            if not (String.starts_with ~prefix:viz_typ_str dir_str) then
+              None
+            else
+              try
+                String.(sub dir_str
+                          (length viz_typ_str)
+                          (length dir_str - length viz_typ_str))
+                |> int_of_string
+                |> Option.some
+              with Failure _ ->
+                Logs.warn (fun m ->
+                    m "Failed to read visualization version from directory: '%s'"
+                      (Fpath.to_string versioned_dir));
+                None
+        )
+      |> List.fold_left Int.max (-1)
+    in
+    if max_cached_version = -1 then
+      Result.error @@
+      `Msg (Fmt.str "Couldn't find any visualization version of %s"
+              (viz_type_to_string viz_typ))
+    else
+      Result.ok max_cached_version
+
+  let hash_viz_input ~uuid typ db =
+    let open Builder_db in
+    main_binary_of_uuid uuid db >>= fun main_binary ->
+    Model.build uuid db
+    |> if_error "Error getting build" >>= fun (build_id, _build) ->
+    Model.build_artifacts build_id db
+    |> if_error "Error getting build artifacts" >>= fun artifacts ->
+    match typ with
+    | `Treemap ->
+      let debug_binary =
+        let bin = Fpath.base main_binary.filepath in
+        List.find_opt
+          (fun p -> Fpath.(equal (bin + "debug") (base p.filepath)))
+          artifacts
+      in
+      begin
+        Model.not_found debug_binary
+        |> not_found_error >>= fun debug_binary ->
+        debug_binary.sha256
+        |> Ohex.encode
+        |> Lwt_result.return
+      end
+    | `Dependencies ->
+      let opam_switch =
+        List.find_opt
+          (fun p -> Fpath.(equal (v "opam-switch") (base p.filepath)))
+          artifacts
+      in
+      Model.not_found opam_switch
+      |> not_found_error >>= fun opam_switch ->
+      opam_switch.sha256
+      |> Ohex.encode
+      |> Lwt_result.return
+
+  let try_load_cached_visualization ~cachedir ~uuid viz_typ db =
+    Lwt.return (get_viz_version_from_dirs ~cachedir ~viz_typ)
+    |> if_error "Error getting visualization version" >>= fun latest_viz_version ->
+    hash_viz_input ~uuid viz_typ db >>= fun viz_input_hash ->
+    (choose_versioned_viz_path
+       ~cachedir
+       ~current_version:latest_viz_version
+       ~viz_typ
+       ~viz_input_hash
+     |> Lwt.return
+     |> if_error "Error finding a version of the requested visualization")
+    >>= fun viz_path ->
+    Lwt_result.catch (fun () ->
+        Lwt_io.with_file ~mode:Lwt_io.Input
+          (Fpath.to_string viz_path)
+          Lwt_io.read
+      )
+    |> Lwt_result.map_error (fun exn -> `Msg (Printexc.to_string exn))
+    |> if_error "Error getting cached visualization"
+
+end
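
A sketch of the on-disk layout these helpers produce, for a hypothetical cache directory (the hash value is a placeholder):

let () =
  let cachedir = Fpath.v "/var/cache/builder-web" in
  let p =
    Viz_aux.viz_path ~cachedir ~viz_typ:`Treemap ~version:2
      ~input_hash:"74657374"
  in
  (* choose_versioned_viz_path walks the version suffix downwards
     (treemap_2, then treemap_1) until an existing file is found *)
  assert (Fpath.to_string p = "/var/cache/builder-web/treemap_2/74657374.html")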

+let routes ~datadir ~cachedir ~configdir ~expired_jobs =
+  let builds ~all ?(filter_builds_later_than = 0) req =
+    let than =
+      if filter_builds_later_than = 0 then
+        Ptime.epoch
+      else
+        let n = Ptime.Span.v (filter_builds_later_than, 0L) in
+        let now = Ptime_clock.now () in
+        Ptime.Span.sub (Ptime.to_span now) n |> Ptime.of_span |>
+        Option.fold ~none:Ptime.epoch ~some:Fun.id
+    in
+    Dream.sql req Model.jobs_with_section_synopsis
+    |> if_error "Error getting jobs"
+      ~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e))
+    >>= fun jobs ->
    List.fold_right
-      (fun (job_id, job_name) r ->
+      (fun (job_id, job_name, section, synopsis) r ->
        r >>= fun acc ->
-        Dream.sql req (Model.build_meta job_id) >>= function
-        | Some (latest_build, latest_artifact) ->
-          Lwt_result.return ((job_name, latest_build, latest_artifact) :: acc)
+        Dream.sql req (Model.platforms_of_job job_id) >>= fun ps ->
+        List.fold_right (fun platform r ->
+            r >>= fun acc ->
+            Dream.sql req (Model.build_with_main_binary job_id platform) >>= function
+            | Some (build, artifact) ->
+              if Ptime.is_later ~than build.finish then
+                Lwt_result.return ((platform, build, artifact) :: acc)
+              else
+                Lwt_result.return acc
            | None ->
              Log.warn (fun m -> m "Job without builds: %s" job_name);
              Lwt_result.return acc)
+          ps (Lwt_result.return []) >>= fun platform_builds ->
+        if platform_builds = [] then
+          Lwt_result.return acc
+        else
+          let v = (job_name, synopsis, platform_builds) in
+          let section = Option.value ~default:"Uncategorized" section in
+          Lwt_result.return (Utils.String_map.add_or_create section v acc))
      jobs
-      (Lwt_result.return [])
+      (Lwt_result.return Utils.String_map.empty)
    |> if_error "Error getting jobs"
      ~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e))
    >>= fun jobs ->
-    let username = Dream.session "username" req in
-    Views.builder username (Dream.csrf_token req) jobs
-    |> string_of_html |> Dream.html |> Lwt_result.ok
+    Views.Builds.make ~all jobs |> string_of_html |> Dream.html |> Lwt_result.ok
  in

  let job req =
-    let job_name = Dream.param "job" req in
-    Dream.sql req (Model.job job_name)
+    let job_name = Dream.param req "job" in
+    let platform = Dream.query req "platform" in
+    (Dream.sql req (Model.job_and_readme job_name) >>= fun (job_id, readme) ->
+     Dream.sql req (Model.builds_grouped_by_output job_id platform) >|= fun builds ->
+     (readme, builds))
    |> if_error "Error getting job"
      ~log:(fun e -> Log.warn (fun m -> m "Error getting job: %a" pp_error e))
-    >>= fun builds ->
-    Views.job job_name builds |> string_of_html |> Dream.html |> Lwt_result.ok
+    >>= fun (readme, builds) ->
+    Views.Job.make ~failed:false ~job_name ~platform ~readme builds
+    |> string_of_html |> Dream.html |> Lwt_result.ok
  in

-  let redirect_latest req =
-    let job_name = Dream.param "job" req in
-    let path = Dream.path req |> String.concat "/" in
+  let job_with_failed req =
+    let job_name = Dream.param req "job" in
+    let platform = Dream.query req "platform" in
+    (Dream.sql req (Model.job_and_readme job_name) >>= fun (job_id, readme) ->
+     Dream.sql req (Model.builds_grouped_by_output_with_failed job_id platform) >|= fun builds ->
+     (readme, builds))
+    |> if_error "Error getting job"
+      ~log:(fun e -> Log.warn (fun m -> m "Error getting job: %a" pp_error e))
+    >>= fun (readme, builds) ->
+    Views.Job.make ~failed:true ~job_name ~platform ~readme builds
+    |> string_of_html |> Dream.html |> Lwt_result.ok
+  in

+  let redirect_latest req ~job_name ~platform ~artifact =
    (Dream.sql req (Model.job_id job_name) >>= Model.not_found >>= fun job_id ->
-     Dream.sql req (Model.latest_successful_build_uuid job_id))
+     Dream.sql req (Model.latest_successful_build_uuid job_id platform))
    >>= Model.not_found
    |> if_error "Error getting job" >>= fun build ->
    Dream.redirect req
-      (Fmt.strf "/job/%s/build/%a/%s" job_name Uuidm.pp build path)
+      (Link.Job_build_artifact.make_from_string ~job_name ~build ~artifact ())
    |> Lwt_result.ok
  in

+  let redirect_latest req =
+    let job_name = Dream.param req "job" in
+    let platform = Dream.query req "platform" in
+    let artifact =
+      (* FIXME Dream.path deprecated *)
+      let path = begin[@alert "-deprecated"] Dream.path req end in
+      if path = [] then
+        "" (* redirect without trailing slash *)
+      else
+        "/" ^ (List.map Uri.pct_encode path |> String.concat "/")
+    in
+    redirect_latest req ~job_name ~platform ~artifact
+
+  and redirect_latest_no_slash req =
+    let job_name = Dream.param req "job" in
+    let platform = Dream.query req "platform" in
+    redirect_latest req ~job_name ~platform ~artifact:""
+  in

+  let redirect_main_binary req =
+    let job_name = Dream.param req "job"
+    and build = Dream.param req "build" in
+    get_uuid build >>= fun uuid ->
+    Dream.sql req (main_binary_of_uuid uuid) >>= fun main_binary ->
+    let artifact = `File main_binary.Builder_db.filepath in
+    Link.Job_build_artifact.make ~job_name ~build:uuid ~artifact ()
+    |> Dream.redirect req
+    |> Lwt_result.ok
+  in

+  let job_build_viz viz_typ req =
+    let _job_name = Dream.param req "job"
+    and build = Dream.param req "build" in
+    get_uuid build >>= fun uuid ->
+    Dream.sql req (Viz_aux.try_load_cached_visualization ~cachedir ~uuid viz_typ)
+    >>= fun svg_html ->
+    Lwt_result.ok (Dream.html svg_html)
+  in

  let job_build req =
-    let job_name = Dream.param "job" req
-    and build = Dream.param "build" req in
+    let job_name = Dream.param req "job"
+    and build = Dream.param req "build" in
    get_uuid build >>= fun uuid ->
-    (Dream.sql req (Model.build uuid) >>= fun (build_id, build) ->
-     Dream.sql req (Model.build_artifacts build_id) >>= fun artifacts ->
-     Dream.sql req (Model.latest_successful_build_uuid build.job_id) >>= fun latest_uuid ->
-     Dream.sql req (Model.previous_successful_build build_id) >|= fun previous_build ->
-     (build, artifacts, latest_uuid, previous_build))
+    Dream.sql req (fun conn ->
+        Model.build uuid conn >>= fun (build_id, build) ->
+        (match build.Builder_db.Build.main_binary with
+         | Some main_binary ->
+           Model.build_artifact_by_id main_binary conn |> Lwt_result.map Option.some
+         | None -> Lwt_result.return None) >>= fun main_binary ->
+        Model.build_artifacts build_id conn >>= fun artifacts ->
+        Model.builds_with_same_input_and_same_main_binary build_id conn >>= fun same_input_same_output ->
+        Model.builds_with_different_input_and_same_main_binary build_id conn >>= fun different_input_same_output ->
+        Model.builds_with_same_input_and_different_main_binary build_id conn >>= fun same_input_different_output ->
+        Model.latest_successful_build build.job_id (Some build.Builder_db.Build.platform) conn >>= fun latest ->
+        Model.next_successful_build_different_output build_id conn >>= fun next ->
+        Model.previous_successful_build_different_output build_id conn >|= fun previous ->
+        (build, main_binary, artifacts, same_input_same_output, different_input_same_output, same_input_different_output, latest, next, previous)
+      )
    |> if_error "Error getting job build"
      ~log:(fun e -> Log.warn (fun m -> m "Error getting job build: %a" pp_error e))
-    >>= fun (build, artifacts, latest_uuid, previous_build) ->
-    Views.job_build job_name build artifacts latest_uuid previous_build |> string_of_html |> Dream.html
-    |> Lwt_result.ok
+    >>= fun (build, main_binary, artifacts, same_input_same_output, different_input_same_output, same_input_different_output, latest, next, previous) ->
+    let solo5_manifest = Option.bind main_binary (Model.solo5_manifest datadir) in
+    if is_accept_json req then
+      let json_response =
+        `Assoc [
+          "job_name", `String job_name;
+          "uuid", `String (Uuidm.to_string build.uuid);
+          "platform", `String build.platform;
+          "start_time", `String (Ptime.to_rfc3339 build.start);
+          "finish_time", `String (Ptime.to_rfc3339 build.finish);
+          "main_binary", (match build.main_binary with Some _ -> `Bool true | None -> `Bool false)
+        ] |> Yojson.Basic.to_string
+      in
+      Dream.json ~status:`OK json_response |> Lwt_result.ok
+    else
+      Views.Job_build.make
+        ~job_name
+        ~build
+        ~artifacts
+        ~main_binary
+        ~solo5_manifest
+        ~same_input_same_output
+        ~different_input_same_output
+        ~same_input_different_output
+        ~latest ~next ~previous
+      |> string_of_html |> Dream.html |> Lwt_result.ok
  in
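
The JSON branch above serialises the build via Yojson. A sketch of the resulting value's shape; every field value here is a placeholder, not real data:

let example : Yojson.Basic.t =
  `Assoc [
    "job_name", `String "hello";
    "uuid", `String "00000000-0000-0000-0000-000000000000";
    "platform", `String "some-platform";
    "start_time", `String "2023-01-01T00:00:00-00:00";
    "finish_time", `String "2023-01-01T00:10:00-00:00";
    "main_binary", `Bool true;
  ]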

  let job_build_file req =
-    let datadir = Dream.global datadir_global req in
-    let _job_name = Dream.param "job" req
-    and build = Dream.param "build" req
-    and filepath = Dream.path req |> String.concat "/" in
-    let if_none_match = Dream.header "if-none-match" req in
+    let _job_name = Dream.param req "job"
+    and build = Dream.param req "build"
+    (* FIXME *)
+    and filepath = begin[@alert "-deprecated"] Dream.path req |> String.concat "/" end in
+    let if_none_match = Dream.header req "if-none-match" in
    (* XXX: We don't check safety of [file]. This should be fine however since
     * we don't use [file] for the filesystem but is instead used as a key for
     * lookup in the data table of the 'full' file. *)
    get_uuid build >>= fun build ->
-    Fpath.of_string filepath |> Rresult.R.open_error_msg |> Lwt_result.lift
+    Fpath.of_string filepath |> Lwt_result.lift
    |> if_error ~status:`Not_Found "File not found" >>= fun filepath ->
    Dream.sql req (Model.build_artifact build filepath)
    |> if_error "Error getting build artifact" >>= fun file ->
-    let etag = Base64.encode_string (Cstruct.to_string file.Builder_db.sha256) in
+    let etag = Base64.encode_string file.Builder_db.sha256 in
    match if_none_match with
    | Some etag' when etag = etag' ->
      Dream.empty `Not_Modified |> Lwt_result.ok
    | _ ->
      Model.build_artifact_data datadir file
      |> if_error "Error getting build artifact"
-        ~log:(fun e -> Log.warn (fun m -> m "Error getting build artifact data for file %a in %a: %a"
-                                    Fpath.pp file.Builder_db.filepath Fpath.pp file.Builder_db.localpath
+        ~log:(fun e -> Log.warn (fun m -> m "Error getting build artifact data for file %a: %a"
+                                    Fpath.pp file.Builder_db.filepath
                                    pp_error e)) >>= fun data ->
      let headers = [
        "Content-Type", mime_lookup file.Builder_db.filepath;

@@ -176,13 +465,65 @@ let add_routes datadir =
      Dream.respond ~headers data |> Lwt_result.ok
  in

+  let job_build_static_file (file : [< `Console | `Script ]) req =
+    let _job_name = Dream.param req "job"
+    and build = Dream.param req "build" in
+    get_uuid build >>= fun build ->
+    (match file with
+     | `Console ->
+       Dream.sql req (Model.build_console_by_uuid datadir build)
+     | `Script ->
+       Dream.sql req (Model.build_script_by_uuid datadir build))
+    |> if_error "Error getting data"
+      ~log:(fun e -> Log.warn (fun m -> m "Error getting script or console data for build %a: %a"
+                                  Uuidm.pp build pp_error e)) >>= fun data ->
+    let headers = [ "Content-Type", "text/plain; charset=utf-8" ] in
+    Dream.respond ~headers data |> Lwt_result.ok
+  in

+  let failed_builds req =
+    let platform = Dream.query req "platform" in
+    let to_int default s = Option.(value ~default (bind s int_of_string_opt)) in
+    let start = to_int 0 (Dream.query req "start") in
+    let count = to_int 10 (Dream.query req "count") in
+    Dream.sql req (Model.failed_builds ~start ~count platform)
+    |> if_error "Error getting data"
+      ~log:(fun e -> Log.warn (fun m -> m "Error getting failed builds: %a"
+                                  pp_error e)) >>= fun builds ->
+    Views.failed_builds ~start ~count builds
+    |> string_of_html |> Dream.html |> Lwt_result.ok
+  in
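
The pagination parameters are parsed defensively: a missing or malformed query value falls back to the given default. The helper in isolation:

let to_int default s = Option.(value ~default (bind s int_of_string_opt))

let () =
  assert (to_int 10 None = 10);          (* parameter absent *)
  assert (to_int 10 (Some "abc") = 10);  (* parameter malformed *)
  assert (to_int 10 (Some "25") = 25)    (* parameter valid *)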

+  let job_build_targz req =
+    let _job_name = Dream.param req "job"
+    and build = Dream.param req "build" in
+    get_uuid build >>= fun build ->
+    Dream.sql req (Model.build build)
+    |> if_error "Error getting build" >>= fun (build_id, build) ->
+    Dream.sql req (Model.build_artifacts build_id)
+    |> if_error "Error getting artifacts" >>= fun artifacts ->
+    Ptime.diff build.finish Ptime.epoch |> Ptime.Span.to_int_s
+    |> Option.to_result ~none:(`Msg "bad finish time") |> Result.map Int64.of_int
+    |> Lwt.return |> if_error "Internal server error" >>= fun finish ->
+    Dream.stream ~headers:["Content-Type", "application/tar+gzip"]
+      (fun stream ->
+         let+ r = Dream_tar.targz_response datadir finish artifacts stream in
+         match r with
+         | Ok () -> ()
+         | Error _ ->
+           Log.warn (fun m -> m "error assembling gzipped tar archive");
+           ())
+    |> Lwt_result.ok
+  in

  let upload req =
    let* body = Dream.body req in
-    Builder.Asn.exec_of_cs (Cstruct.of_string body) |> Lwt.return
+    Builder.Asn.exec_of_str body |> Lwt.return
    |> if_error ~status:`Bad_Request "Bad request"
      ~log:(fun e ->
-          Log.warn (fun m -> m "Received bad builder ASN.1: %a" pp_error e))
-    >>= fun (({ name ; _ }, uuid, _, _, _, _, _) as exec) ->
+          Log.warn (fun m -> m "Received bad builder ASN.1");
+          Log.debug (fun m -> m "Bad builder ASN.1: %a" pp_error e))
+    >>= fun ((({ name ; _ } : Builder.script_job), uuid, _, _, _, _, _) as exec) ->
    Log.debug (fun m -> m "Received build %a" pp_exec exec);
    Authorization.authorized req name
    |> if_error ~status:`Forbidden "Forbidden" >>= fun () ->

@@ -194,58 +535,135 @@ let add_routes datadir =
    | true ->
      Log.warn (fun m -> m "Build with same uuid exists: %a" pp_exec exec);
      Dream.respond ~status:`Conflict
-        (Fmt.strf "Build with same uuid exists: %a\n" Uuidm.pp uuid)
+        (Fmt.str "Build with same uuid exists: %a\n" Uuidm.pp uuid)
      |> Lwt_result.ok
    | false ->
-      let datadir = Dream.global datadir_global req in
-      (Lwt.return (Dream.local Authorization.user_info_local req |>
+      (Lwt.return (Dream.field req Authorization.user_info_field |>
                   Option.to_result ~none:(`Msg "no authenticated user")) >>= fun (user_id, _) ->
-       Dream.sql req (Model.add_build datadir user_id exec))
+       Dream.sql req (Model.add_build ~datadir ~cachedir ~configdir user_id exec))
      |> if_error "Internal server error"
        ~log:(fun e -> Log.warn (fun m -> m "Error saving build %a: %a" pp_exec exec pp_error e))
      >>= fun () -> Dream.respond "" |> Lwt_result.ok
  in

  let hash req =
-    Dream.query "sha256" req |> Option.to_result ~none:(`Msg "Missing sha256 query parameter") |> Lwt.return
+    Dream.query req "sha256" |> Option.to_result ~none:(`Msg "Missing sha256 query parameter")
+    |> Lwt.return
    |> if_error ~status:`Bad_Request "Bad request" >>= fun hash_hex ->
-    begin try Hex.to_cstruct (`Hex hash_hex) |> Lwt_result.return
+    begin try Ohex.decode hash_hex |> Lwt_result.return
      with Invalid_argument e -> Lwt_result.fail (`Msg ("Bad hex: " ^ e))
    end
    |> if_error ~status:`Bad_Request "Bad request" >>= fun hash ->
    Dream.sql req (Model.build_hash hash) >>= Model.not_found
    |> if_error "Internal server error" >>= fun (job_name, build) ->
    Dream.redirect req
-      (Fmt.strf "/job/%s/build/%a/" job_name Uuidm.pp build.Builder_db.Build.uuid)
+      (Link.Job_build.make ~job_name ~build:build.Builder_db.Build.uuid ())
    |> Lwt_result.ok
  in
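
The Hex library is replaced by Ohex, which works directly on strings. A sketch of the round trip relied on here, assuming Ohex's encode produces lowercase hex and decode raises Invalid_argument on malformed input (which the handler turns into a 400 response):

let () =
  assert (Ohex.encode "\x00\xff" = "00ff");
  assert (Ohex.decode "00ff" = "\x00\xff");
  match Ohex.decode "zz" with
  | exception Invalid_argument _ -> ()   (* handler maps this to Bad_Request *)
  | _ -> assert false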
|
||||
|
||||
let compare_opam req =
|
||||
let datadir = Dream.global datadir_global req in
let build_left = Dream.param "build_left" req in
let build_right = Dream.param "build_right" req in

let resolve_artifact_size id_opt conn =
  match id_opt with
  | None -> Lwt.return_ok None
  | Some id ->
    Model.build_artifact_by_id id conn >|= fun file ->
    Some file.size
in

let process_comparison req =
  let build_left = Dream.param req "build_left" in
  let build_right = Dream.param req "build_right" in
  get_uuid build_left >>= fun build_left ->
  get_uuid build_right >>= fun build_right ->
  (Dream.sql req (Model.build build_left) >>= fun (_id, build_left) ->
   Dream.sql req (Model.build build_right) >>= fun (_id, build_right) ->
   Dream.sql req (Model.build_artifact build_left.Builder_db.Build.uuid (Fpath.v "opam-switch")) >>=
  Dream.sql req (fun conn ->
      Model.build build_left conn >>= fun (_id, build_left) ->
      Model.build build_right conn >>= fun (_id, build_right) ->
      Model.build_artifact build_left.Builder_db.Build.uuid (Fpath.v "opam-switch") conn >>=
      Model.build_artifact_data datadir >>= fun switch_left ->
   Dream.sql req (Model.build_artifact build_right.Builder_db.Build.uuid (Fpath.v "opam-switch")) >>=
      Model.build_artifact build_left.Builder_db.Build.uuid (Fpath.v "build-environment") conn >>=
      Model.build_artifact_data datadir >>= fun build_env_left ->
      Model.build_artifact build_left.Builder_db.Build.uuid (Fpath.v "system-packages") conn >>=
      Model.build_artifact_data datadir >>= fun system_packages_left ->
      Model.build_artifact build_right.Builder_db.Build.uuid (Fpath.v "opam-switch") conn >>=
      Model.build_artifact_data datadir >>= fun switch_right ->
   Dream.sql req (Model.job_name build_left.job_id) >>= fun job_left ->
   Dream.sql req (Model.job_name build_right.job_id) >|= fun job_right ->
   (job_left, job_right, build_left, build_right, switch_left, switch_right))
      Model.build_artifact build_right.Builder_db.Build.uuid (Fpath.v "build-environment") conn >>=
      Model.build_artifact_data datadir >>= fun build_env_right ->
      Model.build_artifact build_right.Builder_db.Build.uuid (Fpath.v "system-packages") conn >>=
      Model.build_artifact_data datadir >>= fun system_packages_right ->
      resolve_artifact_size build_left.Builder_db.Build.main_binary conn >>= fun build_left_file_size ->
      resolve_artifact_size build_right.Builder_db.Build.main_binary conn >>= fun build_right_file_size ->
      Model.job_name build_left.job_id conn >>= fun job_left ->
      Model.job_name build_right.job_id conn >|= fun job_right ->
      (job_left, job_right, build_left, build_right, build_left_file_size,
       build_right_file_size, switch_left, build_env_left, system_packages_left,
       switch_right, build_env_right, system_packages_right))
  |> if_error "Internal server error"
  >>= fun (job_left, job_right, build_left, build_right, switch_left, switch_right) ->
  >>= fun (job_left, job_right, build_left, build_right, build_left_file_size,
           build_right_file_size, switch_left, build_env_left, system_packages_left,
           switch_right, build_env_right, system_packages_right) ->
  let env_diff = Utils.compare_env build_env_left build_env_right
  and pkg_diff = Utils.compare_pkgs system_packages_left system_packages_right
  in
  let switch_left = OpamFile.SwitchExport.read_from_string switch_left
  and switch_right = OpamFile.SwitchExport.read_from_string switch_right in
  Opamdiff.compare switch_left switch_right
  |> Views.compare_opam job_left job_right build_left build_right
  let opam_diff = Opamdiff.compare switch_left switch_right in
  (job_left, job_right, build_left, build_right, build_left_file_size, build_right_file_size, env_diff, pkg_diff, opam_diff)
  |> Lwt.return_ok

in
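(* Sketch (illustrative, not part of the original diff): what process_comparison
   gets back from Utils.compare_env (lib/utils.ml, further below) on two tiny
   build-environment strings -- a triple of added, removed and changed bindings. *)
let _compare_env_example () =
  let left = "PATH=/usr/bin\nOS=freebsd" in
  let right = "PATH=/usr/local/bin\nARCH=amd64\nOS=freebsd" in
  let added, removed, changed = Utils.compare_env left right in
  assert (added = [ "ARCH", "amd64" ]);
  assert (removed = []);
  assert (changed = [ "PATH", "/usr/bin", "/usr/local/bin" ])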
let compare_builds req =
  process_comparison req >>= fun
    (job_left, job_right, build_left, build_right, build_left_file_size, build_right_file_size, env_diff, pkg_diff, opam_diff) ->
  if is_accept_json req then
    let file_size_json = Option.fold ~none:`Null ~some:(fun size -> `Int size) in
    let json_response =
      `Assoc [
        "left", `Assoc [
          "job_name", `String job_left;
          "uuid", `String (Uuidm.to_string build_left.uuid);
          "platform", `String build_left.platform;
          "start_time", `String (Ptime.to_rfc3339 build_left.start);
          "finish_time", `String (Ptime.to_rfc3339 build_left.finish);
          "main_binary", `Bool (Option.is_some build_left_file_size);
          "main_binary_size", file_size_json build_left_file_size;
        ];
        "right", `Assoc [
          "job_name", `String job_right;
          "build", `String (Uuidm.to_string build_right.uuid);
          "platform", `String build_right.platform;
          "start_time", `String (Ptime.to_rfc3339 build_right.start);
          "finish_time", `String (Ptime.to_rfc3339 build_right.finish);
          "main_binary", `Bool (Option.is_some build_right_file_size);
          "main_binary_size", file_size_json build_right_file_size;
        ];
        "env_diff", Utils.diff_map_to_json env_diff;
        "package_diff", Utils.diff_map_to_json pkg_diff;
        "opam_diff", Opamdiff.compare_to_json opam_diff
      ] |> Yojson.Basic.to_string
    in
    Dream.json ~status:`OK json_response |> Lwt_result.ok
  else
    Views.compare_builds
      ~job_left ~job_right
      ~build_left ~build_right
      ~env_diff
      ~pkg_diff
      ~opam_diff
    |> string_of_html |> Dream.html |> Lwt_result.ok
in
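(* Sketch (assumption, not from the diff): is_accept_json is referenced above
   but defined elsewhere in builder_web.ml; the real helper may inspect the
   Accept header differently. *)
let _is_accept_json_sketch req =
  match Dream.header req "Accept" with
  | Some accept -> String.starts_with ~prefix:"application/json" accept
  | None -> false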
let upload_binary req =
  let job = Dream.param "job" req in
  let job = Dream.param req "job" in
  let platform = Dream.param req "platform" in
  let binary_name =
    Dream.query req "binary_name"
    |> Option.map Fpath.of_string
    |> Option.value ~default:(Ok Fpath.(v job + "bin"))
  in
  if_error "Bad request" ~status:`Bad_Request (Lwt.return binary_name) >>=
  fun binary_name ->
  let* body = Dream.body req in
  Authorization.authorized req job
  |> if_error ~status:`Forbidden "Forbidden" >>= fun () ->

@@ -258,78 +676,105 @@ let add_routes datadir =
| true ->
  Log.warn (fun m -> m "Build %S with same uuid exists: %a" job Uuidm.pp uuid);
  Dream.respond ~status:`Conflict
    (Fmt.strf "Build with same uuid exists: %a\n" Uuidm.pp uuid)
    (Fmt.str "Build with same uuid exists: %a\n" Uuidm.pp uuid)
  |> Lwt_result.ok
| false ->
  let datadir = Dream.global datadir_global req in
  let exec =
    let now = Ptime_clock.now () in
    ({ Builder.name = job ; script = "" ; files = [] }, uuid, [], now, now, Builder.Exited 0,
     [ (Fpath.(v "bin" / job + "bin"), body) ])
    ({ Builder.name = job ; platform ; script = "" }, uuid, [], now, now, Builder.Exited 0,
     [ (Fpath.(v "bin" // binary_name), body) ])
  in
  (Lwt.return (Dream.local Authorization.user_info_local req |>
  (Lwt.return (Dream.field req Authorization.user_info_field |>
               Option.to_result ~none:(`Msg "no authenticated user")) >>= fun (user_id, _) ->
   Dream.sql req (Model.add_build datadir user_id exec))
   Dream.sql req (Model.add_build ~datadir ~cachedir ~configdir user_id exec))
  |> if_error "Internal server error"
    ~log:(fun e -> Log.warn (fun m -> m "Error saving build %a: %a" pp_exec exec pp_error e))
  >>= fun () -> Dream.respond "" |> Lwt_result.ok
in
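(* Sketch (illustrative, not part of the original diff): the Fpath combinators
   behind the default binary name above -- "+" adds an extension, "//" appends
   a relative path. *)
let _default_binary_name_example () =
  assert (Fpath.(to_string (v "hello" + "bin")) = "hello.bin");
  assert (Fpath.(to_string (v "bin" // v "hello.bin")) = "bin/hello.bin")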
let login req =
  let username = Dream.session "username" req in
  Views.login username (Dream.csrf_token req) |> string_of_html |> Dream.html
in
let w f req = or_error_response req (f req) in

let do_login req =
  let* form = Dream.form req in
  (match form with
   | `Ok [ "password", password; "user", username ] ->
     Lwt.return (Ok (username, password))
   | _ ->
     Lwt.return (Error ("Bad request", `Bad_Request))) >>= fun (username, password) ->
  Dream.sql req (Model.user username)
  |> if_error "Internal server error" >>= fun user_info ->
  match user_info with
  | Some (_user_id, user_info) ->
    if Builder_web_auth.verify_password password user_info
    then
      let* () = Dream.invalidate_session req in
      let* () = Dream.put_session "username" user_info.Builder_web_auth.username req in
      Dream.redirect req "/" |> Lwt_result.ok
    else
      Dream.redirect req "/login" |> Lwt_result.ok
  | None ->
    (* hash a throwaway password so that unknown users cost about as much
       time as known ones, instead of leaking which usernames exist *)
    let _ = Builder_web_auth.hash ~username ~password ~restricted:true () in
    Dream.redirect req "/login" |> Lwt_result.ok
in
let do_logout req =
  let* form = Dream.form req in
  match form with
  | `Ok [] ->
    let* () = Dream.invalidate_session req in
    Dream.redirect req "/"
  | _ ->
    Log.warn (fun m -> m "Bad logout");
    Dream.redirect req "/"
in

let w f req = or_error_response (f req) in
Dream.pipeline [
  Dream.sql_sessions;
  Dream.router [
    Dream.get "/" (w builder);
    Dream.get "/job/:job/" (w job);
    Dream.get "/job/:job/build/latest/**" (w redirect_latest);
    Dream.get "/job/:job/build/:build/" (w job_build);
    Dream.get "/job/:job/build/:build/f/**" (w job_build_file);
    Dream.get "/hash" (w hash);
    Dream.get "/compare/:build_left/:build_right/opam-switch" (w compare_opam);
    Dream.post "/upload" (Authorization.authenticate (w upload));
    Dream.post "/job/:job/upload" (Authorization.authenticate (w upload_binary));
    Dream.get "/login" login;
    Dream.post "/login" (w do_login);
    Dream.post "/logout" do_logout;
  ];
[
  `Get, "/", (w (builds ~all:false ~filter_builds_later_than:expired_jobs));
  `Get, "/job/:job", (w job);
  `Get, "/job/:job/failed", (w job_with_failed);
  `Get, "/job/:job/build/latest/**", (w redirect_latest);
  `Get, "/job/:job/build/latest", (w redirect_latest_no_slash);
  `Get, "/job/:job/build/:build", (w job_build);
  `Get, "/job/:job/build/:build/f/**", (w job_build_file);
  `Get, "/job/:job/build/:build/main-binary", (w redirect_main_binary);
  `Get, "/job/:job/build/:build/viztreemap", (w @@ job_build_viz `Treemap);
  `Get, "/job/:job/build/:build/vizdependencies", (w @@ job_build_viz `Dependencies);
  `Get, "/job/:job/build/:build/script", (w (job_build_static_file `Script));
  `Get, "/job/:job/build/:build/console", (w (job_build_static_file `Console));
  `Get, "/job/:job/build/:build/all.tar.gz", (w job_build_targz);
  `Get, "/failed-builds", (w failed_builds);
  `Get, "/all-builds", (w (builds ~all:true));
  `Get, "/hash", (w hash);
  `Get, "/compare/:build_left/:build_right", (w compare_builds);
  `Post, "/upload", (Authorization.authenticate (w upload));
  `Post, "/job/:job/platform/:platform/upload", (Authorization.authenticate (w upload_binary));
]
let to_dream_route = function
  | `Get, path, handler -> Dream.get path handler
  | `Post, path, handler -> Dream.post path handler

let to_dream_routes l = List.map to_dream_route l

let routeprefix_ignorelist_when_removing_trailing_slash = [
  "/job/:job/build/:build/f";
  "/job/:job/build/latest";
]
module Middleware = struct

  let remove_trailing_url_slash : Dream.middleware =
    fun handler req ->
    let path = Dream.target req |> Utils.Path.of_url in
    let is_ignored =
      routeprefix_ignorelist_when_removing_trailing_slash
      |> List.exists (Utils.Path.matches_dreamroute ~path)
    in
    if not (List.mem (Dream.method_ req) [`GET; `HEAD]) || is_ignored then
      handler req
    else match List.rev path with
      | "" :: [] (* / *) -> handler req
      | "" :: path (* /.../ *) ->
        let path = List.rev path in
        let queries = Dream.all_queries req in
        let url = Utils.Path.to_url ~path ~queries in
        (*> Note: See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Location *)
        Dream.redirect ~status:`Moved_Permanently req url
      | _ (* /... *) -> handler req

end
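(* Sketch (illustrative, not part of the original diff): how the ignorelist
   interacts with the middleware above. A ":param" segment in a Dream route
   matches any path element, so /job/myjob/build/latest/ keeps its trailing
   slash while e.g. /job/myjob/ gets redirected. *)
let _trailing_slash_example () =
  let path = Utils.Path.of_url "/job/myjob/build/latest/" in
  assert (Utils.Path.matches_dreamroute ~path "/job/:job/build/latest")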
let is_iframe_page ~req =
  match Option.bind req (fun r -> Dream.header r "Sec-Fetch-Dest") with
  | Some "iframe" -> true
  | _ -> false
let error_template error _debug_info suggested_response =
  let target =
    match error.Dream.request with
    | None -> "?"
    | Some req -> Dream.target req in
  let referer =
    Option.bind error.Dream.request (fun req -> Dream.header req "referer")
  in
  match Dream.status suggested_response with
  | `Not_Found ->
    let html =
      if is_iframe_page ~req:error.Dream.request then
        Views.viz_not_found
      else
        Views.page_not_found ~target ~referer
    in
    Dream.set_header suggested_response "Content-Type" Dream.text_html;
    Dream.set_body suggested_response @@ string_of_html html;
    Lwt.return suggested_response
  | _ ->
    Lwt.return suggested_response

module Link = Link
lib/dream_tar.ml (new file, 92 lines)
@@ -0,0 +1,92 @@
(* High smuggles Lwt promises into Tar's abstract ('a, t) Tar.io type; both
   coercions are the identity, so nothing is boxed at runtime. *)
module High : sig
  type t
  type 'a s = 'a Lwt.t

  external inj : 'a s -> ('a, t) Tar.io = "%identity"
  external prj : ('a, t) Tar.io -> 'a s = "%identity"
end = struct
  type t
  type 'a s = 'a Lwt.t

  external inj : 'a -> 'b = "%identity"
  external prj : 'a -> 'b = "%identity"
end

let value v = Tar.High (High.inj v)

let ok_value v = value (Lwt_result.ok v)

let run t stream =
  let rec run : type a. (a, 'err, High.t) Tar.t -> (a, 'err) result Lwt.t =
    function
    | Tar.Write str ->
      (* Can this not fail?!? Obviously, it can, but we never know?? *)
      Lwt_result.ok (Dream.write stream str)
    | Tar.Seek _ | Tar.Read _ | Tar.Really_read _ -> assert false
    | Tar.Return value -> Lwt.return value
    | Tar.High value -> High.prj value
    | Tar.Bind (x, f) ->
      let open Lwt_result.Syntax in
      let* v = run x in
      run (f v)
  in
  run t
let header_of_file mod_time (file : Builder_db.file) =
  let file_mode =
    if Fpath.is_prefix Fpath.(v "bin/") file.filepath then
      0o755
    else
      0o644
  in
  Tar.Header.make ~file_mode ~mod_time (Fpath.to_string file.filepath) (Int64.of_int file.size)
let contents datadir file : unit -> (string option, _, _) Tar.t =
  let state = ref `Initial in
  let dispenser () =
    let ( let* ) = Tar.( let* ) in
    let src = Fpath.append datadir (Model.artifact_path file) in
    let* state' =
      match !state with
      | `Initial ->
        let* fd = ok_value (Lwt_io.open_file ~mode:Lwt_io.Input (Fpath.to_string src)) in
        let s = `Active fd in
        state := s; Tar.return (Ok s)
      | `Active _ | `Closed as s -> Tar.return (Ok s)
    in
    match state' with
    | `Closed -> Tar.return (Ok None)
    | `Active fd ->
      let* data = ok_value (Lwt_io.read ~count:65536 fd) in
      if String.length data = 0 then begin
        state := `Closed;
        let* () = ok_value (Lwt_io.close fd) in
        Tar.return (Ok None)
      end else
        Tar.return (Ok (Some data))
  in
  dispenser
let entries datadir finish files =
  let files =
    List.map (fun file ->
        let hdr = header_of_file finish file in
        let level = Some Tar.Header.Posix in
        (level, hdr, contents datadir file))
      files
  in
  let files = ref files in
  fun () -> match !files with
    | [] -> Tar.return (Ok None)
    | f :: fs -> files := fs; Tar.return (Ok (Some f))

let targz_response datadir finish files stream =
  let entries : (_, _) Tar.entries = entries datadir finish files in
  let global_hdr =
    Tar.Header.Extended.make
      ~comment:"Tar file produced by builder-web.%%VERSION_NUM%%"
      ()
  in
  let finish32 = Int64.to_int32 finish in
  Logs.err (fun m -> m "finished at %ld (%Ld)" finish32 finish);
  run (Tar_gz.out_gzipped ~level:9 ~mtime:finish32 Gz.Unix (Tar.out ~global_hdr entries)) stream
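(* Sketch (assumed call shape, not part of the diff): how a Dream handler
   might stream the tarball produced by targz_response; the real call site
   is job_build_targz in lib/builder_web.ml, and the Content-Type value here
   is an assumption. *)
let _targz_handler datadir finish files _req =
  Dream.stream
    ~headers:[ "Content-Type", "application/gzip" ]
    (fun stream ->
       Dream_tar.targz_response datadir finish files stream
       |> Lwt.map (function Ok () -> () | Error _ -> ()))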
lib/dune
@@ -1,3 +1,5 @@
(library
 (name builder_web)
 (libraries builder builder_db dream tyxml bos rresult duration hex caqti-lwt opamdiff ptime.clock.os))
 (libraries builder builder_db dream tyxml bos duration ohex caqti-lwt
  opamdiff ptime.clock.os cmarkit tar tar.gz tar-unix owee solo5-elftool decompress.de
  decompress.gz uri digestif))

lib/link.ml (new file, 89 lines)
@@ -0,0 +1,89 @@
let fpath_url_pp ppf f =
  Fpath.segs f
  |> List.map Uri.pct_encode
  |> Fmt.(list ~sep:(any "/") string) ppf

module Queries_aux = struct

  let flatten = Option.value ~default:[]

  let add_raw url_str queries =
    let uri = Uri.of_string url_str in
    let uri = Uri.add_query_params uri queries in
    Uri.to_string uri

  let add ~encode_query queries url_str =
    queries |> flatten |> List.map encode_query |> add_raw url_str

end
let pctencode fmt str = Format.fprintf fmt "%s" (Uri.pct_encode str)

module Root = struct

  let make () = "/"

end

module Job = struct

  let encode_query = function
    | `Platform p -> "platform", [ p ]

  let make ?queries ~job_name () =
    Fmt.str "/job/%a" pctencode job_name
    |> Queries_aux.add ~encode_query queries

  let make_failed ?queries ~job_name () =
    Fmt.str "/job/%a/failed" pctencode job_name
    |> Queries_aux.add ~encode_query queries

end
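(* Illustration (hypothetical job name, not part of the original diff):
   the links these smart constructors produce. *)
let _job_links_example () =
  assert (Job.make ~job_name:"hello" () = "/job/hello");
  assert (Job.make ~queries:[ `Platform "freebsd-13" ] ~job_name:"hello" ()
          = "/job/hello?platform=freebsd-13")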
module Job_build = struct

  let make ~job_name ~build () =
    Fmt.str "/job/%a/build/%a"
      pctencode job_name
      Uuidm.pp build

end
module Job_build_artifact = struct

  let encode_artifact = function
    | `Main_binary -> "/main-binary"
    | `Viz_treemap -> "/viztreemap"
    | `Viz_dependencies -> "/vizdependencies"
    | `Script -> "/script"
    | `Console -> "/console"
    | `All_targz -> "/all.tar.gz"
    | `File f -> "/f/" ^ Fmt.to_to_string fpath_url_pp f

  let make_from_string ~job_name ~build ~artifact () =
    Fmt.str "/job/%a/build/%a%s"
      pctencode job_name
      Uuidm.pp build
      artifact

  let make ~job_name ~build ~artifact () =
    let artifact = encode_artifact artifact in
    make_from_string ~job_name ~build ~artifact ()

end
module Compare_builds = struct

  let make ~left ~right () =
    Fmt.str "/compare/%a/%a"
      Uuidm.pp left
      Uuidm.pp right

end

module Failed_builds = struct

  let make ~count ~start () =
    Fmt.str "/failed-builds?count=%d&start=%d" count start

end

lib/model.ml (468 lines)
@@ -15,78 +15,151 @@ let pp_error ppf = function
Caqti_error.pp ppf e

let not_found = function
  | None -> Lwt.return (Error `Not_found :> (_, [> error ]) result)
  | None -> Lwt_result.fail `Not_found
  | Some v -> Lwt_result.return v

let staging datadir = Fpath.(datadir / "_staging")
let artifact_path artifact =
  let sha256 = Ohex.encode artifact.Builder_db.sha256 in
  (* NOTE: [sha256] is 64 characters when it's a hex sha256 checksum *)
  (* NOTE: We add the prefix to reduce the number of files in a directory - a
     workaround for inferior filesystems. We can easily revert this by changing
     this function and adding a migration. *)
  let prefix = String.sub sha256 0 2 in
  Fpath.(v "_artifacts" / prefix / sha256)
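(* Illustration (made-up checksum, not part of the original diff): an
   artifact whose hex sha256 starts with "aa" lands under _artifacts/aa/. *)
let _artifact_path_example () =
  let sha256 = String.make 32 '\170' (* 32 raw 0xaa bytes; Ohex.encode hex-encodes them *) in
  let file = { Builder_db.filepath = Fpath.v "bin/hello"; sha256; size = 0 } in
  assert (Fpath.to_string (artifact_path file) = "_artifacts/aa/" ^ String.make 64 'a')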
let read_file datadir filepath =
  let filepath = Fpath.(datadir // filepath) in
  Lwt.try_bind
    (fun () -> Lwt_io.open_file ~mode:Lwt_io.Input (Fpath.to_string filepath))
    (fun ic -> Lwt_result.ok (Lwt_io.read ic))
    (fun ic ->
       let open Lwt.Infix in
       Lwt_io.read ic >>= fun data ->
       Lwt_io.close ic >>= fun () ->
       Lwt_result.return data)
    (function
      | Unix.Unix_error (e, _, _) ->
        Logs.warn (fun m -> m "Error reading local file %a: %s"
        Log.warn (fun m -> m "Error reading local file %a: %s"
                     Fpath.pp filepath (Unix.error_message e));
        Lwt.return_error (`File_error filepath)
      | e -> Lwt.fail e)
      | e -> Lwt.reraise e)

let build_artifact build filepath (module Db : CONN) =
  Db.find_opt Builder_db.Build_artifact.get_by_build_uuid (build, filepath)
  >>= not_found >|= snd

let build_artifact_by_id id (module Db : CONN) =
  Db.find Builder_db.Build_artifact.get id

let build_artifact_data datadir file =
  read_file datadir file.Builder_db.localpath
  read_file datadir (artifact_path file)
let build_artifacts build (module Db : CONN) =
  Db.collect_list Builder_db.Build_artifact.get_all_by_build build >|=
  List.map snd

let solo5_manifest datadir file =
  let buf = Owee_buf.map_binary Fpath.(to_string (datadir // artifact_path file)) in
  Solo5_elftool.query_manifest buf |> Result.to_option

let platforms_of_job id (module Db : CONN) =
  Db.collect_list Builder_db.Build.get_platforms_for_job id

let build uuid (module Db : CONN) =
  Db.find_opt Builder_db.Build.get_by_uuid uuid >>=
  not_found

let build_meta job (module Db : CONN) =
  Db.find_opt Builder_db.Build.get_latest job >|=
  Option.map (fun (_id, meta, file) -> (meta, file))
let build_with_main_binary job platform (module Db : CONN) =
  Db.find_opt Builder_db.Build.get_latest_successful_with_binary (job, platform) >|=
  Option.map (fun (_id, build, file) -> (build, file))

let build_hash hash (module Db : CONN) =
  Db.find_opt Builder_db.Build.get_by_hash hash
  Db.find_opt Builder_db.Build.get_with_jobname_by_hash hash

let build_exists uuid (module Db : CONN) =
  Db.find_opt Builder_db.Build.get_by_uuid uuid >|=
  Option.is_some

let latest_build_uuid job_id (module Db : CONN) =
  Db.find_opt Builder_db.Build.get_latest_uuid job_id >>=
  (* We know there's at least one job when this is called, probably. *)
  not_found >|= snd
let latest_successful_build job_id platform (module Db : CONN) =
  Db.find_opt Builder_db.Build.get_latest_successful (job_id, platform)

let latest_successful_build_uuid job_id (module Db : CONN) =
  Db.find_opt Builder_db.Build.get_latest_successful_uuid job_id
let latest_successful_build_uuid job_id platform db =
  latest_successful_build job_id platform db >|= fun build ->
  Option.map (fun build -> build.Builder_db.Build.uuid) build

let previous_successful_build id (module Db : CONN) =
  Db.find_opt Builder_db.Build.get_previous_successful id >|=
  Option.map (fun (_id, meta) -> meta)
let previous_successful_build_different_output id (module Db : CONN) =
  Db.find_opt Builder_db.Build.get_previous_successful_different_output id

let main_binary id main_binary (module Db : CONN) =
  match main_binary with
  | None -> Lwt_result.return None
  | Some main_binary ->
    Db.find Builder_db.Build_artifact.get_by_build (id, main_binary) >|= fun (_id, file) ->
    Some file
let next_successful_build_different_output id (module Db : CONN) =
  Db.find_opt Builder_db.Build.get_next_successful_different_output id

let failed_builds ~start ~count platform (module Db : CONN) =
  Db.collect_list Builder_db.Build.get_all_failed (start, count, platform)

let builds_with_different_input_and_same_main_binary id (module Db : CONN) =
  Db.collect_list Builder_db.Build.get_different_input_same_output_input_ids id >>= fun ids ->
  Lwt_list.fold_left_s (fun acc input_id ->
      match acc with
      | Error _ as e -> Lwt.return e
      | Ok metas ->
        Db.find Builder_db.Build.get_one_by_input_id input_id >>= fun build ->
        Lwt.return (Ok (build :: metas)))
    (Ok []) ids

let builds_with_same_input_and_same_main_binary id (module Db : CONN) =
  Db.collect_list Builder_db.Build.get_same_input_same_output_builds id

let builds_with_same_input_and_different_main_binary id (module Db : CONN) =
  Db.collect_list Builder_db.Build.get_same_input_different_output_hashes id >>= fun hashes ->
  Lwt_list.fold_left_s (fun acc hash ->
      match acc with
      | Error _ as e -> Lwt.return e
      | Ok metas ->
        Db.find Builder_db.Build.get_by_hash hash >>= fun build ->
        Lwt.return (Ok (build :: metas)))
    (Ok []) hashes

let build_console_by_uuid datadir uuid (module Db : CONN) =
  build uuid (module Db) >>= fun (_id, { Builder_db.Build.console; _ }) ->
  read_file datadir console

let build_script_by_uuid datadir uuid (module Db : CONN) =
  build uuid (module Db) >>= fun (_id, { Builder_db.Build.script; _ }) ->
  read_file datadir script

let job_id job_name (module Db : CONN) =
  Db.find_opt Builder_db.Job.get_id_by_name job_name

let job job (module Db : CONN) =
let readme job (module Db : CONN) =
  job_id job (module Db) >>= not_found >>= fun job_id ->
  Db.collect_list Builder_db.Build.get_all_meta job_id >|=
  List.map (fun (_id, meta, main_binary) -> (meta, main_binary))
  Db.find Builder_db.Tag.get_id_by_name "readme.md" >>= fun readme_id ->
  Db.find_opt Builder_db.Job_tag.get_value (readme_id, job_id)

let jobs (module Db : CONN) =
  Db.collect_list Builder_db.Job.get_all ()
let job_and_readme job (module Db : CONN) =
  job_id job (module Db) >>= not_found >>= fun job_id ->
  Db.find Builder_db.Tag.get_id_by_name "readme.md" >>= fun readme_id ->
  Db.find_opt Builder_db.Job_tag.get_value (readme_id, job_id) >|= fun readme ->
  job_id, readme

let builds_grouped_by_output job_id platform (module Db : CONN) =
  Db.collect_list Builder_db.Build.get_all_artifact_sha (job_id, platform) >>= fun sha ->
  Lwt_list.fold_left_s (fun acc hash ->
      match acc with
      | Error _ as e -> Lwt.return e
      | Ok builds ->
        Db.find Builder_db.Build.get_with_main_binary_by_hash hash >|= fun b ->
        b :: builds)
    (Ok []) sha >|= List.rev

let builds_grouped_by_output_with_failed job_id platform ((module Db : CONN) as db) =
  builds_grouped_by_output job_id platform db >>= fun builds ->
  Db.collect_list Builder_db.Build.get_failed_builds (job_id, platform) >|= fun failed ->
  let failed = List.map (fun b -> b, None) failed in
  let cmp (a, _) (b, _) = Ptime.compare b.Builder_db.Build.start a.Builder_db.Build.start in
  List.merge cmp builds failed

let jobs_with_section_synopsis (module Db : CONN) =
  Db.collect_list Builder_db.Job.get_all_with_section_synopsis ()

let job_name id (module Db : CONN) =
  Db.find Builder_db.Job.get id

@@ -131,74 +204,162 @@ let cleanup_staging datadir (module Db : Caqti_lwt.CONNECTION) =
      (cleanup_staged staged))
    stageds

let save file data =
let save path data =
  let open Lwt.Infix in
  Lwt.catch
    (fun () ->
       Lwt_io.open_file ~mode:Lwt_io.Output (Fpath.to_string file) >>= fun oc ->
       Lwt_io.open_file ~mode:Lwt_io.Output (Fpath.to_string path) >>= fun oc ->
       Lwt_io.write oc data >>= fun () ->
       Lwt_io.close oc
       |> Lwt_result.ok)
    (function
      | Unix.Unix_error (e, _, _) ->
        Lwt_result.fail (`Msg (Unix.error_message e))
      | e -> Lwt.fail e)
      | e -> Lwt.reraise e)

let save_exec build_dir exec =
  let cs = Builder.Asn.exec_to_cs exec in
  save Fpath.(build_dir / "full") (Cstruct.to_string cs)

let save_file dir staging (filepath, data) =
  let size = String.length data in
  let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
  let localpath = Fpath.append dir filepath in
  let destpath = Fpath.append staging filepath in
  Lwt_result.lift (Bos.OS.Dir.create (Fpath.parent destpath)) >>= fun _ ->
  save destpath data >|= fun () ->
  { Builder_db.filepath; localpath; sha256; size }

let save_files dir staging files =
let save_artifacts staging artifacts =
  List.fold_left
    (fun r file ->
       r >>= fun acc ->
       save_file dir staging file >>= fun file ->
       Lwt_result.return (file :: acc))
    (Lwt_result.return [])
    files
    (fun r (file, data) ->
       r >>= fun () ->
       let sha256 = Ohex.encode file.Builder_db.sha256 in
       let destpath = Fpath.(staging / sha256) in
       save destpath data)
    (Lwt_result.return ())
    artifacts

let save_all staging_dir ((job, uuid, _, _, _, _, artifacts) as exec) =
  let build_dir = Fpath.(v job.Builder.name / Uuidm.to_string uuid) in
  let input_dir = Fpath.(build_dir / "input")
  and staging_input_dir = Fpath.(staging_dir / "input") in
  let output_dir = Fpath.(build_dir / "output")
  and staging_output_dir = Fpath.(staging_dir / "output") in
  Lwt.return (Bos.OS.Dir.create staging_dir) >>= (fun created ->
      if not created
      then Lwt_result.fail (`Msg "build directory already exists")
      else Lwt_result.return ()) >>= fun () ->
  Lwt.return (Bos.OS.Dir.create staging_input_dir) >>= fun _ ->
  Lwt.return (Bos.OS.Dir.create staging_output_dir) >>= fun _ ->
  save_exec staging_dir exec >>= fun () ->
  save_files output_dir staging_output_dir artifacts >>= fun artifacts ->
  save_files input_dir staging_input_dir job.Builder.files >>= fun input_files ->
  Lwt_result.return (artifacts, input_files)

let commit_files datadir staging_dir job_name uuid =
let commit_files datadir staging_dir job_name uuid artifacts =
  (* First we move the artifacts *)
  List.fold_left
    (fun r artifact ->
       r >>= fun () ->
       let sha256 = Ohex.encode artifact.Builder_db.sha256 in
       let src = Fpath.(staging_dir / sha256) in
       let dest = Fpath.(datadir // artifact_path artifact) in
       Lwt.return (Bos.OS.Dir.create (Fpath.parent dest)) >>= fun _created ->
       Lwt.return (Bos.OS.Path.move ~force:true src dest))
    (Lwt_result.return ())
    artifacts >>= fun () ->
  (* Now the staging dir only contains script & console *)
  let job_dir = Fpath.(datadir / job_name) in
  let dest = Fpath.(job_dir / Uuidm.to_string uuid) in
  Lwt.return (Bos.OS.Dir.create job_dir) >>= fun _ ->
  Lwt.return (Bos.OS.Path.move staging_dir dest)

let infer_section_and_synopsis artifacts =
  let infer_synopsis_and_descr switch root =
    match OpamPackage.Name.Map.find_opt root.OpamPackage.name switch.OpamFile.SwitchExport.overlays with
    | None -> None, None
    | Some opam -> OpamFile.OPAM.synopsis opam, OpamFile.OPAM.descr_body opam
  in
  let infer_section switch root =
    let root_pkg = root.OpamPackage.name in
    let is_unikernel =
      (* since mirage 4.2.0, the x-mirage-opam-lock-location is emitted *)
      Option.value ~default:false
        (Option.map (fun opam ->
             Option.is_some (OpamFile.OPAM.extended opam "x-mirage-opam-lock-location" Fun.id))
            (OpamPackage.Name.Map.find_opt root_pkg switch.OpamFile.SwitchExport.overlays))
    in
    let root_pkg_name = OpamPackage.Name.to_string root_pkg in
    if is_unikernel || Astring.String.is_prefix ~affix:"mirage-unikernel-" root_pkg_name then
      let metrics_influx =
        let influx = OpamPackage.Name.of_string "metrics-influx" in
        OpamPackage.Set.exists (fun p -> OpamPackage.Name.equal p.OpamPackage.name influx)
          switch.OpamFile.SwitchExport.selections.OpamTypes.sel_installed
      in
      let mirage_monitoring =
        let monitoring = OpamPackage.Name.of_string "mirage-monitoring" in
        match OpamPackage.Name.Map.find_opt root_pkg switch.OpamFile.SwitchExport.overlays with
        | None -> false
        | Some opam ->
          let depends = OpamFile.OPAM.depends opam in
          OpamFormula.fold_left (fun acc (n', _) ->
              acc || OpamPackage.Name.equal n' monitoring)
            false depends
      in
      if metrics_influx || mirage_monitoring then
        "Unikernels (with metrics reported to Influx)"
      else
        "Unikernels"
    else
      "Packages"
  in
  match List.find_opt (fun (p, _) -> String.equal (Fpath.basename p) "opam-switch") artifacts with
  | None -> None, (None, None)
  | Some (_, data) ->
    try
      let switch = OpamFile.SwitchExport.read_from_string data in
      let root = switch.OpamFile.SwitchExport.selections.OpamTypes.sel_roots in
      assert (OpamPackage.Set.cardinal root = 1);
      let root = OpamPackage.Set.choose root in
      Some (infer_section switch root), infer_synopsis_and_descr switch root
    with _ -> None, (None, None)

let compute_input_id artifacts =
  let get_hash filename =
    match List.find_opt (fun b -> Fpath.equal b.Builder_db.filepath filename) artifacts with
    | None -> None
    | Some x -> Some x.sha256
  in
  match
    get_hash (Fpath.v "opam-switch"),
    get_hash (Fpath.v "build-environment"),
    get_hash (Fpath.v "system-packages")
  with
  | Some a, Some b, Some c ->
    Some Digestif.SHA256.(to_raw_string (digestv_string [a;b;c]))
  | _ -> None
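(* Note (editorial, not from the diff): input_id is a SHA256 over the three
   input hashes, so two builds agree on input_id exactly when opam-switch,
   build-environment and system-packages all match -- this is what "same
   input" means in the queries above. One consequence, assuming artifact
   filepaths are unique in the list: *)
let _input_id_ignores_artifact_order artifacts =
  assert (compute_input_id artifacts = compute_input_id (List.rev artifacts))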
let save_console_and_script staging_dir job_name uuid console script =
  let out name = Fpath.(v job_name / Uuidm.to_string uuid / name + "txt") in
  let out_staging name = Fpath.(staging_dir / name + "txt") in
  let console_to_string console =
    List.rev_map (fun (delta, data) ->
        Printf.sprintf "%.3fs:%s\n" (Duration.to_f (Int64.of_int delta)) data)
      console
    |> String.concat ""
  in
  save (out_staging "script") script >>= fun () ->
  save (out_staging "console") (console_to_string console) >|= fun () ->
  (out "console", out "script")

let prepare_staging staging_dir =
  Lwt.return (Bos.OS.Dir.create staging_dir) >>= fun created ->
  if not created
  then Lwt_result.fail (`Msg "build directory already exists")
  else Lwt_result.return ()

(* saving:
   - for each artifact compute its sha256 checksum -- calling Lwt.pause in
     between
   - lookup artifact sha256 in the database and filter them out of the list: not_in_db
   - mkdir -p _staging/uuid/
   - save console & script to _staging/uuid/
   - save each artifact in not_in_db as _staging/uuid/sha256
   committing:
   - for each artifact mv _staging/uuid/sha256 _artifacts/sha256
     (or _artifacts/prefix(sha256)/sha256 where prefix(sha256) is the first two hex digits in sha256)
   - now _staging/uuid only contains console & script so we mv _staging/uuid _staging/job/uuid
   potential issues:
   - race condition in uploading same artifact:
     * if the artifact already exists in the database and thus filesystem then nothing is done
     * if the artifact is added to the database and/or filesystem we atomically overwrite it
   - input_id depends on a sort order?
*)

let add_build
    datadir
    ~datadir
    ~cachedir
    ~configdir
    user_id
    ((job, uuid, console, start, finish, result, _) as exec)
    ((job : Builder.script_job), uuid, console, start, finish, result, raw_artifacts)
    (module Db : CONN) =
  let open Builder_db in
  let job_name = job.Builder.name in
  let staging_dir = Fpath.(staging datadir / Uuidm.to_string uuid) in
  let or_cleanup x =
    Lwt_result.map_err (fun e ->
    Lwt_result.map_error (fun e ->
        Bos.OS.Dir.delete ~recurse:true staging_dir
        |> Result.iter_error (fun e ->
            Log.err (fun m -> m "Failed to remove staging dir %a: %a"

@@ -207,47 +368,158 @@ let add_build
          e)
      x
  in
  or_cleanup (save_all staging_dir exec) >>= fun (artifacts, input_files) ->
  let not_interesting p =
    String.equal (Fpath.basename p) "README.md" || String.equal (Fpath.get_ext p) ".build-hashes"
  in
  begin
    List.fold_left
      (fun r (filepath, data) ->
         r >>= fun acc ->
         if not_interesting filepath then
           Lwt_result.return acc
         else
           let sha256 = Digestif.SHA256.(to_raw_string (digest_string data))
           and size = String.length data in
           Lwt_result.ok (Lwt.pause ()) >|= fun () ->
           ({ filepath; sha256; size }, data) :: acc)
      (Lwt_result.return [])
      raw_artifacts
  end >>= fun artifacts ->
  or_cleanup (prepare_staging staging_dir) >>= fun () ->
  or_cleanup (save_console_and_script staging_dir job_name uuid console job.Builder.script)
  >>= fun (console, script) ->
  List.fold_left
    (fun r ((f, _) as artifact) ->
       r >>= fun acc ->
       Db.find Builder_db.Build_artifact.exists f.sha256 >|= fun exists ->
       if exists then acc else artifact :: acc)
    (Lwt_result.return [])
    artifacts >>= fun artifacts_to_save ->
  or_cleanup (save_artifacts staging_dir artifacts_to_save) >>= fun () ->
  let artifacts = List.map fst artifacts in
  let r =
    Db.start () >>= fun () ->
    Db.exec Job.try_add job_name >>= fun () ->
    Db.find_opt Job.get_id_by_name job_name >>= fun job_id ->
    Lwt.return (Option.to_result ~none:(`Msg "No such job id") job_id) >>= fun job_id ->
    let section_tag = "section" in
    Db.exec Tag.try_add section_tag >>= fun () ->
    Db.find Tag.get_id_by_name section_tag >>= fun section_id ->
    let synopsis_tag = "synopsis" in
    Db.exec Tag.try_add synopsis_tag >>= fun () ->
    Db.find Tag.get_id_by_name synopsis_tag >>= fun synopsis_id ->
    let descr_tag = "description" in
    Db.exec Tag.try_add descr_tag >>= fun () ->
    Db.find Tag.get_id_by_name descr_tag >>= fun descr_id ->
    let readme_tag = "readme.md" in
    Db.exec Tag.try_add readme_tag >>= fun () ->
    Db.find Tag.get_id_by_name readme_tag >>= fun readme_id ->
    let input_id = compute_input_id artifacts in
    let platform = job.Builder.platform in
    Db.exec Build.add { Build.uuid; start; finish; result;
                        console; script = job.Builder.script;
                        main_binary = None; user_id; job_id } >>= fun () ->
                        console; script; platform;
                        main_binary = None; input_id; user_id; job_id } >>= fun () ->
    Db.find last_insert_rowid () >>= fun id ->
    let sec_syn = infer_section_and_synopsis raw_artifacts in
    let add_or_update tag_id tag_value =
      Db.find_opt Job_tag.get_value (tag_id, job_id) >>= function
      | None -> Db.exec Job_tag.add (tag_id, tag_value, job_id)
      | Some _ -> Db.exec Job_tag.update (tag_id, tag_value, job_id)
    in
    (match fst sec_syn with
     | None -> Lwt_result.return ()
     | Some section_v -> add_or_update section_id section_v) >>= fun () ->
    (match snd sec_syn with
     | None, _ -> Lwt_result.return ()
     | Some synopsis_v, _ -> add_or_update synopsis_id synopsis_v) >>= fun () ->
    (match snd sec_syn with
     | _, None -> Lwt_result.return ()
     | _, Some descr_v -> add_or_update descr_id descr_v) >>= fun () ->
    (let readme =
       List.find_opt (fun (p, _) -> Fpath.(equal (v "README.md") p)) raw_artifacts
     in
     let readme_anywhere =
       List.find_opt (fun (p, _) -> String.equal "README.md" (Fpath.basename p)) raw_artifacts
     in
     match readme, readme_anywhere with
     | None, None -> Lwt_result.return ()
     | Some (_, data), _ | None, Some (_, data) -> add_or_update readme_id data) >>= fun () ->
    (match List.partition (fun p -> Fpath.(is_prefix (v "bin/") p.filepath)) artifacts with
     | [ main_binary ], other_artifacts ->
       Db.exec Build_artifact.add (main_binary, id) >>= fun () ->
       Db.find Builder_db.last_insert_rowid () >>= fun main_binary_id ->
       Db.exec Build.set_main_binary (id, main_binary_id) >|= fun () ->
       Some main_binary, other_artifacts
     | [], _ ->
       Log.debug (fun m -> m "Zero binaries for build %a" Uuidm.pp uuid);
       Lwt_result.return (None, artifacts)
     | xs, _ ->
       Log.warn (fun m -> m "Multiple binaries for build %a: %a" Uuidm.pp uuid
                    Fmt.(list ~sep:(any ",") Fpath.pp)
                    (List.map (fun a -> a.filepath) xs));
       Lwt_result.return (None, artifacts)) >>= fun (main_binary, remaining_artifacts_to_add) ->
    List.fold_left
      (fun r file ->
         r >>= fun () ->
         Db.exec Build_artifact.add (file, id))
      (Lwt_result.return ())
      artifacts >>= fun () ->
    List.fold_left
      (fun r file ->
         r >>= fun () ->
         Db.exec Build_file.add (file, id))
      (Lwt_result.return ())
      input_files >>= fun () ->
    Db.collect_list Build_artifact.get_all_by_build id >>= fun artifacts ->
    (match List.filter (fun (_, p) -> Fpath.(is_prefix (v "bin/") p.filepath)) artifacts with
     | [ (build_artifact_id, _) ] -> Db.exec Build.set_main_binary (id, build_artifact_id)
     | [] ->
       Log.debug (fun m -> m "Zero binaries for build %a" Uuidm.pp uuid);
       Lwt_result.return ()
     | xs ->
       Log.warn (fun m -> m "Multiple binaries for build %a: %a" Uuidm.pp uuid
                    Fmt.(list ~sep:(any ",") Fpath.pp)
                    (List.map (fun (_, a) -> a.filepath) xs));
       Lwt_result.return ()) >>= fun () ->
    Db.commit () >>= fun () ->
    commit_files datadir staging_dir job_name uuid
      remaining_artifacts_to_add >>= fun () ->
    commit_files datadir staging_dir job_name uuid (List.map fst artifacts_to_save) >>= fun () ->
    Db.commit () >|= fun () ->
    main_binary
  in
  Lwt_result.bind_lwt_err (or_cleanup r)
  Lwt_result.bind_lwt_error (or_cleanup r)
    (fun e ->
       Db.rollback ()
       |> Lwt.map (fun r ->
           Result.iter_error
             (fun e' -> Log.err (fun m -> m "Failed rollback: %a" Caqti_error.pp e'))
             r;
           e))
           e)) >>= function
  | None -> Lwt.return (Ok ())
  | Some main_binary ->
    let time =
      let (y, m, d), ((hh, mm, ss), _) = Ptime.to_date_time start in
      Printf.sprintf "%04d%02d%02d%02d%02d%02d" y m d hh mm ss
    and uuid = Uuidm.to_string uuid
    and job = job.name
    and platform = job.platform
    and sha256 = Ohex.encode main_binary.sha256
    in
    let fp_str p = Fpath.(to_string (datadir // p)) in
    let args =
      String.concat " "
        (List.map (fun s -> "\"" ^ String.escaped s ^ "\"")
           [ "--build-time=" ^ time ; "--sha256=" ^ sha256 ; "--job=" ^ job ;
             "--uuid=" ^ uuid ; "--platform=" ^ platform ;
             "--cache-dir=" ^ Fpath.to_string cachedir ;
             "--data-dir=" ^ Fpath.to_string datadir ;
             "--main-binary-filepath=" ^ Fpath.to_string main_binary.filepath ;
             fp_str Fpath.(datadir // artifact_path main_binary) ])
    in
    Log.debug (fun m -> m "executing hooks with %s" args);
    let dir = Fpath.(configdir / "upload-hooks") in
    (try
       Lwt.return (Ok (Some (Unix.opendir (Fpath.to_string dir))))
     with
       Unix.Unix_error _ -> Lwt.return (Ok None)) >>= function
    | None -> Lwt.return (Ok ())
    | Some dh ->
      try
        let is_executable file =
          let st = Unix.stat (Fpath.to_string file) in
          st.Unix.st_perm land 0o111 = 0o111 &&
          st.Unix.st_kind = Unix.S_REG
        in
        let rec go () =
          let next_file = Unix.readdir dh in
          let file = Fpath.(dir / next_file) in
          if is_executable file && Fpath.has_ext ".sh" file then
            ignore (Sys.command (Fpath.to_string file ^ " " ^ args ^ " &"));
          go ()
        in
        go ()
      with
      | End_of_file ->
        Unix.closedir dh;
        Lwt.return (Ok ())
lib/model.mli
@@ -2,9 +2,10 @@ type error = [ Caqti_error.call_or_retrieve | `Not_found | `File_error of Fpath.

val pp_error : Format.formatter -> error -> unit

val not_found : 'a option -> ('a, [> error ]) result Lwt.t
val not_found : 'a option -> ('a, [> `Not_found ]) result Lwt.t

val staging : Fpath.t -> Fpath.t
val artifact_path : Builder_db.file -> Fpath.t

val cleanup_staging : Fpath.t -> Caqti_lwt.connection ->
  (unit, [> `Msg of string ]) result Lwt.t

@@ -12,57 +13,94 @@ val cleanup_staging : Fpath.t -> Caqti_lwt.connection ->
val build_artifact : Uuidm.t -> Fpath.t -> Caqti_lwt.connection ->
  (Builder_db.file, [> error ]) result Lwt.t

val build_artifact_by_id : [`build_artifact] Builder_db.id -> Caqti_lwt.connection ->
  (Builder_db.file, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val build_artifact_data : Fpath.t -> Builder_db.file ->
  (string, [> error ]) result Lwt.t

val build_artifacts : Builder_db.id -> Caqti_lwt.connection ->
val build_artifacts : [`build] Builder_db.id -> Caqti_lwt.connection ->
  (Builder_db.file list, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val solo5_manifest : Fpath.t -> Builder_db.file -> Solo5_elftool.mft option

val platforms_of_job : [`job] Builder_db.id -> Caqti_lwt.connection ->
  (string list, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val build : Uuidm.t -> Caqti_lwt.connection ->
  (Builder_db.id * Builder_db.Build.t, [> error ]) result Lwt.t
  ([`build] Builder_db.id * Builder_db.Build.t, [> error ]) result Lwt.t

val build_meta : Builder_db.id -> Caqti_lwt.connection ->
  ((Builder_db.Build.Meta.t * Builder_db.file option) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
val build_with_main_binary : [`job] Builder_db.id -> string -> Caqti_lwt.connection ->
  ((Builder_db.Build.t * Builder_db.file) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val build_hash : Cstruct.t -> Caqti_lwt.connection ->
val build_hash : string -> Caqti_lwt.connection ->
  ((string * Builder_db.Build.t) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val build_exists : Uuidm.t -> Caqti_lwt.connection ->
  (bool, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val latest_build_uuid : Builder_db.id -> Caqti_lwt.connection ->
  (Uuidm.t, [> error ]) result Lwt.t

val latest_successful_build_uuid : Builder_db.id -> Caqti_lwt.connection ->
val latest_successful_build_uuid : [`job] Builder_db.id -> string option -> Caqti_lwt.connection ->
  (Uuidm.t option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val previous_successful_build : Builder_db.id -> Caqti_lwt.connection ->
  (Builder_db.Build.Meta.t option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
val latest_successful_build : [`job] Builder_db.id -> string option -> Caqti_lwt.connection ->
  (Builder_db.Build.t option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val main_binary : Builder_db.id -> Fpath.t option -> Caqti_lwt.connection ->
  (Builder_db.file option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
val previous_successful_build_different_output : [`build] Builder_db.id -> Caqti_lwt.connection ->
  (Builder_db.Build.t option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val job : string -> Caqti_lwt.connection ->
  ((Builder_db.Build.Meta.t * Builder_db.file option) list, [> error ]) result Lwt.t
val next_successful_build_different_output : [`build] Builder_db.id -> Caqti_lwt.connection ->
  (Builder_db.Build.t option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val failed_builds : start:int -> count:int -> string option -> Caqti_lwt.connection ->
  ((string * Builder_db.Build.t) list, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val builds_with_different_input_and_same_main_binary : [`build] Builder_db.id -> Caqti_lwt.connection ->
  (Builder_db.Build.t list, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val builds_with_same_input_and_same_main_binary : [`build] Builder_db.id -> Caqti_lwt.connection ->
  (Builder_db.Build.t list, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val builds_with_same_input_and_different_main_binary : [`build] Builder_db.id -> Caqti_lwt.connection ->
  (Builder_db.Build.t list, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val build_console_by_uuid : Fpath.t -> Uuidm.t -> Caqti_lwt.connection ->
  (string, [> error ]) result Lwt.t

val build_script_by_uuid : Fpath.t -> Uuidm.t -> Caqti_lwt.connection ->
  (string, [> error ]) result Lwt.t

val readme : string -> Caqti_lwt.connection ->
  (string option, [> error ]) result Lwt.t

val job_and_readme : string -> Caqti_lwt.connection ->
  ([`job] Builder_db.id * string option, [> error ]) result Lwt.t

val builds_grouped_by_output : [`job] Builder_db.id -> string option -> Caqti_lwt.connection ->
  ((Builder_db.Build.t * Builder_db.file option) list, [> error ]) result Lwt.t

val builds_grouped_by_output_with_failed : [`job] Builder_db.id -> string option -> Caqti_lwt.connection ->
  ((Builder_db.Build.t * Builder_db.file option) list, [> error ]) result Lwt.t

val job_id : string -> Caqti_lwt.connection ->
  (Builder_db.id option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
  ([`job] Builder_db.id option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val jobs : Caqti_lwt.connection ->
  ((Builder_db.id * string) list, [> Caqti_error.call_or_retrieve ]) result Lwt.t
val jobs_with_section_synopsis : Caqti_lwt.connection ->
  (([`job] Builder_db.id * string * string option * string option) list, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val job_name : Builder_db.id -> Caqti_lwt.connection ->
val job_name : [`job] Builder_db.id -> Caqti_lwt.connection ->
  (string, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val user : string -> Caqti_lwt.connection ->
  ((Builder_db.id * Builder_web_auth.scrypt Builder_web_auth.user_info) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
  (([`user] Builder_db.id * Builder_web_auth.scrypt Builder_web_auth.user_info) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val authorized : Builder_db.id -> string -> Caqti_lwt.connection -> (unit, [> Caqti_error.call_or_retrieve | `Msg of string ]) result Lwt.t
val authorized : [`user] Builder_db.id -> string -> Caqti_lwt.connection -> (unit, [> Caqti_error.call_or_retrieve | `Msg of string ]) result Lwt.t

val add_build :
  Fpath.t ->
  Builder_db.id ->
  (Builder.job * Uuidm.t * (int * string) list * Ptime.t * Ptime.t *
  datadir:Fpath.t ->
  cachedir:Fpath.t ->
  configdir:Fpath.t ->
  [`user] Builder_db.id ->
  (Builder.script_job * Uuidm.t * (int * string) list * Ptime.t * Ptime.t *
   Builder.execution_result * (Fpath.t * string) list) ->
  Caqti_lwt.connection ->
  (unit, [> Caqti_error.call_or_retrieve | `Msg of string ]) result Lwt.t

lib/utils.ml (new file, 128 lines)
@@ -0,0 +1,128 @@
module String_map = struct
  include Map.Make(String)

  let add_or_create key v t =
    update key (function None -> Some [ v ] | Some xs -> Some (v :: xs)) t
end
let diff_map_to_json (left, right, different_versions) =
  let convert_list lst =
    `List (List.map (fun (name, version) ->
        `Assoc [("name", `String name); ("version", `String version)]
      ) lst)
  in
  let convert_diff_versions lst =
    `List (List.map (fun (name, version1, version2) ->
        `Assoc [
          ("name", `String name);
          ("version_in_left", `String version1);
          ("version_in_right", `String version2)
        ]
      ) lst)
  in
  `Assoc [
    ("left_packages", convert_list left);
    ("right_packages", convert_list right);
    ("different_versions", convert_diff_versions different_versions)
  ]
let diff_map a b =
  let diff a b =
    String_map.fold (fun k v acc ->
        if not (String_map.mem k b) then (k, v) :: acc else acc)
      a [] |> List.rev
  in
  let added = diff b a
  and removed = diff a b
  and changed =
    String_map.fold (fun k v acc ->
        match String_map.find_opt k b with
        | None -> acc
        | Some v' -> if String.equal v v' then acc else (k, v, v') :: acc)
      a [] |> List.rev
  in
  (added, removed, changed)

let compare_env env1 env2 =
  let parse_env e =
    List.fold_left (fun m s ->
        match Astring.String.cut ~sep:"=" s with
        | Some (key, value) -> String_map.add key value m
        | None -> String_map.add s "" m)
      String_map.empty (Astring.String.cuts ~sep:"\n" e)
  in
  diff_map (parse_env env1) (parse_env env2)

let compare_pkgs p1 p2 =
  let parse_pkgs p =
    List.fold_left (fun m s ->
        match Astring.String.cut ~sep:"=" s with
        | Some (name, version) -> String_map.add name version m
        | None -> match Astring.String.cut ~sep:"-" s with
          | Some (name, version) -> String_map.add name version m
          | None -> String_map.add s "" m)
      String_map.empty (Astring.String.cuts ~sep:"\n" p)
  in
  diff_map (parse_pkgs p1) (parse_pkgs p2)
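(* Illustration (made-up package lists, not part of the original diff):
   compare_pkgs accepts both "name=version" and "name-version" lines. *)
let _compare_pkgs_example () =
  let added, removed, changed = compare_pkgs "foo-1.2\nbar=3" "foo-1.3\nbaz=1" in
  assert (added = [ "baz", "1" ]);
  assert (removed = [ "bar", "3" ]);
  assert (changed = [ "foo", "1.2", "1.3" ])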
let md_to_html ?adjust_heading ?(safe = true) data =
  let open Cmarkit in
  let doc = Doc.of_string ~strict:false ~heading_auto_ids:true data in
  let doc =
    Option.fold ~none:doc
      ~some:(fun lvl ->
          let block _m = function
            | Block.Heading (h, meta) ->
              let open Block.Heading in
              let level = level h
              and id = id h
              and layout = layout h
              and inline = inline h
              in
              let h' = make ?id ~layout ~level:(level + lvl) inline in
              Mapper.ret (Block.Heading (h', meta))
            | Block.Blocks _ -> Mapper.default
            | x -> Mapper.ret x
          in
          let mapper = Mapper.make ~block () in
          Mapper.map_doc mapper doc)
      adjust_heading
  in
  Cmarkit_html.of_doc ~safe doc
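(* Illustration (not part of the original diff): rendering a job README with
   its headings demoted by one level, so the page's own <h1> stays unique. *)
let _readme_html_example () =
  md_to_html ~adjust_heading:1 "# my job\n\nSome *markdown* text."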
module Path = struct

  let to_url ~path ~queries =
    let path = match path with
      | "" :: [] -> "/"
      | path -> "/" ^ String.concat "/" path
    in
    let query = queries |> List.map (fun (k, v) -> k, [v]) in
    Uri.make ~path ~query () |> Uri.to_string

  (* Like Dream.path in 1.0.0~alpha2 but on Dream.target *)
  let of_url uri_str =
    let path_str = uri_str |> Uri.of_string |> Uri.path in
    match String.split_on_char '/' path_str with
    | "" :: (_ :: _ as tail) -> tail
    | path -> path

  let matches_dreamroute ~path dreamroute =
    let is_match path_elem dpath_elem =
      (dpath_elem |> String.starts_with ~prefix:":")
      || path_elem = dpath_elem
    in
    let rec aux path dreampath =
      match path, dreampath with
      | [] , _ :: _ -> false (* length path < length dreampath *)
      | _ , [] -> true (* length path >= length dreampath *)
      | _ :: _ , "" :: [] -> true (* dreampath ends in '/' *)
      | p_elem :: path, dp_elem :: dreampath ->
        is_match p_elem dp_elem
        && aux path dreampath
    in
    let dreampath = dreamroute |> of_url in
    aux path dreampath

end
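(* Illustration (not part of the original diff): of_url keeps a trailing
   empty segment, and to_url re-attaches queries. *)
let _path_example () =
  assert (Path.of_url "/job/foo/?platform=freebsd" = [ "job"; "foo"; "" ]);
  assert (Path.to_url ~path:[ "job"; "foo" ] ~queries:[ "platform", "freebsd" ]
          = "/job/foo?platform=freebsd")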
lib/views.ml (1166 lines; diff suppressed because it is too large)

dune (opamdiff library)
@@ -1,3 +1,3 @@
(library
 (name opamdiff)
 (libraries opam-core opam-format))
 (libraries opam-core opam-format yojson))

lib/opamdiff/opamdiff.ml
@@ -1,13 +1,156 @@
module Set = OpamPackage.Set

type package = OpamPackage.t

let packages (switch : OpamFile.SwitchExport.t) =
  assert (Set.cardinal switch.selections.sel_pinned = 0);
  assert (Set.cardinal switch.selections.sel_compiler = 0);
  assert (Set.subset switch.selections.sel_roots switch.selections.sel_installed);
  switch.selections.sel_installed

let duniverse_dir = "x-opam-monorepo-duniverse-dirs"

module M = Map.Make(String)

let duniverse_dirs_data =
  (* the representation in the file is [ URL DIR [ HASH* ] ] *)
  let open OpamParserTypes.FullPos in
  let ( let* ) = Result.bind in
  let string ~ctx = function
    | { pelem = String s ; _ } -> Ok s
    | _ -> Error (`Msg ("couldn't find a string " ^ ctx))
  in
  let extract_data = function
    | { pelem = List { pelem = [ url ; dir ; hashes ] ; _ } ; _ } ->
      let* url = string ~ctx:"url" url in
      let* hashes =
        match hashes with
        | { pelem = List { pelem = hashes ; _ } ; _ } ->
          List.fold_left (fun acc hash ->
              let* acc = acc in
              let* hash = string ~ctx:"hash" hash in
              let* h = match OpamHash.of_string_opt hash with
                | Some h -> Ok OpamHash.(kind h, contents h)
                | None -> Error (`Msg ("couldn't decode opam hash in " ^ hash))
              in
              Ok (h :: acc))
            (Ok []) hashes
        | _ -> Error (`Msg "couldn't decode hashes")
      in
      let* dir = string ~ctx:"directory" dir in
      Ok (url, dir, List.rev hashes)
    | { pelem = List { pelem = [ url ; dir ] ; _ } ; _ } ->
      let* url = string ~ctx:"url" url in
      let* dir = string ~ctx:"directory" dir in
      Ok (url, dir, [])
    | _ -> Error (`Msg "expected a list of URL, DIR, [HASHES]")
  in
  function
  | { pelem = List { pelem = lbody ; _ } ; _ } ->
    List.fold_left (fun acc v ->
        let* acc = acc in
        let* (url, dir, hashes) = extract_data v in
        Ok (M.add dir (url, hashes) acc))
      (Ok M.empty) lbody
  | _ -> Error (`Msg "expected a list or a nested list")

let duniverse (switch : OpamFile.SwitchExport.t) =
  let root = switch.OpamFile.SwitchExport.selections.OpamTypes.sel_roots in
  if OpamPackage.Set.cardinal root = 1 then
    let root = OpamPackage.Set.choose root in
    match OpamPackage.(Name.Map.find_opt root.name switch.OpamFile.SwitchExport.overlays) with
    | None -> Error (`Msg "opam switch export doesn't contain the main package")
    | Some opam ->
      match OpamFile.OPAM.extended opam duniverse_dir duniverse_dirs_data with
      | None -> Ok None
      | Some (Error e) -> Error e
      | Some (Ok v) -> Ok (Some v)
  else
    Error (`Msg "not a single root package found in opam switch export")
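(* Sketch (hypothetical use, not part of the original diff): listing the
   duniverse pins of a build's opam-switch artifact, mirroring how
   builder-web loads switch exports elsewhere. *)
let _duniverse_dirs_of_switch data =
  match duniverse (OpamFile.SwitchExport.read_from_string data) with
  | Ok (Some dirs) -> List.map fst (M.bindings dirs)  (* duniverse directory names *)
  | Ok None -> []       (* not an opam-monorepo project *)
  | Error (`Msg _) -> []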
type duniverse_diff = {
|
||||
name : string ;
|
||||
urls : string * string option ;
|
||||
hash : (OpamHash.kind * string option * string option) list ;
|
||||
}
|
||||
|
||||
let pp_duniverse_diff ppf { name ; urls ; hash } =
|
||||
let opt_hash = Option.value ~default:"NONE" in
|
||||
Format.fprintf ppf "%s (%s%s) %s"
|
||||
name
|
||||
(fst urls)
|
||||
(Option.fold ~none:"" ~some:(fun url -> "->" ^ url) (snd urls))
|
||||
(String.concat ", " (List.map (fun (h, l, r) ->
|
||||
OpamHash.string_of_kind h ^ " " ^ opt_hash l ^ "->" ^ opt_hash r) hash))
|
||||
|
||||
let pp_duniverse_dir ppf (dir, url) =
|
||||
Format.fprintf ppf "%s (%s)" dir url
|
||||
|
||||
let duniverse_diff l r =
|
||||
let l = Option.value l ~default:M.empty
|
||||
and r = Option.value r ~default:M.empty
|
||||
in
|
||||
let keys_l_only = ref [] and keys_r_only = ref [] and diff = ref [] in
|
||||
let equal_hashes l r =
|
||||
(* l and r are lists of pairs, with the hash kind and its value *)
|
||||
(* for a git remote, the hashes are empty lists *)
|
||||
(match l with [] -> false | _ -> true) &&
|
||||
(match r with [] -> false | _ -> true) &&
|
||||
List.for_all (fun (h, v) ->
|
||||
match List.assoc_opt h r with
|
||||
| None -> false
|
||||
| Some v' -> String.equal v v')
|
||||
l &&
|
||||
List.for_all (fun (h, v) ->
|
||||
match List.assoc_opt h l with
|
||||
| None -> false
|
||||
| Some v' -> String.equal v v')
|
||||
r
|
||||
in
|
||||
let _ =
|
||||
M.merge (fun key l r ->
|
||||
match l, r with
|
||||
| None, Some _ -> keys_r_only := key :: !keys_r_only; None
|
||||
| Some _, None -> keys_l_only := key :: !keys_l_only; None
|
||||
| None, None -> None
|
||||
| Some (_, l), Some (_, r) when equal_hashes l r -> None
|
||||
| Some (url1, []), Some (url2, []) when String.equal url1 url2 -> None
|
||||
| Some l, Some r -> diff := (key, l, r) :: !diff; None)
|
||||
l r
|
||||
in
|
||||
let dir_only keys map =
|
||||
let only =
|
||||
M.filter (fun k _ -> List.mem k keys) map |> M.bindings
|
||||
in
|
||||
List.map (fun (key, (url, _)) -> key, url) only
|
||||
in
|
||||
let l_only = dir_only !keys_l_only l
|
||||
and r_only = dir_only !keys_r_only r
|
||||
and diff =
|
||||
List.map (fun (name, (url_l, hashes_l), (url_r, hashes_r)) ->
|
||||
let urls =
|
||||
if String.equal url_l url_r then url_l, None else url_l, Some url_r
|
||||
in
|
||||
let hash =
|
||||
List.fold_left (fun acc (h, v) ->
|
||||
match List.assoc_opt h hashes_r with
|
||||
| None -> (h, Some v, None) :: acc
|
||||
| Some v' ->
|
||||
if String.equal v v' then
|
||||
acc
|
||||
else
|
||||
(h, Some v, Some v') :: acc)
|
||||
[] hashes_l
|
||||
in
|
||||
let hash = List.fold_left (fun acc (h', v') ->
|
||||
match List.assoc_opt h' hashes_l with
|
||||
| None -> (h', None, Some v') :: acc
|
||||
| Some _ -> acc)
|
||||
hash hashes_r
|
||||
in
|
||||
{ name ; urls ; hash })
|
||||
!diff
|
||||
in
|
||||
l_only, r_only, diff
|
||||
|
||||
type version_diff = {
|
||||
name : OpamPackage.Name.t;
|
||||
version_left : OpamPackage.Version.t;
|
||||
|
@@ -23,6 +166,75 @@ let pp_version_diff ppf { name; version_left; version_right } =
     (OpamPackage.Version.to_string version_left)
     (OpamPackage.Version.to_string version_right)

+type opam_diff = {
+  pkg : OpamPackage.t ;
+  build : (OpamTypes.command list * OpamTypes.command list) option ;
+  install : (OpamTypes.command list * OpamTypes.command list) option ;
+  url : (OpamFile.URL.t option * OpamFile.URL.t option) option ;
+  otherwise_equal : bool ;
+}
+
+let commands_to_strings (l, r) =
+  let v a =
+    OpamPrinter.FullPos.value (OpamPp.print OpamFormat.V.command a)
+  in
+  List.map v l, List.map v r
+
+let opt_url_to_string (l, r) =
+  let url_to_s = function
+    | None -> "" | Some u -> OpamFile.URL.write_to_string u
+  in
+  url_to_s l, url_to_s r
+
+let pp_opam_diff ppf { pkg ; otherwise_equal ; _ } =
+  Format.fprintf ppf "%a%s"
+    pp_opampackage pkg
+    (if otherwise_equal then "" else " (and additional changes)")
+
+let rec strip_common_prefix a b =
+  match a, b with
+  | hd::tl, hd'::tl' ->
+    if hd = hd' then
+      strip_common_prefix tl tl'
+    else
+      a, b
+  | a, b -> a, b
+
+let detailed_opam_diff pkg l r =
+  let no_build_install_url p =
+    OpamFile.OPAM.with_url_opt None
+      (OpamFile.OPAM.with_install []
+         (OpamFile.OPAM.with_build [] p))
+  in
+  let otherwise_equal =
+    OpamFile.OPAM.effectively_equal
+      (no_build_install_url l) (no_build_install_url r)
+  and build =
+    if OpamFile.OPAM.build l = OpamFile.OPAM.build r then
+      None
+    else
+      Some (strip_common_prefix (OpamFile.OPAM.build l) (OpamFile.OPAM.build r))
+  and install =
+    if OpamFile.OPAM.install l = OpamFile.OPAM.install r then
+      None
+    else
+      Some (strip_common_prefix (OpamFile.OPAM.install l) (OpamFile.OPAM.install r))
+  and url =
+    if OpamFile.OPAM.url l = OpamFile.OPAM.url r then
+      None
+    else
+      Some (OpamFile.OPAM.url l, OpamFile.OPAM.url r)
+  in
+  { pkg ; build ; install ; url ; otherwise_equal }
+
+let detailed_opam_diffs left right pkgs =
+  OpamPackage.Set.fold (fun p acc ->
+      let find = OpamPackage.Name.Map.find p.name in
+      let opam_left = find left.OpamFile.SwitchExport.overlays
+      and opam_right = find right.OpamFile.SwitchExport.overlays in
+      (detailed_opam_diff p opam_left opam_right) :: acc)
+    pkgs []
+
 let compare left right =
   let packages_left = packages left and packages_right = packages right in
   let module Set = OpamPackage.Set in
@@ -33,12 +245,12 @@ let compare left right =
       l
   in
   let same_version = Set.inter packages_left packages_right in
-  let (same, opam_diff) =
-    Set.partition
+  let opam_diff =
+    Set.filter
       (fun p ->
          let find = OpamPackage.Name.Map.find p.name in
          let opam_left = find left.overlays and opam_right = find right.overlays in
-         OpamFile.OPAM.effectively_equal opam_left opam_right)
+         not (OpamFile.OPAM.effectively_equal opam_left opam_right))
       same_version
   and version_diff =
     List.filter_map (fun p1 ->
@@ -53,7 +265,65 @@ let compare left right =
       | None ->
         None)
       (Set.elements packages_left)
-  and left = diff packages_left packages_right
-  and right = diff packages_right packages_left
+  and left_pkgs = diff packages_left packages_right
+  and right_pkgs = diff packages_right packages_left
   in
-  (same, opam_diff, version_diff, left, right)
+  let opam_diff = detailed_opam_diffs left right opam_diff in
+  let duniverse_ret =
+    match duniverse left, duniverse right with
+    | Ok l, Ok r -> Ok (duniverse_diff l r)
+    | Error _ as e, _ | _, (Error _ as e) -> e
+  in
+  (opam_diff, version_diff, left_pkgs, right_pkgs, duniverse_ret)
+
+let compare_to_json
+    (opam_diff, version_diff, left_pkgs, right_pkgs, duniverse_diff) : Yojson.Basic.t =
+  let version_diff_to_json lst =
+    `List (List.map (fun { name; version_left; version_right } ->
+        `Assoc [
+          ("name", `String (OpamPackage.Name.to_string name));
+          ("version_left", `String (OpamPackage.Version.to_string version_left));
+          ("version_right", `String (OpamPackage.Version.to_string version_right))
+        ]
+      ) lst)
+  in
+  let package_set_to_json set =
+    `List (Set.fold (fun p acc ->
+        let json = `Assoc [
+          ("name", `String (OpamPackage.Name.to_string p.OpamPackage.name));
+          ("version", `String (OpamPackage.Version.to_string p.OpamPackage.version))
+        ] in
+        json :: acc
+      ) set [])
+  in
+  let opam_diff_to_json opam_diff =
+    `List (List.map (fun (diff : opam_diff) ->
+        `Assoc [
+          ("package_version", `String (OpamPackage.to_string diff.pkg));
+          ("otherwise_equal", `Bool diff.otherwise_equal)
+        ]
+      ) opam_diff)
+  in
+  let duniverse_to_json = function
+    | Ok (left, right, detailed_diff) ->
+      `Assoc [
+        ("left", `List (List.map (fun (k, v) -> `Assoc [("name", `String k); ("value", `String v)]) left));
+        ("right", `List (List.map (fun (k, v) -> `Assoc [("name", `String k); ("value", `String v)]) right));
+        ("detailed_diff", `List (List.map (fun (diff : duniverse_diff) ->
+            `Assoc [
+              ("name", `String diff.name);
+            ]) detailed_diff))
+      ]
+    | Error (`Msg msg) ->
+      `String msg
+  in
+  `Assoc [
+    ("opam_diff", opam_diff_to_json opam_diff);
+    ("version_diff", version_diff_to_json version_diff);
+    ("only_in_left", package_set_to_json left_pkgs);
+    ("only_in_right", package_set_to_json right_pkgs);
+    ("duniverse_diff", duniverse_to_json duniverse_diff)
+  ]
42 opamdiff/opamdiff.mli
Normal file
@@ -0,0 +1,42 @@
type opam_diff = {
  pkg : OpamPackage.t ;
  build : (OpamTypes.command list * OpamTypes.command list) option ;
  install : (OpamTypes.command list * OpamTypes.command list) option ;
  url : (OpamFile.URL.t option * OpamFile.URL.t option) option ;
  otherwise_equal : bool ;
}

type version_diff = {
  name : OpamPackage.Name.t;
  version_left : OpamPackage.Version.t;
  version_right : OpamPackage.Version.t;
}

type duniverse_diff = {
  name : string ;
  urls : string * string option ;
  hash : (OpamHash.kind * string option * string option) list ;
}

val pp_opampackage : Format.formatter -> OpamPackage.t -> unit

val pp_version_diff : Format.formatter -> version_diff -> unit

val pp_duniverse_diff : Format.formatter -> duniverse_diff -> unit

val pp_duniverse_dir : Format.formatter -> string * string -> unit

val pp_opam_diff : Format.formatter -> opam_diff -> unit

val commands_to_strings : OpamTypes.command list * OpamTypes.command list -> string list * string list

val opt_url_to_string : OpamFile.URL.t option * OpamFile.URL.t option -> string * string

val compare : OpamFile.SwitchExport.t ->
  OpamFile.SwitchExport.t ->
  opam_diff list * version_diff list * OpamPackage.Set.t * OpamPackage.Set.t *
  ((string * string) list * (string * string) list * duniverse_diff list, [> `Msg of string ]) result

val compare_to_json : opam_diff list * version_diff list * OpamPackage.Set.t * OpamPackage.Set.t *
  ((string * string) list * (string * string) list * duniverse_diff list, [< `Msg of string ]) result -> Yojson.Basic.t
137 packaging/FreeBSD-repo.sh
Executable file
@@ -0,0 +1,137 @@
#!/bin/sh

set -ex

prog_NAME=$(basename "${0}")

warn()
{
    echo "${prog_NAME}: WARN: $*"
}

err()
{
    echo "${prog_NAME}: ERROR: $*" 1>&2
}

die()
{
    echo "${prog_NAME}: ERROR: $*" 1>&2
    exit 1
}

usage()
{
    cat <<EOM 1>&2
usage: ${prog_NAME} [ OPTIONS ] FILE
Updates a FreeBSD package repository
Options:
    --build-time=STRING
        Build timestamp (used for the version of the package).
    --sha256=STRING
        Hex encoded SHA256 digest of the main binary.
    --job=STRING
        Job name that was built.
    --main-binary-filepath=STRING
        The file path of the main binary.
EOM
    exit 1
}

BUILD_TIME=
SHA=
JOB=
FILEPATH=

while [ $# -gt 1 ]; do
    OPT="$1"

    case "${OPT}" in
        --build-time=*)
            BUILD_TIME="${OPT##*=}"
            ;;
        --sha256=*)
            SHA="${OPT##*=}"
            ;;
        --job=*)
            JOB="${OPT##*=}"
            ;;
        --main-binary-filepath=*)
            FILEPATH="${OPT##*=}"
            ;;
        --*)
            warn "Ignoring unknown option: '${OPT}'"
            ;;
        *)
            err "Unknown option: '${OPT}'"
            usage
            ;;
    esac
    shift
done

[ -z "${BUILD_TIME}" ] && die "The --build-time option must be specified"
[ -z "${SHA}" ] && die "The --sha256 option must be specified"
[ -z "${JOB}" ] && die "The --job option must be specified"
[ -z "${FILEPATH}" ] && die "The --main-binary-filepath option must be specified"

FILENAME="${1}"

: "${REPO:="/usr/local/www/pkg"}"
: "${REPO_KEY:="/usr/local/etc/builder-web/repo.key"}"

if [ "$(basename "${FILEPATH}" .pkg)" = "$(basename "${FILEPATH}")" ]; then
    echo "Not a FreeBSD package"
    exit 0
fi

if ls "${REPO}"/*/All/"${JOB}"-*."${SHA}".pkg > /dev/null; then
    echo "Same hash already present, nothing to do"
    exit 0
fi

TMP=$(mktemp -d -t repak)
MANIFEST="${TMP}/+MANIFEST"
TMPMANIFEST="${MANIFEST}.tmp"

cleanup () {
    rm -rf "${TMP}"
}

trap cleanup EXIT

PKG_ROOT="${TMP}/pkg"

tar x -C "${TMP}" -f "${FILENAME}"
mkdir "${PKG_ROOT}"
mv "${TMP}/usr" "${PKG_ROOT}"

VERSION=$(jq -r '.version' "${MANIFEST}")
# if we have a tagged version (1.5.0), append the number of commits and a dummy hash
VERSION_GOOD=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$') || true
VERSION_WITH_COMMIT=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\.[0-9]\+\.g[0-9a-fA-F]\+$') || true
if [ $VERSION_GOOD -eq 0 -a $VERSION_WITH_COMMIT -eq 0 ]; then
    die "version does not conform to (MAJOR.MINOR.PATCH[.#NUM_COMMITS.g<HASH>])"
fi
if [ $VERSION_WITH_COMMIT -eq 0 ]; then
    VERSION="${VERSION}.0.g0000000"
fi

NAME=$(jq -r '.name' "${MANIFEST}")
FULL_VERSION="${VERSION}.${BUILD_TIME}.${SHA}"

jq -ca ".version=\"$FULL_VERSION\"" "${MANIFEST}" > "${TMPMANIFEST}"
mv "${TMPMANIFEST}" "${MANIFEST}"

ABI=$(jq -r '.abi' "${MANIFEST}")
REPO_DIR="${REPO}/${ABI}"
PKG_DIR="${REPO_DIR}/All"

# to avoid races, first create the package in a temporary directory
# and then move it before recreating the index
pkg create -r "${PKG_ROOT}" -m "${MANIFEST}" -o "${TMP}"
mkdir -p "${PKG_DIR}"
rm -f "${PKG_DIR}"/"${NAME}"-*.pkg
mv "${TMP}/${NAME}-${FULL_VERSION}.pkg" "${PKG_DIR}"

pkg repo "${REPO_DIR}" "${REPO_KEY}"
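The script is driven entirely by its options; invoked by hand, the way builder-web's upload hook mechanism would call it, an invocation looks roughly like this (a sketch; the timestamp, digest, job name and paths are placeholder values):

```
./FreeBSD-repo.sh \
    --build-time=20230527202149 \
    --sha256=6118c392deadbeef \
    --job=builder-web \
    --main-binary-filepath=bin/builder-web.pkg \
    /tmp/upload/builder-web.pkg
```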
@@ -1,8 +1,8 @@
-name: builder_web
+name: builder-web
 version: %%VERSION_NUM%%
-origin: local/builder_web
+origin: local/builder-web
 comment: Builder web service
-www: https://git.robur.io/robur/builder-web
+www: https://git.robur.coop/robur/builder-web
 maintainer: Robur <team@robur.coop>
 prefix: /usr/local
 licenselogic: single
@@ -18,9 +18,56 @@ deps {
     },
     sqlite3 {
         origin = "databases/sqlite3";
     },
+    opam-graph {
+        origin = "local/opam-graph";
+    },
+    modulectomy {
+        origin = "local/modulectomy";
+    }
 }
+scripts : {
+    pre-install = <<EOD
+if [ -n "${PKG_ROOTDIR}" ] && [ "${PKG_ROOTDIR}" != "/" ]; then
+  PW="/usr/sbin/pw -R ${PKG_ROOTDIR}"
+else
+  PW=/usr/sbin/pw
+fi
+echo "===> Creating groups."
+if ! ${PW} groupshow builder >/dev/null 2>&1; then
+  echo "Creating group 'builder' with gid '497'."
+  ${PW} groupadd builder -g 497
+else
+  echo "Using existing group 'builder'."
+fi
+echo "===> Creating users"
+if ! ${PW} usershow builder >/dev/null 2>&1; then
+  echo "Creating user 'builder' with uid '497'."
+  ${PW} useradd builder -u 497 -g 497 -c "builder daemon" -d /nonexistent -s /usr/sbin/nologin
+else
+  echo "Using existing user 'builder'."
+fi
+EOD;
+    post-install = <<EOD
+mkdir -p -m 700 /var/db/builder-web
+chown builder:builder /var/db/builder-web
+EOD;
+    post-deinstall = <<EOD
+if [ -n "${PKG_ROOTDIR}" ] && [ "${PKG_ROOTDIR}" != "/" ]; then
+  PW="/usr/sbin/pw -R ${PKG_ROOTDIR}"
+else
+  PW=/usr/sbin/pw
+fi
+if ${PW} usershow builder >/dev/null 2>&1; then
+  echo "==> You should manually remove the \"builder\" user."
+fi
+if ${PW} groupshow builder >/dev/null 2>&1; then
+  echo "==> You should manually remove the \"builder\" group."
+fi
+EOD;
+}
 desc = <<EOD
 A web server serving and accepting reproducible builds
@@ -1,5 +1,12 @@
 #!/bin/sh -e

+# only execute anything if either
+# - running under orb with package = builder-web
+# - not running under opam at all
+if [ "$ORB_BUILDING_PACKAGE" != "builder-web" -a "$OPAM_PACKAGE_NAME" != "" ]; then
+    exit 0;
+fi
+
 basedir=$(realpath "$(dirname "$0")"/../..)
 pdir=$basedir/packaging/FreeBSD
 bdir=$basedir/_build/install/default/bin
@@ -11,27 +18,35 @@ rootdir=$tmpd/rootdir
 sbindir=$rootdir/usr/local/sbin
 rcdir=$rootdir/usr/local/etc/rc.d
+libexecdir=$rootdir/usr/local/libexec
+sharedir=$rootdir/usr/local/share/builder-web
+confdir=$rootdir/usr/local/etc/builder-web

 trap 'rm -rf $tmpd' 0 INT EXIT

-mkdir -p "$rootdir"/usr/local/sbin \
-    "$rootdir"/usr/local/libexec \
-    "$rootdir"/usr/local/etc/rc.d
+mkdir -p "$sbindir" "$libexecdir" "$rcdir" "$sharedir" "$confdir/upload-hooks"

 # stage service scripts
-install -U $pdir/rc.d/builder_web $rcdir/builder_web
+install -U "$pdir/rc.d/builder_web" "$rcdir/builder_web"

 # stage app binaries
-install -U $bdir/builder-web $libexecdir/builder-web
+install -U "$bdir/builder-web" "$libexecdir/builder-web"
-install -U $bdir/builder-migrations $sbindir/builder-migrations
-install -U $bdir/builder-db $sbindir/builder-db
+install -U "$bdir/builder-migrations" "$sbindir/builder-migrations"
+install -U "$bdir/builder-db" "$sbindir/builder-db"

+# stage visualization scripts
+install -U "$basedir/packaging/batch-viz.sh" "$confdir/batch-viz.sh.sample"
+install -U "$basedir/packaging/visualizations.sh" "$confdir/upload-hooks/visualizations.sh.sample"
+
+# example repo scripts
+install -U "$basedir/packaging/dpkg-repo.sh" "$sharedir/dpkg-repo.sh"
+install -U "$basedir/packaging/FreeBSD-repo.sh" "$sharedir/FreeBSD-repo.sh"
+
 # create +MANIFEST
 flatsize=$(find "$rootdir" -type f -exec stat -f %z {} + |
     awk 'BEGIN {s=0} {s+=$1} END {print s}')

-sed -e "s:%%FLATSIZE%%:${flatsize}:" "$pdir/MANIFEST" > "$manifest"
+sed -e "s:%%FLATSIZE%%:${flatsize}:" -e "/^[Vv]ersion:/s/-/./g" "$pdir/MANIFEST" > "$manifest"

 {
     printf '\nfiles {\n'
@@ -43,6 +58,7 @@ sed -e "s:%%FLATSIZE%%:${flatsize}:" "$pdir/MANIFEST" > "$manifest"
 } | sed -e "s:${rootdir}::" >> "$manifest"

 export SOURCE_DATE_EPOCH=$(git log -1 --pretty=format:%ct)
-pkg create -r "$rootdir" -M "$manifest" -o $basedir/
-mv $basedir/builder_web-*.txz $basedir/builder_web.txz
-rm $basedir/builder-web.install
+pkg create -r "$rootdir" -M "$manifest" -o "$basedir/"
+mv "$basedir"/builder-web-*.pkg "$basedir/builder-web.pkg"
+echo 'bin: [ "builder-web.pkg" ]' > "$basedir/builder-web.install"
+echo 'doc: [ "README.md" ]' >> "$basedir/builder-web.install"
@@ -33,7 +33,7 @@ procname="/usr/local/libexec/builder-web"

 builder_web_start () {
     echo "Starting ${name}."
-    /usr/sbin/daemon -S -p "${pidfile}" -u "${builder_web_user}" \
+    /usr/sbin/daemon -S -r -P "${pidfile}" -u "${builder_web_user}" \
         "${procname}" ${builder_web_flags}
 }
78 packaging/README.md
Normal file
@@ -0,0 +1,78 @@
# Package repository creation and update

Builder-web calls hooks when an upload of a successful build has finished.
These shell scripts automatically push builds to deb repositories (using
aptly) and FreeBSD package repositories (using pkg).

Thus, as a client of the infrastructure, system packages can be easily
installed using the package repositories (and updates are straightforward).

The tricky part is versioning: different input may result in the same output
(i.e. if the build system is updated, it is unlikely this will result in a
change of output, and clients do not need to update their packages), and also,
due to the nature of opam, if a dependency (opam package) is released, the
output may differ (although the final package version is not increased). We
solve the latter by adapting the version number of packages: package version
1.5.2 becomes 1.5.2-TIMESTAMP-SHA256 (on FreeBSD using '.' instead of '-').
The timestamp is of the form YYYYMMDDhhmmss. The SHA256 is the hex-encoded
SHA256 checksum of the original binary package and can be used for lookup in
the database.
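To make the scheme concrete, the repository scripts below construct the new version roughly like this (a sketch with made-up values; `VERSION`, `BUILD_TIME` and `SHA` follow the variable names used in the scripts):

```
VERSION="1.5.2"                          # version from the package metadata
BUILD_TIME="20230527202149"              # YYYYMMDDhhmmss
SHA="6118c392deadbeef"                   # (truncated) SHA256 of the binary package
echo "${VERSION}-${BUILD_TIME}-${SHA}"   # Debian: 1.5.2-20230527202149-6118c392deadbeef
echo "${VERSION}.${BUILD_TIME}.${SHA}"   # FreeBSD: 1.5.2.20230527202149.6118c392deadbeef
```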
## DPKG package repository

The dependencies are aptly and dpkg.

For the initial setup, a GPG private key is needed:
```
$ gpg --full-generate-key
$ gpg --export --armor > gpg.pub
```

Set `REPO_KEYID` in the shell script to the key identifier generated
(`gpg --list-keys`), and make the gpg.pub available to clients
(`cp gpg.pub ~/.aptly/public/`).

On clients, when `~/.aptly/public` is served via http(s), add it to your
/etc/apt/sources.list and import the gpg public key (`apt-key add <gpg.pub>`):

```
deb https://apt.robur.coop/ debian-10 main
```

The `debian-10` can be exchanged with any platform you're building debian
packages for.

Currently, `dpkg-repo.sh` sets HOME to `/home/builder` (where aptly expects
its configuration), and uses the platform (from builder) as the distribution.
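Once the repository entry and key are in place, installing the package on a client boils down to the usual apt workflow (a sketch):

```
$ apt update
$ apt install builder-web
```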
## FreeBSD package repository

The dependency is FreeBSD's pkg utility.

For the initial setup, an RSA private key is needed:
```
$ openssl genrsa -out repo.key 4096
$ chmod 0400 repo.key
$ openssl rsa -in repo.key -out repo.pub -pubout
```

And a directory that acts as package repository (`mkdir /usr/local/www/pkg`).
Copy the public key to the package repository
(`cp repo.pub /usr/local/www/pkg`) to make it available for clients.

Both can be configured in the shell script itself (`REPO` and `REPO_KEY`). The
public key needs to be distributed to clients - e.g. put it at the root of the
repository.

On clients, when that directory is served via http(s), it can be added to
/usr/local/etc/pkg/repos/robur.conf:

```
robur: {
    url: "https://pkg.robur.coop/${ABI}",
    mirror_type: "srv",
    signature_type: "pubkey",
    pubkey: "/path/to/repo.pub",
    enabled: yes
}
```
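With `robur.conf` in place, packages can be fetched and installed on the client (a sketch; `robur` is the repository name configured above):

```
$ pkg update -r robur
$ pkg install builder-web
```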
189 packaging/batch-viz.sh
Executable file
@@ -0,0 +1,189 @@
#!/bin/sh

prog_NAME=$(basename "${0}")

warn()
{
    echo "${prog_NAME}: WARN: $*"
}

info()
{
    echo "${prog_NAME}: INFO: $*"
}

err()
{
    echo "${prog_NAME}: ERROR: $*" 1>&2
}

die()
{
    echo "${prog_NAME}: ERROR: $*" 1>&2
    exit 1
}

usage()
{
    cat <<EOM 1>&2
usage: ${prog_NAME} [ OPTIONS ]
Generates visualizations of all things
    --data-dir=STRING
        Path to the data directory.
    --cache-dir=STRING
        Optional path to the cache directory. Defaults to DATA_DIR/_cache
    --viz-script=STRING
        Optional path to the visualizations.sh script. Defaults to ./visualizations.sh
    --ignore-done
        Optional flag to force the script to ignore '.done' files
EOM
    exit 1
}

CACHE_DIR=
DATA_DIR=
VISUALIZATIONS_CMD="./visualizations.sh"
IGNORE_DONE="false"

while [ $# -gt 0 ]; do
    OPT="$1"

    case "${OPT}" in
        --cache-dir=*)
            CACHE_DIR="${OPT##*=}"
            ;;
        --data-dir=*)
            DATA_DIR="${OPT##*=}"
            ;;
        --viz-script=*)
            VISUALIZATIONS_CMD="${OPT##*=}"
            ;;
        --ignore-done)
            IGNORE_DONE="true"
            ;;
        --*)
            warn "Ignoring unknown option: '${OPT}'"
            ;;
        *)
            err "Unknown option: '${OPT}'"
            usage
            ;;
    esac
    shift
done

[ -z "$DATA_DIR" ] && die "The --data-dir option must be specified"

DB="${DATA_DIR}/builder.sqlite3"
[ ! -e "$DB" ] && die "The database doesn't exist: '$DB'"

# Let's be somewhat lenient with the database version.
# In visualizations.sh we can be more strict.
DB_VERSION="$(sqlite3 "$DB" "PRAGMA user_version;")"
[ -z "$DB_VERSION" ] && die "Couldn't read database version from '$DB'"
[ "$DB_VERSION" -lt 16 ] && die "The database version should be >= 16. It is '$DB_VERSION'"

APP_ID="$(sqlite3 "$DB" "PRAGMA application_id;")"
[ -z "$APP_ID" ] && die "Couldn't read application-id from '$DB'"
[ "$APP_ID" -ne 1234839235 ] && die "The application-id should be = 1234839235. It is '$APP_ID'"

echo
echo "-----------------------------------------------------------------------------"
info "Starting batch creation of visualizations: $(date)"

if [ -z "$CACHE_DIR" ]; then
    CACHE_DIR="${DATA_DIR}/_cache"
    info "Defaulting --cache-dir to '$CACHE_DIR'"
fi
if [ ! -d "${CACHE_DIR}" ]; then
    info "Cache directory '$CACHE_DIR' doesn't exist, so it will be made"
    if ! mkdir "${CACHE_DIR}"; then
        die "Couldn't make cache directory: '$CACHE_DIR'"
    fi
fi

[ ! -e "${VISUALIZATIONS_CMD}" ] && die "'$VISUALIZATIONS_CMD' doesn't exist"
if [ -f "${VISUALIZATIONS_CMD}" ] && [ -x "${VISUALIZATIONS_CMD}" ]; then :; else
    die "'$VISUALIZATIONS_CMD' is not an executable"
fi

OPAM_GRAPH="opam-graph"
MODULECTOMY="modulectomy"

LATEST_TREEMAPVIZ_VERSION="$($MODULECTOMY --version)"
[ $? -ne 0 ] && die "Couldn't get modulectomy version"
LATEST_DEPENDENCIESVIZ_VERSION="$($OPAM_GRAPH --version)"
[ $? -ne 0 ] && die "Couldn't get opam-graph version"

TREEMAP_CACHE_DIR="${CACHE_DIR}/treemap_${LATEST_TREEMAPVIZ_VERSION}"
DEPENDENCIES_CACHE_DIR="${CACHE_DIR}/dependencies_${LATEST_DEPENDENCIESVIZ_VERSION}"

if
    [ "${IGNORE_DONE}" = "false" ] && \
    [ -f "${TREEMAP_CACHE_DIR}/.done" ] && \
    [ -f "${DEPENDENCIES_CACHE_DIR}/.done" ]; then
    info "Nothing to do"
    exit 0
fi

ATTEMPTED_VIZS=0
FAILED_VIZS=0

distinct-input () {
    {
        sqlite3 "${DATA_DIR}/builder.sqlite3" "SELECT b.uuid
        FROM build b
        JOIN build_artifact opam ON opam.build = b.id
        WHERE opam.filepath = 'opam-switch' AND b.main_binary NOT NULL
        GROUP BY opam.sha256;"
        sqlite3 "${DATA_DIR}/builder.sqlite3" "SELECT b.uuid
        FROM build b
        JOIN build_artifact debug ON debug.build = b.id
        WHERE debug.filepath LIKE '%.debug' AND b.main_binary NOT NULL
        GROUP BY debug.sha256;"
    } | sort -u
}

for UUID in $(distinct-input); do
    if ! "$VISUALIZATIONS_CMD" \
        --data-dir="${DATA_DIR}" \
        --cache-dir="${CACHE_DIR}" \
        --uuid="${UUID}"
    then
        FAILED_VIZS=$((FAILED_VIZS + 1))
    fi
    ATTEMPTED_VIZS=$((ATTEMPTED_VIZS + 1))
done

if [ -n "$(ls -A "${TREEMAP_CACHE_DIR}")" ]; then
    touch "${TREEMAP_CACHE_DIR}/.done"

    V=1
    while [ "$V" -lt "$LATEST_TREEMAPVIZ_VERSION" ]; do
        DIR_REMOVE="${CACHE_DIR}/treemap_${V}"
        if test -d "$DIR_REMOVE" && rm -r "$DIR_REMOVE"; then
            info "Removed old cache-directory: '$DIR_REMOVE'"
        fi
        V=$((V+1))
    done
else
    warn "Treemap-viz cache-directory is still empty - problem?"
fi

if [ -n "$(ls -A "${DEPENDENCIES_CACHE_DIR}")" ]; then
    touch "${DEPENDENCIES_CACHE_DIR}/.done"

    V=1
    while [ "$V" -lt "$LATEST_DEPENDENCIESVIZ_VERSION" ]; do
        DIR_REMOVE="${CACHE_DIR}/dependencies_${V}"
        if test -d "$DIR_REMOVE" && rm -r "$DIR_REMOVE"; then
            info "Removed old cache-directory: '$DIR_REMOVE'"
        fi
        V=$((V+1))
    done
else
    warn "Dependencies-viz cache-directory is still empty - problem?"
fi

info "Batch creation of visualizations for $ATTEMPTED_VIZS binaries, finished with $FAILED_VIZS failures: $(date)"
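For reference, a typical cron-driven invocation of this batch script only needs the data directory; the cache directory and per-build script fall back to their defaults (the path below is illustrative):

```
/usr/local/share/builder-web/batch-viz.sh --data-dir=/var/db/builder-web
```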
60 packaging/check_versions.sh
Executable file
@@ -0,0 +1,60 @@
#!/bin/sh

# input in versions.txt is <v1> <v2> with v1 < v2.
# v1 and v2 are of the form <version>-<date>-<hash>, where <version> includes:
# - 2.0.0
# - 2.0.0-10-gabcdef

freebsd_sanitize_version () {
    post=$(echo $1 | rev | cut -d '-' -f 1-2 | rev | sed -e 's/-/./g')
    v=$(echo $1 | rev | cut -d '-' -f 3- | rev | sed -e 's/-/./g')
    version_good=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$')
    version_with_commit=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\.[0-9]\+\.g[0-9a-fA-F]\+$')
    if [ $version_good -eq 0 -a $version_with_commit -eq 0 ]; then
        echo "invalid version $v";
        exit 1;
    fi
    if [ $version_with_commit -eq 0 ]; then
        v="${v}.0.g0000000.${post}"
    else
        v="${v}.${post}"
    fi
    echo $v
}

echo "using FreeBSD pkg to compare versions now:"
while read version_a version_b; do
    version_a=$(freebsd_sanitize_version $version_a)
    version_b=$(freebsd_sanitize_version $version_b)
    result=$(pkg version -t "$version_a" "$version_b")
    printf "%s %s %s\n" "$version_a" "$result" "$version_b"
done < versions.txt

debian_sanitize_version () {
    post=$(echo $1 | rev | cut -d '-' -f 1-2 | rev)
    v=$(echo $1 | rev | cut -d '-' -f 3- | rev)
    version_good=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$')
    version_with_commit=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\-[0-9]\+\-g[0-9a-fA-F]\+$')
    if [ $version_good -eq 0 -a $version_with_commit -eq 0 ]; then
        echo "invalid version $v";
        exit 1;
    fi
    if [ $version_with_commit -eq 0 ]; then
        v="${v}-0-g0000000-${post}"
    else
        v="${v}-${post}"
    fi
    echo $v
}

echo ""
echo "using Debian dpkg to compare versions now:"
while read version_a version_b; do
    version_a=$(debian_sanitize_version $version_a)
    version_b=$(debian_sanitize_version $version_b)
    if dpkg --compare-versions "$version_a" lt "$version_b"; then
        echo "$version_a < $version_b"
    else
        echo "$version_a >= $version_b"
    fi
done < versions.txt
12 packaging/debian/builder-web.service
Normal file
@@ -0,0 +1,12 @@
[Unit]
Description=Builder web daemon
After=syslog.target network.target

[Service]
Type=simple
User=builder
Group=builder
ExecStart=/usr/libexec/builder-web

[Install]
WantedBy=multi-user.target
5 packaging/debian/changelog
Normal file
@@ -0,0 +1,5 @@
builder-web (%%VERSION_NUM%%) unstable; urgency=medium

  * Initial release

 -- Robur team <team@robur.coop>
1 packaging/debian/conffiles
Normal file
@@ -0,0 +1 @@
/etc/builder-web/upload-hooks/visualizations.sh
13 packaging/debian/control
Normal file
@@ -0,0 +1,13 @@
Package: builder-web
Version: %%VERSION_NUM%%
Section: unknown
Priority: optional
Maintainer: Robur Team <team@robur.coop>
Standards-Version: 4.4.1
Homepage: https://git.robur.coop/robur/builder-web
Vcs-Browser: https://git.robur.coop/robur/builder-web
Vcs-Git: https://git.robur.coop/robur/builder-web.git
Architecture: all
Depends: libgmp10, libsqlite3-0, libev4, opam-graph, modulectomy
Description: Web service for storing and presenting builds.
 Builder-web stores builds in a sqlite database and serves them via HTTP.
8 packaging/debian/copyright
Normal file
@@ -0,0 +1,8 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: builder-web
Upstream-Contact: Robur Team <team@robur.coop>
Source: https://git.robur.coop/robur/builder-web

Files: *
Copyright: "Reynir Björnsson <reynir@reynir.dk>" "Hannes Mehnert <hannes@mehnert.org>"
License: ISC
54 packaging/debian/create_package.sh
Executable file
@@ -0,0 +1,54 @@
#!/bin/sh -e

# only execute anything if either
# - running under orb with package = builder-web
# - not running under opam at all
if [ "$ORB_BUILDING_PACKAGE" != "builder-web" -a "$OPAM_PACKAGE_NAME" != "" ]; then
    exit 0;
fi

basedir=$(realpath "$(dirname "$0")"/../..)
bdir=$basedir/_build/install/default/bin
tmpd=$basedir/_build/stage
rootdir=$tmpd/rootdir
sbindir=$rootdir/usr/sbin
systemddir=$rootdir/usr/lib/systemd/system
debiandir=$rootdir/DEBIAN
libexecdir=$rootdir/usr/libexec
sharedir=$rootdir/usr/share/builder-web
confdir=$rootdir/etc/builder-web

trap 'rm -rf $tmpd' 0 INT EXIT

mkdir -p "$sbindir" "$debiandir" "$systemddir" "$libexecdir" "$sharedir" \
    "$confdir" "$confdir/upload-hooks"

# stage app binaries
install "$bdir/builder-web" "$libexecdir/builder-web"
install "$bdir/builder-migrations" "$sbindir/builder-migrations"
install "$bdir/builder-db" "$sbindir/builder-db"

# service script
install -m 0644 "$basedir/packaging/debian/builder-web.service" "$systemddir/builder-web.service"

# visualizations scripts
install "$basedir/packaging/batch-viz.sh" "$confdir/batch-viz.sh"
install "$basedir/packaging/visualizations.sh" "$confdir/upload-hooks/visualizations.sh"

# example repo scripts
install "$basedir/packaging/dpkg-repo.sh" "$sharedir/dpkg-repo.sh"
install "$basedir/packaging/FreeBSD-repo.sh" "$sharedir/FreeBSD-repo.sh"

# install debian metadata
install -m 0644 "$basedir/packaging/debian/control" "$debiandir/control"
install -m 0644 "$basedir/packaging/debian/changelog" "$debiandir/changelog"
install -m 0644 "$basedir/packaging/debian/copyright" "$debiandir/copyright"
install -m 0644 "$basedir/packaging/debian/conffiles" "$debiandir/conffiles"
install "$basedir/packaging/debian/postinst" "$debiandir/postinst"

ARCH=$(dpkg-architecture -q DEB_TARGET_ARCH)
sed -i -e "s/^Architecture:.*/Architecture: ${ARCH}/" "$debiandir/control"

dpkg-deb --build "$rootdir" "$basedir/builder-web.deb"
echo 'bin: [ "builder-web.deb" ]' > "$basedir/builder-web.install"
echo 'doc: [ "README.md" ]' >> "$basedir/builder-web.install"
17 packaging/debian/postinst
Normal file
@@ -0,0 +1,17 @@
#!/bin/sh
set -e
BUILDER_WEB_USER=builder

if ! getent group "$BUILDER_WEB_USER" >/dev/null; then
    groupadd -g 497 $BUILDER_WEB_USER
fi
if ! getent passwd "$BUILDER_WEB_USER" >/dev/null; then
    useradd -g 497 -u 497 -d /nonexistent -s /usr/sbin/nologin $BUILDER_WEB_USER
fi

mkdir -p /var/lib/builder-web
chown "$BUILDER_WEB_USER:$BUILDER_WEB_USER" /var/lib/builder-web

if [ -d /run/systemd/system ]; then
    systemctl --system daemon-reload >/dev/null || true
fi
144 packaging/dpkg-repo.sh
Executable file
@@ -0,0 +1,144 @@
#!/bin/sh

set -ex

export HOME="/home/builder"

prog_NAME=$(basename "${0}")

warn()
{
    echo "${prog_NAME}: WARN: $*"
}

err()
{
    echo "${prog_NAME}: ERROR: $*" 1>&2
}

die()
{
    echo "${prog_NAME}: ERROR: $*" 1>&2
    exit 1
}

usage()
{
    cat <<EOM 1>&2
usage: ${prog_NAME} [ OPTIONS ] FILE
Updates an aptly package repository
Options:
    --build-time=STRING
        Build timestamp (used for the version of the package).
    --sha256=STRING
        Hex encoded SHA256 digest of the main binary.
    --job=STRING
        Job name that was built.
    --platform=STRING
        Platform name on which the build was performed.
    --main-binary-filepath=STRING
        The file path of the main binary.
EOM
    exit 1
}

BUILD_TIME=
SHA=
JOB=
PLATFORM=
FILEPATH=

while [ $# -gt 1 ]; do
    OPT="$1"

    case "${OPT}" in
        --build-time=*)
            BUILD_TIME="${OPT##*=}"
            ;;
        --sha256=*)
            SHA="${OPT##*=}"
            ;;
        --job=*)
            JOB="${OPT##*=}"
            ;;
        --platform=*)
            PLATFORM="${OPT##*=}"
            ;;
        --main-binary-filepath=*)
            FILEPATH="${OPT##*=}"
            ;;
        --*)
            warn "Ignoring unknown option: '${OPT}'"
            ;;
        *)
            err "Unknown option: '${OPT}'"
            usage
            ;;
    esac
    shift
done

[ -z "${BUILD_TIME}" ] && die "The --build-time option must be specified"
[ -z "${SHA}" ] && die "The --sha256 option must be specified"
[ -z "${JOB}" ] && die "The --job option must be specified"
[ -z "${PLATFORM}" ] && die "The --platform option must be specified"
[ -z "${FILEPATH}" ] && die "The --main-binary-filepath option must be specified"

FILENAME="${1}"

if [ "$(basename "${FILEPATH}" .deb)" = "$(basename "${FILEPATH}")" ]; then
    echo "Not a Debian package"
    exit 0
fi

if aptly repo show -with-packages "${PLATFORM}" | grep "${SHA}" > /dev/null; then
    echo "Package with same SHA256 already in repository"
    exit 0
fi

TMP=$(mktemp -d -t debrep)

cleanup () {
    rm -rf "${TMP}"
}

trap cleanup EXIT

PKG_ROOT="${TMP}/pkg"

mkdir "${PKG_ROOT}"

dpkg-deb -R "${FILENAME}" "${PKG_ROOT}"

VERSION=$(dpkg-deb -f "${FILENAME}" Version)
# if we have a tagged version (1.5.0), append the number of commits and a dummy hash
VERSION_GOOD=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$') || true
VERSION_WITH_COMMIT=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\-[0-9]\+\-g[0-9a-fA-F]\+$') || true
if [ $VERSION_GOOD -eq 0 -a $VERSION_WITH_COMMIT -eq 0 ]; then
    die "version does not conform to (MAJOR.MINOR.PATCH[-#NUM_COMMITS-g<HASH>])"
fi
if [ $VERSION_WITH_COMMIT -eq 0 ]; then
    VERSION="${VERSION}-0-g0000000"
fi

NEW_VERSION="${VERSION}"-"${BUILD_TIME}"-"${SHA}"

sed -i "" -e "s/Version:.*/Version: ${NEW_VERSION}/g" "${PKG_ROOT}/DEBIAN/control"

dpkg-deb --build "${PKG_ROOT}" "${TMP}"

if ! aptly repo show "${PLATFORM}" > /dev/null 2>&1; then
    aptly repo create --distribution="${PLATFORM}" "${PLATFORM}"
fi

PACKAGE=$(dpkg-deb -f "${FILENAME}" Package)
aptly repo remove "${PLATFORM}" "${PACKAGE}"
aptly repo add "${PLATFORM}" "${TMP}"

: "${REPO_KEYID:="D5E2DC92617877EDF7D4FD4345EA05FB7E26053D"}"

if ! aptly publish show "${PLATFORM}" > /dev/null 2>&1; then
    aptly publish repo -gpg-key="${REPO_KEYID}" "${PLATFORM}"
else
    aptly publish update -gpg-key="${REPO_KEYID}" "${PLATFORM}"
fi
9 packaging/versions.txt
Normal file
@@ -0,0 +1,9 @@
2.0.0-20220202-abcdef 2.0.1-20220203-123456
2.0.0-10-gabcdef-20220202-hahh 2.0.0-10-gabcdef-20220203-hahh
2.0.0-10-gabcdef-20220202-hahh 2.0.0-11-g123456-20220201-abcd
2.0.0-10-gabcdef-20220202-hahh 2.0.0-110-g123456-20220201-abcd
2.0.0-11-g123456-20220201-abcd 2.0.1-20220120-abcd
3.0.0-20230101-abcd 3.0.1-20230204-bdbd
1.5.0-20220516-a0d5a2 1.5.0-3-g26b5a59-20220527-0bc180
1.5.0-3-g26b5a59-20220527-0bc180 1.5.1-20220527-0bc180
0.1.0-20221120104301-f9e456637274844d45d9758ec661a136d0cda7966b075e4426b69fe6da00427b 0.1.0-237-g62965d4-20230527202149-6118c39221f318154e234098b5cffd4dc1d80f19cf2200cc6b1eb768dbf6decb
226 packaging/visualizations.sh
Executable file
@@ -0,0 +1,226 @@
#!/bin/sh

set -e
#set -x

prog_NAME=$(basename "${0}")

warn()
{
    echo "${prog_NAME}: WARN: $*"
}

info()
{
    echo "${prog_NAME}: INFO: $*"
}

err()
{
    echo "${prog_NAME}: ERROR: $*" 1>&2
}

die()
{
    echo "${prog_NAME}: ERROR: $*" 1>&2
    exit 1
}

usage()
{
    cat <<EOM 1>&2
usage: ${prog_NAME} [ OPTIONS ]
Generates visualizations
Options:
    --uuid=STRING
        UUID of build.
    --data-dir=STRING
        Path to the data directory.
    --cache-dir=STRING
        Path to the cache directory.
EOM
    exit 1
}

UUID=
CACHE_DIR=
DATA_DIR=

while [ $# -gt 0 ]; do
    OPT="$1"

    case "${OPT}" in
        --uuid=*)
            UUID="${OPT##*=}"
            ;;
        --cache-dir=*)
            CACHE_DIR="${OPT##*=}"
            ;;
        --data-dir=*)
            DATA_DIR="${OPT##*=}"
            ;;
        *)
            warn "Ignoring unknown option: '${OPT}' (Note that this script reads DB)"
            ;;
    esac
    shift
done

[ -z "${UUID}" ] && die "The --uuid option must be specified"
[ -z "${CACHE_DIR}" ] && die "The --cache-dir option must be specified"
[ -z "${DATA_DIR}" ] && die "The --data-dir option must be specified"

info "processing UUID '${UUID}'"

DB="${DATA_DIR}/builder.sqlite3"

# A new visualizations.sh script may be installed during an upgrade while the
# old builder-web binary is running. In that case things can get out of sync.
DB_VERSION="$(sqlite3 "$DB" "PRAGMA user_version;")"
[ -z "$DB_VERSION" ] && die "Couldn't read database version from '$DB'"
[ "$DB_VERSION" -ne 18 ] && die "The database version should be 18. It is '$DB_VERSION'"

APP_ID="$(sqlite3 "$DB" "PRAGMA application_id;")"
[ -z "$APP_ID" ] && die "Couldn't read application-id from '$DB'"
[ "$APP_ID" -ne 1234839235 ] && die "The application-id should be 1234839235. It is '$APP_ID'"

get_main_binary () {
    sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
    FROM build AS b
    JOIN build_artifact AS ba ON ba.build = b.id AND b.main_binary = ba.id
    WHERE uuid = '${UUID}';"
}

BIN="${DATA_DIR}/$(get_main_binary)" || die "Failed to get main binary from database"
[ -z "${BIN}" ] && die "No main-binary found in db '${DB}' for build '${UUID}'"

get_debug_binary () {
    sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
    FROM build AS b
    JOIN build_artifact AS ba ON ba.build = b.id
    WHERE
    uuid = '${UUID}'
    AND ba.filepath LIKE '%.debug';"
}

DEBUG_BIN_RELATIVE="$(get_debug_binary)" || die "Failed to get debug binary from database"

get_opam_switch () {
    sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
    FROM build AS b
    JOIN build_artifact AS ba ON ba.build = b.id
    WHERE
    uuid = '${UUID}'
    AND ba.filepath = 'opam-switch';"
}

OPAM_SWITCH="$(get_opam_switch)" || die "Failed to get opam switch from database"
[ -z "${OPAM_SWITCH}" ] && die "No 'opam-switch' found in db '${DB}' for build '${UUID}'"
OPAM_SWITCH="${DATA_DIR}/${OPAM_SWITCH}"

OPAM_GRAPH="opam-graph"
MODULECTOMY="modulectomy"

LATEST_TREEMAPVIZ_VERSION="$(${MODULECTOMY} --version)" || die "Failed to get modulectomy version"
LATEST_DEPENDENCIESVIZ_VERSION="$(${OPAM_GRAPH} --version)" || die "Failed to get opam-graph version"

TREEMAP_CACHE_DIR="${CACHE_DIR}/treemap_${LATEST_TREEMAPVIZ_VERSION}"
DEPENDENCIES_CACHE_DIR="${CACHE_DIR}/dependencies_${LATEST_DEPENDENCIESVIZ_VERSION}"

mktemp_aux () {
    if [ "$(uname)" = "Linux" ]; then
        mktemp -t "$1.XXX"
    elif [ "$(uname)" = "FreeBSD" ]; then
        mktemp -t "$1"
    else
        die 'Unsupported platform'
    fi
}

TMPTREE=$(mktemp_aux viz_treemap)
TMPDEPENDENCIES=$(mktemp_aux viz_dependencies)

cleanup () {
    rm -rf "${TMPTREE}" "${TMPDEPENDENCIES}"
}

trap cleanup EXIT

# /// Dependencies viz

if [ ! -d "${DEPENDENCIES_CACHE_DIR}" ]; then
    mkdir "${DEPENDENCIES_CACHE_DIR}" || die "Failed to create directory '${DEPENDENCIES_CACHE_DIR}'"
fi

OPAM_SWITCH_FILEPATH='opam-switch'

get_opam_switch_hash () {
    sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
    JOIN build_artifact AS ba ON ba.build = b.id
    WHERE uuid = '${UUID}'
    AND ba.filepath = '${OPAM_SWITCH_FILEPATH}';"
}

DEPENDENCIES_INPUT_HASH="$(get_opam_switch_hash)" || die "Failed to get opam-switch hash from database"
DEPENDENCIES_VIZ_FILENAME="${DEPENDENCIES_CACHE_DIR}/${DEPENDENCIES_INPUT_HASH}.html"

if [ -e "${DEPENDENCIES_VIZ_FILENAME}" ]; then
    info "Dependency visualization already exists: '${DEPENDENCIES_VIZ_FILENAME}'"
else
    if ${OPAM_GRAPH} --output-format=html "${OPAM_SWITCH}" > "${TMPDEPENDENCIES}"; then
        cp "${TMPDEPENDENCIES}" "${DEPENDENCIES_VIZ_FILENAME}"
        rm "${TMPDEPENDENCIES}"
    else
        die "opam-graph failed to generate visualization"
    fi
fi

# /// Treemap viz

stat_aux () {
    if [ "$(uname)" = "Linux" ]; then
        stat -c "%s" "$1"
    elif [ "$(uname)" = "FreeBSD" ]; then
        stat -f "%z" "$1"
    else
        die 'Unsupported platform'
    fi
}

SIZE="$(stat_aux "${BIN}")"

if [ ! -d "${TREEMAP_CACHE_DIR}" ]; then
    mkdir "${TREEMAP_CACHE_DIR}" || die "Failed to create directory '${TREEMAP_CACHE_DIR}'"
fi

get_debug_bin_hash () {
    sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
    JOIN build_artifact AS ba ON ba.build = b.id
    WHERE uuid = '${UUID}'
    AND ba.filepath LIKE '%.debug';"
}

TREEMAP_INPUT_HASH="$(get_debug_bin_hash)" || die "Failed to get treemap input hash from database"
TREEMAP_VIZ_FILENAME="${TREEMAP_CACHE_DIR}/${TREEMAP_INPUT_HASH}.html"

if [ -n "${DEBUG_BIN_RELATIVE}" ]; then
    DEBUG_BIN="${DATA_DIR}/$(get_debug_binary)"
    if [ -e "${TREEMAP_VIZ_FILENAME}" ]; then
        info "Treemap visualization already exists: '${TREEMAP_VIZ_FILENAME}'"
    else
        if
            ${MODULECTOMY} \
                --robur-defaults \
                --with-scale="${SIZE}" \
                "${DEBUG_BIN}" \
                > "${TMPTREE}"
        then
            cp "${TMPTREE}" "${TREEMAP_VIZ_FILENAME}"
            rm "${TMPTREE}"
        else
            die "modulectomy failed to generate visualization"
        fi
    fi
else
    info "No --debug-binary provided, not producing any treemap"
fi
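Run by hand for a single build, the hook receives the same three options builder-web would pass (the UUID and paths below are illustrative):

```
./visualizations.sh \
    --data-dir=/var/db/builder-web \
    --cache-dir=/var/db/builder-web/_cache \
    --uuid=8b3e6c1a-0000-0000-0000-000000000000
```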
17 test/dune
@@ -1,3 +1,16 @@
 (test
- (name builder_db)
- (libraries builder_db caqti.blocking alcotest mirage-crypto-rng.unix))
+ (name test_builder_db)
+ (modules test_builder_db)
+ (libraries ptime.clock.os builder_db caqti.blocking alcotest mirage-crypto-rng.unix ohex))
+
+(test
+ (name markdown_to_html)
+ (modules markdown_to_html)
+ (libraries builder_web cmarkit alcotest))
+
+(test
+ (name router)
+ (modules router)
+ (libraries builder_web fmt dream yojson alcotest)
+ (preprocess
+  (pps ppx_deriving.std ppx_deriving_yojson)))
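All three suites hook into dune's test runner, so they can be exercised with a single command from the repository root:

```
dune runtest
```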
201 test/markdown_to_html.ml
Normal file
@ -0,0 +1,201 @@
|
|||
let markdown_to_html = Builder_web__Utils.md_to_html
|
||||
|
||||
let test_simple () =
|
||||
let markdown = {|# Hello world|} in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "simple html" "<h1 id=\"hello-world\"><a class=\"anchor\" aria-hidden=\"true\" href=\"#hello-world\"></a>Hello world</h1>\n" html)
|
||||
|
||||
let test_html_script () =
|
||||
let markdown = {|# <script>Hello world</script>|} in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "html script header" "<h1 id=\"hello-world\"><a class=\"anchor\" aria-hidden=\"true\" href=\"#hello-world\"></a><!-- CommonMark raw HTML omitted -->Hello world<!-- CommonMark raw HTML omitted --></h1>\n" html)
|
||||
|
||||
let test_preserve_span_content () =
|
||||
let markdown = {|* <span id="myref">My ref</span>
|
||||
* [See my ref](#myref) for more information|} in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "html span content preserved"
|
||||
{|<ul>
|
||||
<li><!-- CommonMark raw HTML omitted -->My ref<!-- CommonMark raw HTML omitted --></li>
|
||||
<li><a href="#myref">See my ref</a> for more information</li>
|
||||
</ul>
|
||||
|}
|
||||
html)
|
||||
|
||||
let test_remove_script () =
|
||||
let markdown = {|<script>alert(1);</script>|} in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "html script removed" "<!-- CommonMark HTML block omitted -->\n" html)
|
||||
|
||||
let test_list_with_html_block_and_markdown () =
|
||||
let markdown = "* <div> Hello, World!</div> *this is not html*" in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "list with html block and markdown"
|
||||
(*"<ul>\n<li><em>this is not html</em>\n</li>\n</ul>\n"*)
|
||||
"<ul>\n<li>\n<!-- CommonMark HTML block omitted -->\n</li>\n</ul>\n"
|
||||
html)
|
||||
|
||||
let test_list_with_inline_html_and_markdown () =
|
||||
let markdown = "* <span> Hello, World!</span> *this is not html*" in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "list with html block and markdown"
|
||||
"<ul>\n<li><!-- CommonMark raw HTML omitted --> Hello, World!<!-- CommonMark raw HTML omitted --> <em>this is not html</em></li>\n</ul>\n"
|
||||
html)
|
||||
|
||||
let test_absolute_link () =
|
||||
let markdown = "[foo](https://foo.com)" in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "absolute link" "<p><a href=\"https://foo.com\">foo</a></p>\n" html)
|
||||
|
||||
let test_relative_link () =
|
||||
let markdown = "[foo](../foo.jpg)" in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "relative link" "<p><a href=\"../foo.jpg\">foo</a></p>\n" html)
|
||||
|
||||
let test_absolute_image () =
|
||||
let markdown = "![alttext](https://foo.com/bar.jpg)" in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "absolute image"
|
||||
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"alttext\" ></p>\n" html)
|
||||
|
||||
let test_absolute_image_no_alt () =
|
||||
let markdown = "![](https://foo.com/bar.jpg)" in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "absolute image"
|
||||
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" ></p>\n" html)
|
||||
|
||||
let test_relative_image () =
|
||||
let markdown = "![](/bar.jpg)" in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "relative image" "<p><img src=\"/bar.jpg\" alt=\"\" ></p>\n" html)
|
||||
|
||||
let test_absolute_image_script_alt () =
|
||||
let markdown = "![<script src=\"bla.js\"></script>](https://foo.com/bar.jpg)" in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "absolute image with script alt text"
|
||||
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" ></p>\n" html)
|
||||
|
||||
let test_fragment_link () =
|
||||
let markdown = "[fragment](#fragment)" in
|
||||
let html = markdown_to_html markdown in
|
||||
Alcotest.(check string "fragment link" "<p><a href=\"#fragment\">fragment</a></p>\n" html)
|
||||
|
||||
let test_heading_adjustment () =
|
||||
let markdown = {|# foo
|
||||
## bar
|
||||
# baz
|
||||
## bazbar
|
||||
### bazbarbar
|
||||
#### bazbarbarbar
|
||||
##### bazbarbarbarbar
|
||||
###### bazbarbarbarbarbar
|
||||
|}
|
||||
in
|
||||
let html = markdown_to_html ~adjust_heading:2 markdown in
|
||||
(* NB: the maximum heading is 6 in cmarkit, thus we reduce the structure *)
|
||||
let exp = {|<h3 id="foo"><a class="anchor" aria-hidden="true" href="#foo"></a>foo</h3>
|
||||
<h4 id="bar"><a class="anchor" aria-hidden="true" href="#bar"></a>bar</h4>
|
||||
<h3 id="baz"><a class="anchor" aria-hidden="true" href="#baz"></a>baz</h3>
|
||||
<h4 id="bazbar"><a class="anchor" aria-hidden="true" href="#bazbar"></a>bazbar</h4>
|
||||
<h5 id="bazbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbar"></a>bazbarbar</h5>
|
||||
<h6 id="bazbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbar"></a>bazbarbarbar</h6>
|
||||
<h6 id="bazbarbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbarbar"></a>bazbarbarbarbar</h6>
|
||||
<h6 id="bazbarbarbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbarbarbar"></a>bazbarbarbarbarbar</h6>
|
||||
|} in
|
||||
Alcotest.(check string "header adjustment works fine" exp html)
|
||||
|
||||
let test_table () =
|
||||
let markdown = {__|| a | | b | c | d | e |
|
||||
| --------------------- |-| -------------- | -------------- | --------------- | ------ |
|
||||
| entry | | **bla.file** | **other.file** | | |
|
||||
| _another entry_ | | **another.file** | **another.other** | | |
|
||||
|__}
|
||||
in
|
||||
let html = markdown_to_html ~adjust_heading:2 markdown in
|
||||
let exp = {|<div role="region"><table>
|
||||
<tr>
|
||||
<th>a</th>
|
||||
<th></th>
|
||||
<th>b</th>
|
||||
<th>c</th>
|
||||
<th>d</th>
|
||||
<th>e</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>entry</td>
|
||||
<td></td>
|
||||
<td><strong>bla.file</strong></td>
|
||||
<td><strong>other.file</strong></td>
|
||||
<td></td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><em>another entry</em></td>
|
||||
<td></td>
|
||||
<td><strong>another.file</strong></td>
|
||||
<td><strong>another.other</strong></td>
|
||||
<td></td>
|
||||
<td></td>
|
||||
</tr>
|
||||
</table></div>|} in
|
||||
Alcotest.(check string "table is rendered as html" exp html)
|
||||
|
||||
let test_table2 () =
  let markdown = {__|| a | | b | c | d | e |
| --------------------- |-| -------------- | -------------- | --------------- | ------ |
| entry | | | | **bla.file** | **other.file** |
| _another entry_ | | | **another.file** | **another.other** | |
|__}
  in
  let html = markdown_to_html ~adjust_heading:2 markdown in
  let exp = {|<div role="region"><table>
<tr>
<th>a</th>
<th></th>
<th>b</th>
<th>c</th>
<th>d</th>
<th>e</th>
</tr>
<tr>
<td>entry</td>
<td></td>
<td></td>
<td></td>
<td><strong>bla.file</strong></td>
<td><strong>other.file</strong></td>
</tr>
<tr>
<td><em>another entry</em></td>
<td></td>
<td></td>
<td><strong>another.file</strong></td>
<td><strong>another.other</strong></td>
<td></td>
</tr>
</table></div>|} in
  Alcotest.(check string "table is rendered as html" exp html)

let markdown_tests = [
  Alcotest.test_case "Simple" `Quick test_simple;
  Alcotest.test_case "script header" `Quick test_html_script;
  Alcotest.test_case "preserve span content" `Quick test_preserve_span_content;
  Alcotest.test_case "Remove script" `Quick test_remove_script;
  Alcotest.test_case "List with html block and markdown" `Quick test_list_with_html_block_and_markdown;
  Alcotest.test_case "List with inline html and markdown" `Quick test_list_with_inline_html_and_markdown;
  Alcotest.test_case "absolute link" `Quick test_absolute_link;
  Alcotest.test_case "relative link" `Quick test_relative_link;
  Alcotest.test_case "absolute image" `Quick test_absolute_image;
  Alcotest.test_case "absolute image no alt" `Quick test_absolute_image_no_alt;
  Alcotest.test_case "relative image" `Quick test_relative_image;
  Alcotest.test_case "absolute image with script alt" `Quick test_absolute_image_script_alt;
  Alcotest.test_case "fragment link" `Quick test_fragment_link;
  Alcotest.test_case "heading adjustment" `Quick test_heading_adjustment;
  Alcotest.test_case "table" `Quick test_table;
  Alcotest.test_case "table2" `Quick test_table2;
]

let () =
  Alcotest.run "Markdown to HTML" [
    "markdown", markdown_tests
  ]
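The heading-adjustment tests above hinge on one rule: every heading level is shifted by `adjust_heading` and clamped at cmarkit's maximum of 6. A minimal standalone sketch of that rule (`adjust_level` is a hypothetical name for illustration, not the project's actual internals):

(* Hypothetical sketch: shift a heading level by [adjust] and clamp it to
   cmarkit's maximum of 6, matching the expectations encoded in
   test_heading_adjustment above. *)
let adjust_level ~adjust level = min 6 (level + adjust)

let () =
  assert (adjust_level ~adjust:2 1 = 3); (* "# foo"    renders as <h3> *)
  assert (adjust_level ~adjust:2 4 = 6); (* "#### ..." clamps to <h6>  *)
  assert (adjust_level ~adjust:2 6 = 6)  (* already past the cap       *)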
191
test/router.ml
Normal file
@@ -0,0 +1,191 @@
module Param_verification = struct

  (*> None is 'verified'*)
  type t = wrong_type option
  [@@deriving yojson,show,eq]

  and wrong_type = {
    param : string;
    expected : string;
  }

  let alcotyp = Alcotest.testable pp equal

  module P = struct

    let is_string : (string * string) -> _ option =
      Fun.const None

    let is_uuid (param, value) =
      match Uuidm.of_string value with
      | Some _ when String.length value = 36 -> None
      | _ -> Some {
          param;
          expected = "Uuidm.t"
        }

  end

  let verify parameters req =
    let verified_params =
      List.fold_left (fun acc p ->
          match acc with
          | None ->
            if String.starts_with ~prefix:"build" p then
              P.is_uuid (p, Dream.param req p)
            else
              P.is_string (p, Dream.param req p)
          | Some _ as x -> x)
        None parameters
    in
    let response_json =
      verified_params |> to_yojson |> Yojson.Safe.to_string
    in
    Dream.respond response_json

end

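A quick illustration of how `P.is_uuid` above behaves: `Uuidm.of_string` parses the canonical 36-character UUID form, and the extra length check rejects anything shorter that might still parse. The example values here are invented, not taken from the test suite:

(* Sketch: a canonical UUID verifies to None ('verified'); junk yields a
   wrong_type record naming the parameter. *)
let () =
  assert (Param_verification.P.is_uuid
            ("build", "00000000-0000-0000-0000-000000000000") = None);
  assert (Param_verification.P.is_uuid ("build", "not-a-uuid") <> None)
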
let find_parameters path =
  List.filter_map (fun s ->
      if String.length s > 0 && String.get s 0 = ':' then
        Some (String.sub s 1 (String.length s - 1))
      else
        None)
    (String.split_on_char '/' path)

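For reference, `find_parameters` simply collects the `:`-prefixed segments of a route pattern, dropping the colon. The route strings below are arbitrary examples, not ones taken from `Builder_web.routes`:

(* Splitting on '/' and keeping ":"-prefixed segments without the colon: *)
let () =
  assert (find_parameters "/job/:job/build/:build" = ["job"; "build"]);
  assert (find_parameters "/failed-builds" = [])
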
let router =
  (* XXX: this relies on [Builder_web.routes] only using {data,cache,config}dir
   * in the handlers, which are never called here. The path /nonexistant is
   * assumed not to exist. *)
  let nodir = Fpath.v "/nonexistant" in
  Builder_web.routes ~datadir:nodir ~cachedir:nodir ~configdir:nodir ~expired_jobs:0
  |> List.map (fun (meth, route, _handler) ->
      meth, route, Param_verification.verify (find_parameters route))
  |> Builder_web.to_dream_routes
  |> Dream.router
  (* XXX: we test without remove_trailing_url_slash to ensure we don't produce
   * urls with trailing slashes: *)
  (* |> Builder_web.Middleware.remove_trailing_url_slash *)
  |> Dream.test

let test_link method_ target () =
  let req = Dream.request ~method_ ~target "" in
  let resp = router req in
  let status_code = Dream.(status resp |> status_to_int) in
  Alcotest.(check' int ~msg:"status-code" ~actual:status_code ~expected:200);
  let body =
    Dream.body resp
    |> Lwt_main.run
    |> Yojson.Safe.from_string
    |> Param_verification.of_yojson
  in
  Alcotest.(check' (result Param_verification.alcotyp string) ~msg:"param-verification"
              ~actual:body ~expected:(Ok None))

let test_link_artifact artifact =
  let job_name = "test" in
  let build = Uuidm.v4_gen (Random.State.make_self_init ()) () in
  test_link `GET @@
  Builder_web.Link.Job_build_artifact.make ~job_name ~build ~artifact ()

let () =
  Alcotest.run "Router" [
    "Link module synced", Alcotest.[
      test_case "Link.Root.make" `Quick begin
        test_link `GET @@ Builder_web.Link.Root.make ()
      end;
      test_case "Link.Job.make" `Quick begin
        let queries = [ `Platform "test" ] in
        let job_name = "test" in
        test_link `GET @@ Builder_web.Link.Job.make ~queries ~job_name ()
      end;
      test_case "Link.Job.make_failed" `Quick begin
        let queries = [ `Platform "test" ] in
        let job_name = "test" in
        test_link `GET @@
        Builder_web.Link.Job.make_failed ~queries ~job_name ()
      end;
      test_case "Link.Job_build.make" `Quick begin
        let job_name = "test" in
        let build = Uuidm.v4_gen (Random.State.make_self_init ()) () in
        test_link `GET @@ Builder_web.Link.Job_build.make ~job_name ~build ()
      end;
      test_case "Link.Job_build_artifact.make_from_string" `Quick begin
        let job_name = "test" in
        let build = Uuidm.v4_gen (Random.State.make_self_init ()) () in
        let artifact = "" in
        test_link `GET @@
        Builder_web.Link.Job_build_artifact.make_from_string
          ~job_name ~build ~artifact ()
      end;
    ] @ Alcotest.(
      [
        `Main_binary;
        `Viz_treemap;
        `Viz_dependencies;
        `Script;
        `Console;
        `All_targz;
        `File Fpath.(v "some" / "path");
      ]
      |> List.map (fun artifact ->
          let descr =
            Fmt.str "Job_build_artifact.make: %s" @@
            Builder_web.Link.Job_build_artifact.encode_artifact artifact
          in
          test_case descr `Quick begin
            test_link_artifact artifact
          end
        )
    ) @ Alcotest.[
      test_case "Link.Compare_builds.make" `Quick begin
        let left = Uuidm.v4_gen (Random.State.make_self_init ()) () in
        let right = Uuidm.v4_gen (Random.State.make_self_init ()) () in
        test_link `GET @@
        Builder_web.Link.Compare_builds.make ~left ~right ()
      end;
      test_case "Link.Failed_builds.make" `Quick begin
        test_link `GET @@
        Builder_web.Link.Failed_builds.make ~count:2 ~start:1 ()
      end;
    ];
    (* this doesn't actually test the redirects, unfortunately *)
    "Latest", List.map (fun p -> Alcotest.(test_case ("…"^p) `Quick begin
        test_link `GET @@ "/job/test/build/latest" ^ p end))
      [
        "/f/bin/unikernel.hvt";
        "/";
        "";
      ];
    "Albatross hardcoded links",
    [
      (*> Note: these links can be found in
          albatross/command-line/albatross_client_update.ml
          .. to find them I followed the trail of 'Albatross_cli.http_host'
      *)
      begin
        let sha_str =
          Digestif.SHA256.(to_raw_string (digest_string "foo"))
          |> Ohex.encode
        in
        Fmt.str "/hash?sha256=%s" sha_str
      end;
      begin
        let jobname = "foo" in
        "/job/" ^ jobname ^ "/build/latest"
      end;
      begin
        let job = "foo" in
        let build = Uuidm.(v4_gen (Random.State.make_self_init ()) () |> to_string) in
        "/job/" ^ job ^ "/build/" ^ build ^ "/main-binary"
      end;
      begin
        let old_uuid = Uuidm.(v4_gen (Random.State.make_self_init ()) () |> to_string) in
        let new_uuid = Uuidm.(v4_gen (Random.State.make_self_init ()) () |> to_string) in
        Fmt.str "/compare/%s/%s" old_uuid new_uuid
      end;
    ]
    |> List.map Alcotest.(fun p ->
        test_case ("…" ^ p) `Quick (test_link `GET p))
  ]

@@ -1,8 +1,9 @@
-open Rresult.R.Infix
+let ( >>= ) = Result.bind
+let ( >>| ) x f = Result.map f x
 
 module type CONN = Caqti_blocking.CONNECTION
 
-let () = Mirage_crypto_rng_unix.initialize ()
+let () = Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna)
 
 let iter f xs = List.fold_left (fun r x -> r >>= fun () -> f x) (Ok ()) xs
 let get_opt message = function
@@ -24,8 +25,8 @@ module Testable = struct
     x.restricted = y.restricted &&
     match x.password_hash, y.password_hash with
     | `Scrypt (hash, salt, params), `Scrypt (hash', salt', params') ->
-      Cstruct.equal hash hash' &&
-      Cstruct.equal salt salt' &&
+      String.equal hash hash' &&
+      String.equal salt salt' &&
       params = params'
   in
   let pp ppf { Builder_web_auth.username; password_hash; restricted } =
@@ -33,7 +34,7 @@ module Testable = struct
     | `Scrypt (hash, salt, { Builder_web_auth.scrypt_n; scrypt_r; scrypt_p }) ->
       Format.fprintf ppf "user:%s;(%d,%d,%d);%B;%a;%a" username
         scrypt_n scrypt_r scrypt_p restricted
-        Cstruct.hexdump_pp hash Cstruct.hexdump_pp salt
+        Ohex.pp hash Ohex.pp salt
   in
   Alcotest.testable
     pp
@@ -42,18 +43,15 @@ module Testable = struct
   let file =
     let equal (x : Builder_db.Rep.file) (y : Builder_db.Rep.file) =
       Fpath.equal x.filepath y.filepath &&
-      Fpath.equal x.localpath y.localpath &&
-      Cstruct.equal x.sha256 y.sha256 &&
+      String.equal x.sha256 y.sha256 &&
       x.size = y.size
     in
-    let pp ppf { Builder_db.Rep.filepath; localpath; sha256; size } =
+    let pp ppf { Builder_db.Rep.filepath; sha256; size } =
       Format.fprintf ppf "{@[<v 1>@;<1 0>Builder_db.Rep.filepath = %a;@;<1 0>\
-                          localpath = %a;@;<1 0>\
                           sha256 = %a;@;<1 0>\
                           size = %d;@;<1 0>\
                           @]@,}"
-        Fpath.pp filepath Fpath.pp localpath
-        Cstruct.hexdump_pp sha256 size
+        Fpath.pp filepath Ohex.pp sha256 size
     in
     Alcotest.testable pp equal
 
@@ -107,16 +105,6 @@ let test_user_update (module Db : CONN) =
   let auth_opt = Option.map snd res in
   Alcotest.(check (option Testable.builder_web_auth)) "update user" auth_opt (Some auth')
 
-let test_user_remove (module Db : CONN) =
-  Db.find_opt Builder_db.User.get_user username >>= function
-  | None ->
-    Alcotest.fail "user not found"
-  | Some (id, _auth') ->
-    Db.exec Builder_db.User.remove id >>= fun () ->
-    Db.find_opt Builder_db.User.get_user username >>| fun res ->
-    let auth_opt = Option.map snd res in
-    Alcotest.(check (option Testable.builder_web_auth)) "remove user" auth_opt None
-
 let test_user_auth (module Db : CONN) =
   Db.find_opt Builder_db.User.get_user username >>| function
   | None ->
@@ -134,44 +122,40 @@ let test_user_unauth (module Db : CONN) =
     (Builder_web_auth.verify_password "wrong" auth') false
 
 let job_name = "test-job"
-let script = {|#!/bin/sh
-echo '#!/bin/sh' > bin/hello.sh
-echo 'echo Hello, World!' > bin/hello.sh
-|}
-let uuid = Uuidm.create `V4
-let console = [(0, "Hello, World!")]
+let script = Fpath.v "/dev/null"
+let uuid = Uuidm.v4_gen (Random.State.make_self_init ()) ()
+let console = Fpath.v "/dev/null"
 let start = Option.get (Ptime.of_float_s 0.)
 let finish = Option.get (Ptime.of_float_s 1.)
 let result = Builder.Exited 0
 let main_binary =
   let filepath = Result.get_ok (Fpath.of_string "bin/hello.sh") in
-  let localpath = Result.get_ok (Fpath.of_string "/dev/null") in
   let data = "#!/bin/sh\necho Hello, World\n" in
-  let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
+  let sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
   let size = String.length data in
-  { Builder_db.Rep.filepath; localpath; sha256; size }
+  { Builder_db.Rep.filepath; sha256; size }
+let main_binary2 =
+  let data = "#!/bin/sh\necho Hello, World 2\n" in
+  let sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
+  let size = String.length data in
+  { main_binary with sha256 ; size }
+let platform = "exotic-os"
 
 let fail_if_none a =
   Option.to_result ~none:(`Msg "Failed to retrieve") a
 
 let add_test_build user_id (module Db : CONN) =
   let r =
     let open Builder_db in
     Db.start () >>= fun () ->
     Db.exec Job.try_add job_name >>= fun () ->
     Db.find_opt Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
-    Db.exec Build.add { Build.uuid; start; finish; result; console; script;
-                        main_binary = None; user_id;
-                        job_id } >>= fun () ->
+    Db.exec Build.add { Build.uuid; start; finish; result; console; script; platform;
+                        main_binary = None; input_id = None; user_id; job_id } >>= fun () ->
     Db.find last_insert_rowid () >>= fun id ->
     Db.exec Build_artifact.add (main_binary, id) >>= fun () ->
     Db.find last_insert_rowid () >>= fun main_binary_id ->
     Db.exec Build.set_main_binary (id, main_binary_id) >>= fun () ->
     Db.commit ()
   in
   Rresult.R.kignore_error
     ~use:(fun _ -> Db.rollback ())
     r
 
 let with_build_db f () =
   or_fail
@@ -180,10 +164,6 @@ let with_build_db f () =
       add_test_build user_id conn >>= fun () ->
       f conn)
 
-let test_job_get_all (module Db : CONN) =
-  Db.collect_list Builder_db.Job.get_all () >>| fun jobs ->
-  Alcotest.(check int) "one job" (List.length jobs) 1
-
 let test_job_get_id_by_name (module Db : CONN) =
   Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>| fun _id ->
   ()
@@ -199,8 +179,9 @@ let test_job_remove () =
     Db.exec Builder_db.Job.try_add "test-job" >>= fun () ->
     Db.find_opt Builder_db.Job.get_id_by_name "test-job" >>= fail_if_none >>= fun id ->
     Db.exec Builder_db.Job.remove id >>= fun () ->
-    Db.collect_list Builder_db.Job.get_all () >>| fun jobs ->
-    Alcotest.(check int) "no jobs" (List.length jobs) 0
+    match Db.find Builder_db.Job.get id with
+    | Error #Caqti_error.call_or_retrieve -> Ok ()
+    | Ok _ -> Alcotest.fail "expected no job"
   in
   or_fail r
 
@@ -214,12 +195,7 @@ let test_build_get_all (module Db : CONN) =
   Db.collect_list Builder_db.Build.get_all job_id >>| fun builds ->
   Alcotest.(check int) "one build" (List.length builds) 1
 
-let test_build_get_all_meta (module Db : CONN) =
-  Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
-  Db.collect_list Builder_db.Build.get_all_meta job_id >>| fun builds ->
-  Alcotest.(check int) "one build" (List.length builds) 1
-
-let uuid' = Uuidm.create `V4
+let uuid' = Uuidm.v4_gen (Random.State.make_self_init ()) ()
 let start' = Option.get (Ptime.of_float_s 3600.)
 let finish' = Option.get (Ptime.of_float_s 3601.)
 
@@ -229,11 +205,10 @@ let add_second_build (module Db : CONN) =
   Db.find_opt User.get_user username >>= fail_if_none >>= fun (user_id, _) ->
   Db.start () >>= fun () ->
   Db.find_opt Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
-  Db.exec Build.add { Build.uuid; start; finish; result; console; script;
-                      main_binary = None; user_id; job_id;
-                    } >>= fun () ->
+  Db.exec Build.add { Build.uuid; start; finish; result; console; script; platform;
+                      main_binary = None; input_id = None; user_id; job_id; } >>= fun () ->
   Db.find last_insert_rowid () >>= fun id ->
-  Db.exec Build_artifact.add (main_binary, id) >>= fun () ->
+  Db.exec Build_artifact.add (main_binary2, id) >>= fun () ->
   Db.find last_insert_rowid () >>= fun main_binary_id ->
   Db.exec Build.set_main_binary (id, main_binary_id) >>= fun () ->
   Db.commit ()
@@ -242,38 +217,34 @@ let test_build_get_latest (module Db : CONN) =
   add_second_build (module Db) >>= fun () ->
   (* Test *)
   Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
-  Db.find_opt Builder_db.Build.get_latest job_id
+  Db.find_opt Builder_db.Build.get_latest_successful_with_binary (job_id, platform)
   >>| get_opt "no latest build" >>| fun (_id, meta, main_binary') ->
-  Alcotest.(check (option Testable.file)) "same main binary" main_binary' (Some main_binary);
+  Alcotest.(check Testable.file) "same main binary" main_binary2 main_binary';
   Alcotest.(check Testable.uuid) "same uuid" meta.uuid uuid'
 
 let test_build_get_latest_uuid (module Db : CONN) =
   add_second_build (module Db) >>= fun () ->
   (* Test *)
   Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
   Db.find_opt Builder_db.Build.get_latest_uuid job_id
   >>| get_opt "no latest build" >>| fun (_id, latest_uuid) ->
   Alcotest.(check Testable.uuid) "same uuid" latest_uuid uuid'
 
 let test_build_get_previous (module Db : CONN) =
   add_second_build (module Db) >>= fun () ->
   Db.find_opt Builder_db.Build.get_by_uuid uuid'
   >>| get_opt "no build" >>= fun (id, _build) ->
-  Db.find_opt Builder_db.Build.get_previous_successful id
-  >>| get_opt "no previous build" >>| fun (_id, meta) ->
-  Alcotest.(check Testable.uuid) "same uuid" meta.uuid uuid
+  Db.find_opt Builder_db.Build.get_previous_successful_different_output id
+  >>| get_opt "no previous build" >>| fun build ->
+  Alcotest.(check Testable.uuid) "same uuid" build.Builder_db.Build.uuid uuid
 
 let test_build_get_previous_none (module Db : CONN) =
   Db.find_opt Builder_db.Build.get_by_uuid uuid
   >>| get_opt "no build" >>= fun (id, _build) ->
-  Db.find_opt Builder_db.Build.get_previous_successful id >>| function
+  Db.find_opt Builder_db.Build.get_previous_successful_different_output id >>| function
   | None -> ()
-  | Some (_id, meta) ->
-    Alcotest.failf "Got unexpected result %a" Uuidm.pp meta.uuid
+  | Some build ->
+    Alcotest.failf "Got unexpected result %a" Uuidm.pp build.Builder_db.Build.uuid
 
-let test_build_get_by_hash (module Db : CONN) =
+let test_build_get_with_jobname_by_hash (module Db : CONN) =
   add_second_build (module Db) >>= fun () ->
-  Db.find_opt Builder_db.Build.get_by_hash main_binary.sha256
+  Db.find_opt Builder_db.Build.get_with_jobname_by_hash main_binary.sha256
   >>| get_opt "no build" >>= fun (job_name', build) ->
   Alcotest.(check string) "same job" job_name' job_name;
   Alcotest.(check Testable.uuid) "same uuid" build.uuid uuid;
+  Db.find_opt Builder_db.Build.get_with_jobname_by_hash main_binary2.sha256
+  >>| get_opt "no build" >>| fun (job_name', build) ->
+  Alcotest.(check string) "same job" job_name' job_name;
+  Alcotest.(check Testable.uuid) "same uuid" build.uuid uuid'
@@ -290,12 +261,13 @@ let test_artifact_get_by_build_uuid (module Db : CONN) =
   get_opt "no build" >>| fun (_id, file) ->
   Alcotest.(check Testable.file) "same file" file main_binary
 
 let test_artifact_get_by_build (module Db : CONN) =
   Db.find_opt Builder_db.Build.get_by_uuid uuid >>|
   get_opt "no build" >>= fun (id, _build) ->
   Db.find Builder_db.Build_artifact.get_by_build
     (id, main_binary.filepath) >>| fun (_id, file) ->
   Alcotest.(check Testable.file) "same file" file main_binary
+
+let test_artifact_exists_true (module Db : CONN) =
+  Db.find Builder_db.Build_artifact.exists main_binary.sha256 >>| fun exists ->
+  Alcotest.(check bool) "main binary exists" true exists
+
+let test_artifact_exists_false (module Db : CONN) =
+  Db.find Builder_db.Build_artifact.exists main_binary2.sha256 >>| fun exists ->
+  Alcotest.(check bool) "main binary2 doesn't exist" false exists
 
 (* XXX: This test should fail because the corresponding build still references
  * its main_binary. This is currently not the case due to the foreign key. *)
@@ -304,6 +276,39 @@ let test_artifact_remove_by_build (module Db : CONN) =
   get_opt "no build" >>= fun (id, _build) ->
   Db.exec Builder_db.Build_artifact.remove_by_build id
 
+let test_get_builds_older_than (module Db : CONN) =
+  add_second_build (module Db) >>= fun () ->
+  let date = Option.get (Ptime.of_float_s (3600. /. 2.)) in
+  Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
+  Db.collect_list Builder_db.Build.get_builds_older_than (job_id, None, date) >>= fun builds ->
+  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
+  Alcotest.(check (list Testable.uuid)) "last build" builds [ uuid ];
+  Db.collect_list Builder_db.Build.get_builds_older_than (job_id, None, Ptime_clock.now ()) >>= fun builds ->
+  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
+  (* NOTE(dinosaure): from the most recent to the oldest. *)
+  Alcotest.(check (list Testable.uuid)) "last builds" builds [ uuid'; uuid ];
+  Ok ()
+
+let test_builds_excluding_latest_n (module Db : CONN) =
+  add_second_build (module Db) >>= fun () ->
+  Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
+  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 1) >>= fun builds ->
+  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
+  Alcotest.(check (list Testable.uuid)) "keep recent build" builds [ uuid ];
+  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 2) >>= fun builds ->
+  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
+  Alcotest.(check (list Testable.uuid)) "keep 2 builds" builds [];
+  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 3) >>= fun builds ->
+  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
+  Alcotest.(check (list Testable.uuid)) "keep more builds than we have" builds [];
+  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 0) >>= fun builds ->
+  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
+  Alcotest.(check (list Testable.uuid)) "delete all builds" builds [ uuid'; uuid ];
+  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, -1) >>= fun builds ->
+  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
+  Alcotest.(check (list Testable.uuid)) "test a nonsensical argument (-1)" builds [ uuid'; uuid ];
+  Ok ()
+
 let () =
   let open Alcotest in
   Alcotest.run "Builder_db" [
@@ -312,7 +317,6 @@ let () =
       test_case "Get user" `Quick (with_user_db test_user_get_user);
       test_case "Remove user by name" `Quick (with_user_db test_user_remove_user);
       test_case "Update user" `Quick (with_user_db test_user_update);
-      test_case "Remove user" `Quick (with_user_db test_user_remove);
     ];
     "user-auth", [
      test_case "User auth success" `Quick (with_user_db test_user_auth);
@@ -320,7 +324,6 @@ let () =
     ];
     "job", [
      test_case "Add build" `Quick (with_build_db (fun _ -> Ok ()));
-      test_case "One job" `Quick (with_build_db test_job_get_all);
      test_case "Get job id" `Quick (with_build_db test_job_get_id_by_name);
      test_case "Get job" `Quick (with_build_db test_job_get);
      test_case "Remove job" `Quick test_job_remove;
@@ -328,17 +331,20 @@ let () =
     "build", [
      test_case "Get build" `Quick (with_build_db test_build_get_by_uuid);
      test_case "One build" `Quick (with_build_db test_build_get_all);
-      test_case "One build (meta data)" `Quick (with_build_db test_build_get_all_meta);
      test_case "Get latest build" `Quick (with_build_db test_build_get_latest);
      test_case "Get latest build uuid" `Quick (with_build_db test_build_get_latest_uuid);
-      test_case "Get build by hash" `Quick (with_build_db test_build_get_by_hash);
+      test_case "Get build by hash" `Quick (with_build_db test_build_get_with_jobname_by_hash);
      test_case "Get previous build" `Quick (with_build_db test_build_get_previous);
      test_case "Get previous build when first" `Quick (with_build_db test_build_get_previous_none);
    ];
    "build-artifact", [
      test_case "Get all by build" `Quick (with_build_db test_artifact_get_all_by_build);
      test_case "Get by build uuid" `Quick (with_build_db test_artifact_get_by_build_uuid);
      test_case "Get by build" `Quick (with_build_db test_artifact_get_by_build);
+      test_case "Artifact exists" `Quick (with_build_db test_artifact_exists_true);
+      test_case "Other artifact doesn't exist" `Quick (with_build_db test_artifact_exists_false);
      test_case "Remove by build" `Quick (with_build_db test_artifact_remove_by_build);
    ];
+    "vacuum", [
+      test_case "Get builds older than now" `Quick (with_build_db test_get_builds_older_than);
+      test_case "Get older builds and keep a fixed number of them" `Quick (with_build_db test_builds_excluding_latest_n);
+    ]
  ]
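The recurring change in this diff swaps Cstruct-based digests (Mirage_crypto.Hash, Cstruct.hexdump_pp) for plain strings via Digestif, with Ohex for hex output. A minimal sketch of the new pattern, using an arbitrary input string:

(* SHA-256 as a raw string, then hex-encoded for display: *)
let () =
  let raw = Digestif.SHA256.(to_raw_string (digest_string "#!/bin/sh\n")) in
  print_endline (Ohex.encode raw)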