Compare commits

...

109 commits

Author SHA1 Message Date
083f961ac4 Merge pull request 'output SHA256 sum without spaces' (!3) from style into main
Reviewed-on: #3
2024-09-24 12:16:18 +00:00
cd1b2a0b26 output SHA256 sum without spaces 2024-09-24 13:44:40 +02:00
402894405d Merge pull request 'v0.2.0 CHANGES.md' (!2) from prepare-v0.2.0 into main
Reviewed-on: #2
2024-09-10 09:25:28 +00:00
84737a4871 v0.2.0 CHANGES.md 2024-09-06 14:25:47 +02:00
47b1759d0f Merge pull request 'remove usage of cstruct, require mirage-crypto 1.0.0' (!1) from no-cstruct into main
Reviewed-on: #1
Reviewed-by: Reynir Björnsson <reynir@reynir.dk>
2024-09-05 15:12:37 +00:00
6d50b4bff4 remove superfluous mirage-crypto dependency 2024-09-05 17:02:14 +02:00
0f68776819 update builder dependency 2024-09-05 17:00:00 +02:00
598f8230bd update caqti constraint 2024-09-05 16:58:19 +02:00
ca0214c606 minor (grep -i cstruct) 2024-09-02 16:37:59 +02:00
07831d7de3 remove usage of cstruct, require mirage-crypto 1.0.0 2024-09-02 16:14:26 +02:00
71a5b0da96 Use Lwt.reraise instead of Lwt.fail 2024-08-22 14:04:28 +02:00
32ea8d2224
Merge pull request #3 from robur-coop/tar.3.0.0
Update to tar 3.0.0
2024-08-16 08:38:03 +02:00
c1cfe52cb0 Update to tar 3.0.0
Also embed a comment in the archive what software produced it.
2024-08-14 16:13:35 +02:00
9216d980b6
Merge pull request #2 from robur-coop/caqti.2.1.1
Update to Caqti.2.1.1
2024-08-13 16:47:59 +02:00
e73f7c5aa3 Set upper bound on tar 2024-08-13 13:16:31 +02:00
4461a91f87 Update to caqti>=2.1.1
The tuple type constructors are renamed from tupN to tN. Also, except
for migrations, use the wider tuple types (up to 12 since caqti.2.1.0).
2024-08-13 13:16:31 +02:00
b96c3a0479 Merge pull request 'Check version in visualizations.sh' (#188) from fix-visualizations into main
Reviewed-on: #188
2024-03-25 15:56:04 +00:00
cc6b084d7a Check version in visualizations.sh
If builder-web is upgraded and not restarted the visualizations script
will assume the new database schema while the still-running builder-web
binary will be using the old schema (until restarted).
2024-03-25 16:46:52 +01:00
97b8bb1d85 Merge pull request 'Implement builder-db vacuum {older-than,latest-n}' (#185) from vacuum into main
Reviewed-on: #185
2024-02-13 15:08:59 +00:00
2d36db435f Document vacuum default behavior, remove comment 2024-02-13 16:08:06 +01:00
e96234488f Add BUILDER_WEB_DATADIR env, fix bug
The builder-web commands now understand the BUILDER_WEB_DATADIR
environment variable which is used as --datadir.

During a change the transaction when vacuuming was committed twice which
is an error in sqlite. This was found during testing.
2024-02-13 14:23:25 +01:00
878acf002f Implement builder-db vacuum except-latest-successful
And some minor things.
2024-02-13 14:07:16 +01:00
d4da5a199f Implement builder-db vacuum {older-than,latest-n}
- `builder-db vacuum older-than [--job JOBNAME] RFC3339` removes all
  builds in `JOBNAME` (defaults to all jobs) that were built before
  `RFC3339`.
- `builder-db vacuum except-latest [--job JOBNAME] LATEST-N` removes all
  builds in `JOBNAME` (defaults to all jobs) except for the latest
  `LATEST-N` builds (successful or not).
2024-02-12 15:09:08 +01:00
8c62314100 Merge pull request 'Add two SQL requests to collect older builds according to a parameter' (#183) from collect-old-builds into main
Reviewed-on: #183
2024-02-12 11:26:47 +00:00
46df7f92a7 Aggregate all builds (even failed builds) 2024-02-12 12:19:52 +01:00
Robur
082f2582dd Add a comment on LIMIT -1 OFFSET n 2024-02-12 12:19:52 +01:00
Robur
6594c6b912 Rename get_builds_and_exclude_the_first 2024-02-12 12:19:52 +01:00
Robur
73e1bf81ce Fix comparison: less than or equal, not strict equal 2024-02-12 12:19:52 +01:00
53f1c4958c Use our own ptime serializer 2024-02-12 12:19:52 +01:00
fb49d8eae2 Add two SQL requests to collect older builds according to a parameter 2024-02-12 12:19:52 +01:00
Robur
81be4229c1 model: only commit files that need to be saved 2024-01-09 16:42:13 +00:00
Robur
22f3eb3893 model, add_build: first put the files into place, then commit to the database 2024-01-09 16:37:39 +00:00
Robur
5d4d84f705 builder-db: update current database version 2024-01-09 16:28:03 +00:00
13dd238843 Merge pull request 'content addressing' (#174) from content-addressing into main
Reviewed-on: #174
2024-01-09 15:08:10 +00:00
Robur
c45488dd73 hook scripts & repo scripts: provide and use --main-binary-filepath 2024-01-09 15:05:09 +00:00
Robur
1e522e2b39 builder_db_app: verify_data_dir: compute size and sha256 only once per artifact 2024-01-09 14:48:53 +00:00
Robur
f66932da48 content addressing migration: add indices 2024-01-09 14:31:49 +00:00
Robur
7dfd160d8e Adapt visualizations.sh to content addressing 2024-01-09 14:27:52 +00:00
Robur
3bc14ba365 Remove Model.artifacts_dir
The directory is created later and the function is otherwise unused
2024-01-09 14:16:04 +00:00
36afb35e08 Update builder-db with content addressing 2024-01-09 13:32:13 +00:00
9f5458c8f4 Fix migration script
It didn't even compile :(
2024-01-09 13:32:13 +00:00
78a66dc089 Add migration script for content-addressed artifacts 2024-01-09 13:32:13 +00:00
1452de6280 Don't mask error in test setup 2024-01-09 13:32:13 +00:00
5ec5cb66df Update tests and fix bug discovered 2024-01-09 13:32:13 +00:00
a56bd28e64 Add missing right paren 2024-01-09 13:32:13 +00:00
f636280f10 WIP content addressing 2024-01-09 13:32:08 +00:00
7f3a6719e2 require cmarkit 0.3
work around the issue reported at https://github.com/dbuenzli/cmarkit/issues/14
add a second table test, fix the tests for the 0.3 fixed layouting
2023-12-14 15:10:12 +01:00
46d8ba611c Merge pull request 'Fixup: latest successful build has main binary' (#181) from fix-front into main
Reviewed-on: #181
2023-11-22 13:09:24 +00:00
57a11d4385 Fixup: latest successful build has main binary 2023-11-22 14:07:28 +01:00
fc253c616d Merge pull request 'fix get_latest_successful_with_binary to only return builds with main_binary' (#180) from fix-front into main
Reviewed-on: #180
2023-11-22 12:44:24 +00:00
e262ae9d90 Rewrite query to not use LEFT JOIN
Since we now consider only builds with main binaries successful we can
rewrite get_latest_successful_with_binary to not use a LEFT JOIN.
2023-11-22 13:43:09 +01:00
39da49363f Merge pull request 'Enable non-strict mode for cmarkit to render tables' (#179) from cmarkit-table into main
Reviewed-on: #179
2023-11-22 12:26:59 +00:00
6c4f36bfba fix get_latest_successful_with_binary to only return builds with main_binary
so-called successful builds. in a756f2c..3b98605f there were regressions
(missing this condition in get_latest_succesful_with_binary,
get_next_successful_different_output, get_previous_successful_different_output).
2023-11-22 12:28:19 +01:00
ded21b9131 Enable non-strict mode for cmarkit to render tables 2023-11-22 12:13:42 +01:00
Robur
bf35e3fbae repo scripts: remove existing packages
In the end the packaging tooling does not like multiple versions of
the same package. Thus we remove old packages before publishing.
2023-11-20 12:11:52 +00:00
Robur
6ba2994dcf Fix debian packaging (version to always include -#commit-g<commit-id>)
If the build is a release, use -0-g0000000.

Also add this sanitization to the check_versions.sh
2023-11-20 11:52:59 +00:00
46f93c28ea Merge pull request 'only show active jobs, fixes #162' (#176) from orphan-old-builds into main
Reviewed-on: #176
2023-09-26 11:18:40 +00:00
378f5c4538 UI enhancement: all/active build modal
When showing the active builds link to all builds and vice versa.
2023-09-19 15:07:47 +02:00
bfa06c95f8 whitespace-cleanup 2023-09-19 15:07:47 +02:00
a9799f4ca8 delay computation to when the request lands 2023-09-19 15:07:47 +02:00
848186bd1a make the expiry of jobs configurable via cli (default 30) 2023-09-19 15:07:47 +02:00
95b4856179 skip jobs with no builds 2023-09-19 15:07:47 +02:00
8dedc8b95b only show active jobs, fixes #162 2023-09-19 15:07:47 +02:00
fde834ad02 Initialize rng 2023-09-19 15:07:12 +02:00
89f2c54973 Fix migrate-2023-09-11 by dropping the index first 2023-09-18 14:09:54 +02:00
db115ad140 Merge pull request 'further CI fixes: require alcotest 1.2.0 (for check'), disable formatting (no .ocamlformat)' (#175) from lower-bound-fix into main
Reviewed-on: #175
2023-09-18 09:08:39 +00:00
e0bc795735 further CI fixes: require alcotest 1.2.0 (for check'), disable formatting (no .ocamlformat) 2023-09-18 10:52:02 +02:00
48ce55e73e Merge pull request 'fix CI' (#173) from ci into main
Reviewed-on: #173
2023-09-12 13:47:51 +00:00
2b30ab6bf9 lower-bounds: add one for decompress (needing the gz subpackage) 2023-09-12 13:13:51 +02:00
665bd0dfe8 add yojson with-test dependency 2023-09-12 13:11:14 +02:00
207252401f auto-format 2023-09-12 13:10:10 +02:00
8152bc0d14 Merge pull request 'regard a successful build only these with a main_artifact present' (#168) from fix-85 into main
Reviewed-on: #168
2023-09-11 11:34:38 +00:00
3fe8bcb997 Add migration for modified index 2023-09-11 12:31:11 +02:00
3b98605fb7 redefine successful vs not successful build:
a successful build is when the main_binary is NOT NULL, an unsuccessful is when
main_binary is NULL
2023-09-11 11:28:38 +02:00
b27570ef11 Builder_db: get_all_failed, get_failed_builds: reconsider failed builds
As remarked by @reynir, consider builds with no main_binary as failed.
2023-09-11 11:28:38 +02:00
8a70e76032 regard a successful build only these with a main_artifact present 2023-09-11 11:28:38 +02:00
a756f2c814 Merge pull request 'verify_cache_dir: only consider non-failed builds (main_binary IS NOT NULL)' (#170) from fix-138 into main
Reviewed-on: #170
2023-09-11 09:10:39 +00:00
a333d4eb9d verify_cache_dir: only consider where main_binary is present 2023-09-11 11:10:03 +02:00
6cbf7d4009 whitespace cleanup in batch-viz.sh 2023-09-11 11:10:03 +02:00
c2cf97436d whitespace cleanup in builder_db_app 2023-09-11 11:10:03 +02:00
1dc9a7b0fc Merge pull request 'builder-web is compatible with dream.1.0.0~alpha5' (#172) from dream.1.0.0-alpha5 into main
Reviewed-on: #172
2023-09-11 09:08:41 +00:00
0699473333 builder-web is compatible with dream.1.0.0~alpha5 2023-09-11 11:02:23 +02:00
d6f172b777 Merge pull request 'replace omd with cmarkit' (#167) from cmarkit into main
Reviewed-on: #167
2023-09-11 08:34:53 +00:00
4222f15162 add test for heading adjustment 2023-09-11 10:22:16 +02:00
1293e081c6 Adjust heading from README to at least level 2 (fixes #164) 2023-09-11 10:22:16 +02:00
5feb615e12 replace omd with cmarkit 2023-09-11 10:22:16 +02:00
e6af891748 roburio -> robur-coop 2023-09-09 11:42:34 +02:00
530163a492 Merge pull request 'Remove the /job and /job/:job/build "redirect parent" parts from the router' (#169) from requests into main
Reviewed-on: #169
2023-09-08 19:02:28 +00:00
544d6883a0 Remove the /job and /job/:job/build "redirect parent" parts from the router
Also, when "remove slash and redirect" (in the "Middleware"), use a 301 status.
This is well recognized by browsers, and leads to a request to the respective
resource (in contrast to 308. 308 may be useful for other HTTP methods than GET,
but our entire redirect middleware only handles GET and HEAD anyways (POST are
excluded).
2023-08-27 22:02:47 +02:00
d43c3aa26c Merge pull request 'Fix build of builder-web with more recent packages:' (#166) from fix-build into main
Reviewed-on: #166
2023-08-25 09:52:34 +00:00
6f30d5d144 Fix build of builder-web with more recent packages:
Cstruct.copy is deprecated
Mirage_crypto_rng_unix.initialize requires the RNG module as parameter (since mirage-crypto-rng 0.11.0)
Lwt_result.catch takes a function (unit -> 'a Lwt.t) since lwt 5.7.0
2023-08-25 10:09:21 +02:00
61575c0f79 git.robur.io is now git.robur.coop 2023-06-03 15:34:42 +02:00
b718a0e4ea take care of git pins, attempt 2 2023-05-31 17:18:24 +02:00
12383cbd06 duniverse diff: check for hashes on both r and l 2023-05-31 17:11:47 +02:00
62965d4f90 FreeBSD rc script: restart the service on termination 2023-05-27 20:06:44 +02:00
Robur
e7b3e13597 Fix compilation 2023-03-13 15:25:52 +00:00
Robur
ae1d8c553f Show an error when parsing duniverse fails 2023-03-13 15:25:14 +00:00
64045f7dec The opamdiff for a duniverse attempts to parse the x-opam-monorepo-duniverse-dirs
Previously, an entry pinned to a commit, such as openvpn:
    [
      "git+https://github.com/roburio/openvpn.git#5041b2837fe299138fae95649cb812a3930be163"
      "openvpn"
    ]

couldn't be decoded (since the form was assumed to be List [ URL ; DIR ; HASHES ])

Now, the parser is extended to cope with List [ URL ; DIR ] as well.

Seen on https://builds.robur.coop/compare/dde0380c-11f8-4e4f-8e91-d4ffcf5f4e08/35d0bddc-e118-44c5-bbba-f5e53cccca2d
2023-03-13 14:46:35 +00:00
ab45412a7c Merge pull request 'Visualization script enhancements' (#136) from viz-script-fixes into main
Reviewed-on: https://git.robur.io/robur/builder-web/pulls/136
2023-03-13 14:44:26 +00:00
Robur
4cc81f9950 visualizations.sh: use curly braces for variables 2023-03-13 14:43:54 +00:00
a0539604dc treat mirage (>=4.2.0) unikernels as unikernels, since they have x-mirage-opam-lock-location in the opam file
fixes #151
2023-03-13 11:50:05 +00:00
40b31ed691 Merge pull request 'adapt to omd 2.0.0 alpha3, which adds a Table constructor' (#160) from omd-2-alpha3 into main
Reviewed-on: https://git.robur.io/robur/builder-web/pulls/160
2023-03-13 11:42:35 +00:00
Robur
92fb616980 Update omd to 2.0.0~alpha3 2023-03-13 11:41:59 +00:00
Robur
ae2a920a42 Merge branch '20220802_test_hardcoded_links_in_albatross_and_builder' 2023-03-13 11:32:45 +00:00
Robur
56b0b7c990 Router test: consider only parameters in route
Also update albatross tests
2023-03-13 11:31:21 +00:00
02dbe1af37 adapt to omd 2.0.0 alpha3, which adds a Table constructor 2023-03-10 15:48:04 +01:00
1924781bb6 batch-viz: Get UUIDs with distinct inputs
This should result in less complaints about visualizations already
existing.
2022-08-26 13:18:30 +02:00
aa1af6c2bd visualizations.sh: use cp;rm instead of mv in /tmp
mktemp creates a file with mode 0600 and group wheel (inherited from
/tmp/). The command mv is then not able to set group ownership to wheel
in /var/db/builder-web/ on FreeBSD and emits a warning. Furthermore, the
restrictive permissions are preserved. Copying and then removing the
temporary file avoids these issues.
2022-08-26 12:01:09 +02:00
rand00
60db240866 test/Router: Added tests for hardcoded links present in Albatross 2022-08-02 18:52:33 +02:00
59 changed files with 1592 additions and 776 deletions

View file

@ -1,3 +1,27 @@
# v0.1.0 (2021-11-12)
## v0.2.0 (2024-09-05)
A whole slew of changes. Internally, we made a lot of incremental changes and improvements without doing a release. Thus this release is rather big. There are a lot of database migrations to apply, and unfortunately they have to be applied one at a time.
* Add a /failed-builds/ endpoint that lists the most recent failed builds.
* By default don't display failed builds on the front page.
* Times are printed with the 'Z' time zone offset indicator.
* Link to comparisons of builds take into account whether the "input", among others the list of dependencies, is different.
* New subcommand `builder-db extract-build` takes a build UUID and extracts the builder "full" file.
* Add /job/<job>/build/<build>/all.tar.gz endpoint with a gzip compressed tar archive of all build artifacts.
* Visual overhaul.
* Add (optional) visualizations displaying package dependencies ("opam-graph") and for unikernels a "modulectomy" view of how much each OCaml module is contributing to the final binary size. The visualizations are read from a cache on disk and can be generated from a script.
* A script hook is added on file upload. It may be used to generate visualizations or publish system packages to a repository.
* The 404 file not found page tries to be more informative.
* The build page for a unikernel build displays the solo5 device manifest, e.g. `with block devices "storage", and net devices "service"`.
* URLs with trailing slash redirect to without the trailing slash.
* Builder-web will try to be more helpful if its database doesn't exist or the database version is wrong.
* The opam diff works for mirage 4 unikernels taking into account the opam-monorepo/duniverse packages.
* Markdown rendering is now done using cmarkit instead of omd.
* Builder-web doesn't display jobs older than 30 days (customizable with `--expired-jobs` command line argument) on the front page.
* Build artifacts are stored by their content, and artifacts are automatically deduplicated. This makes builder-web much more space efficient on deployments that don't use deduplication on the filesystem level.
* New subcommands `builder-db vacuum *` to remove older builds. Can be called from a cron job to keep disk usage bounded.
* Lots of other improvements and bug fixes.
## v0.1.0 (2021-11-12)
* Initial public release

View file

@ -1,13 +1,13 @@
# Builder-web - a web frontend for reproducible builds
Builder-web takes in submissions of builds, typically from [builder](https://github.com/roburio/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
Builder-web takes in submissions of builds, typically from [builder](https://github.com/robur-coop/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
Produced binaries can be downloaded and executed.
[builds.robur.coop](https://builds.robur.coop/) itself runs builder-web.
## Overview
Builder-web is a single binary web server using a sqlite3 database with versioned schemas.
Finished builds from [builder](https://github.com/roburio/builder/) are uploaded to builder-web, stored and indexed in the database and presented in the web interface to the user.
Finished builds from [builder](https://github.com/robur-coop/builder/) are uploaded to builder-web, stored and indexed in the database and presented in the web interface to the user.
Users can:
* Get an overview of *jobs* - a job is typically a script or opam package that is run and builds an artifact,

View file

@ -12,9 +12,9 @@ let scrypt_params ?(scrypt_n = 16384) ?(scrypt_r = 8) ?(scrypt_p = 1) () =
{ scrypt_n; scrypt_r; scrypt_p }
type pbkdf2_sha256 =
[ `Pbkdf2_sha256 of Cstruct.t * Cstruct.t * pbkdf2_sha256_params ]
[ `Pbkdf2_sha256 of string * string * pbkdf2_sha256_params ]
type scrypt = [ `Scrypt of Cstruct.t * Cstruct.t * scrypt_params ]
type scrypt = [ `Scrypt of string * string * scrypt_params ]
type password_hash = [ pbkdf2_sha256 | scrypt ]
@ -25,10 +25,10 @@ type 'a user_info = {
}
let pbkdf2_sha256 ~params:{ pbkdf2_sha256_iter = count } ~salt ~password =
Pbkdf.pbkdf2 ~prf:`SHA256 ~count ~dk_len:32l ~salt ~password:(Cstruct.of_string password)
Pbkdf.pbkdf2 ~prf:`SHA256 ~count ~dk_len:32l ~salt ~password
let scrypt ~params:{ scrypt_n = n; scrypt_r = r; scrypt_p = p } ~salt ~password =
Scrypt_kdf.scrypt_kdf ~n ~r ~p ~dk_len:32l ~salt ~password:(Cstruct.of_string password)
Scrypt.scrypt ~n ~r ~p ~dk_len:32l ~salt ~password
let hash ?(scrypt_params=scrypt_params ())
~username ~password ~restricted () =
@ -43,10 +43,10 @@ let hash ?(scrypt_params=scrypt_params ())
let verify_password password user_info =
match user_info.password_hash with
| `Pbkdf2_sha256 (password_hash, salt, params) ->
Cstruct.equal
String.equal
(pbkdf2_sha256 ~params ~salt ~password)
password_hash
| `Scrypt (password_hash, salt, params) ->
Cstruct.equal
String.equal
(scrypt ~params ~salt ~password)
password_hash

View file

@ -1,3 +1,3 @@
(library
(name builder_web_auth)
(libraries pbkdf scrypt-kdf mirage-crypto-rng))
(libraries kdf.pbkdf kdf.scrypt mirage-crypto-rng))

View file

@ -16,6 +16,13 @@ let defer_foreign_keys =
Caqti_type.unit ->. Caqti_type.unit @@
"PRAGMA defer_foreign_keys = ON"
let build_artifacts_to_orphan =
Builder_db.Rep.id `build ->* Caqti_type.octets @@
{| SELECT a.sha256 FROM build_artifact a
WHERE a.build = ? AND
(SELECT COUNT(*) FROM build_artifact a2
WHERE a2.sha256 = a.sha256 AND a2.build <> a.build) = 0 |}
let connect uri =
let* (module Db : Caqti_blocking.CONNECTION) = Caqti_blocking.connect ~tweaks_version:(1,8) uri in
let* () = Db.exec defer_foreign_keys () in
@ -36,6 +43,16 @@ let do_migrate dbpath =
let migrate () dbpath =
or_die 1 (do_migrate dbpath)
let artifacts_dir datadir = Fpath.(datadir / "_artifacts")
let artifact_path sha256 =
let sha256 = Ohex.encode sha256 in
(* NOTE: [sha256] is 64 characters when it's a hex sha256 checksum *)
(* NOTE: We add the prefix to reduce the number of files in a directory - a
workaround for inferior filesystems. We can easily revert this by changing
this function and adding a migration. *)
let prefix = String.sub sha256 0 2 in
Fpath.(v "_artifacts" / prefix / sha256)
let user_mod action dbpath scrypt_n scrypt_r scrypt_p username unrestricted =
let scrypt_params = Builder_web_auth.scrypt_params ?scrypt_n ?scrypt_r ?scrypt_p () in
let r =
@ -91,7 +108,7 @@ let user_disable () dbpath username =
match user with
| None -> Error (`Msg "user not found")
| Some (_, user_info) ->
let password_hash = `Scrypt (Cstruct.empty, Cstruct.empty, Builder_web_auth.scrypt_params ()) in
let password_hash = `Scrypt ("", "", Builder_web_auth.scrypt_params ()) in
let user_info = { user_info with password_hash ; restricted = true } in
Db.exec Builder_db.User.update_user user_info
in
@ -131,6 +148,26 @@ let access_remove () dbpath username jobname =
in
or_die 1 r
let delete_build datadir (module Db : Caqti_blocking.CONNECTION) jobname id uuid =
let dir = Fpath.(v datadir / jobname / Uuidm.to_string uuid) in
(match Bos.OS.Dir.delete ~recurse:true dir with
| Ok _ -> ()
| Error `Msg e -> Logs.warn (fun m -> m "failed to remove build directory %a: %s" Fpath.pp dir e));
let* () =
Db.iter_s build_artifacts_to_orphan
(fun sha256 ->
let p = Fpath.(v datadir // artifact_path sha256) in
match Bos.OS.Path.delete p with
| Ok () -> Ok ()
| Error `Msg e ->
Logs.warn (fun m -> m "failed to remove orphan artifact %a: %s"
Fpath.pp p e);
Ok ())
id
in
let* () = Db.exec Builder_db.Build_artifact.remove_by_build id in
Db.exec Builder_db.Build.remove id
let job_remove () datadir jobname =
let dbpath = datadir ^ "/builder.sqlite3" in
let r =
@ -150,12 +187,7 @@ let job_remove () datadir jobname =
let* () =
List.fold_left (fun r (build_id, build) ->
let* () = r in
let dir = Fpath.(v datadir / jobname / Uuidm.to_string build.Builder_db.Build.uuid) in
(match Bos.OS.Dir.delete ~recurse:true dir with
| Ok _ -> ()
| Error `Msg e -> Logs.warn (fun m -> m "failed to remove build directory %a: %s" Fpath.pp dir e));
let* () = Db.exec Builder_db.Build_artifact.remove_by_build build_id in
Db.exec Builder_db.Build.remove build_id)
delete_build datadir (module Db) jobname build_id build.Builder_db.Build.uuid)
(Ok ())
builds
in
@ -173,13 +205,103 @@ let job_remove () datadir jobname =
in
or_die 1 r
let vacuum datadir (module Db : Caqti_blocking.CONNECTION) platform_opt job_id predicate =
let* jobname = Db.find Builder_db.Job.get job_id in
let* builds =
match predicate with
| `Date older_than ->
Db.collect_list Builder_db.Build.get_builds_older_than (job_id, platform_opt, older_than)
| `Latest latest_n ->
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, platform_opt, latest_n)
| `Latest_successful latest_n ->
let* latest_n =
Db.find_opt Builder_db.Build.get_nth_latest_successful
(job_id, platform_opt, latest_n)
in
match latest_n with
| None ->
Ok []
| Some (id, latest_n) ->
let+ builds =
Db.collect_list Builder_db.Build.get_builds_older_than
(job_id, platform_opt, latest_n.finish)
in
(* Unfortunately, get_builds_older_than is non-strict comparison;
so we need to filter out [latest_n]. *)
List.filter (fun (id', _) -> id <> id') builds
in
let pp_reason ppf = function
| `Date older_than ->
Format.fprintf ppf "has no builds older than %a" (Ptime.pp_rfc3339 ()) older_than
| `Latest n ->
Format.fprintf ppf "has fewer than %d builds" n
| `Latest_successful n ->
Format.fprintf ppf "has fewer than %d successful builds" n
in
if builds = [] then
(* NOTE: this function may be called on *all* jobs, and in that case maybe
this is too verbose? *)
Logs.info (fun m -> m "Job %s %a; not removing any builds"
jobname pp_reason predicate);
List.fold_left (fun r (build_id, build) ->
let* () = r in
let* () = Db.start () in
let* () = Db.exec defer_foreign_keys () in
match
delete_build datadir (module Db) jobname build_id
build.Builder_db.Build.uuid
with
| Ok () -> Db.commit ()
| Error _ as e ->
let* () = Db.rollback () in
e)
(Ok ())
builds
let vacuum () datadir platform_opt jobnames predicate =
let dbpath = datadir ^ "/builder.sqlite3" in
let r =
let* (module Db : Caqti_blocking.CONNECTION) =
connect (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
in
let* jobs =
match jobnames with
| [] ->
(* We default to all jobs if no jobnames were specified *)
let* jobs = Db.collect_list Builder_db.Job.get_all_with_section_synopsis () in
Ok (List.map (fun (job_id, _, _, _) -> job_id) jobs)
| _ :: _ ->
let* (jobs, unknown_jobnames) =
List.fold_left
(fun r jobname ->
let* (jobs, unknown_jobnames) = r in
let* job_id_opt = Db.find_opt Builder_db.Job.get_id_by_name jobname in
match job_id_opt with
| Some job_id -> Ok (job_id :: jobs, unknown_jobnames)
| None -> Ok (jobs, jobname :: unknown_jobnames))
(Ok ([], []))
jobnames
in
match unknown_jobnames with
| [] -> Ok jobs
| _ :: _ ->
Error (`Msg ("Unknown job(s): " ^ String.concat ", " unknown_jobnames))
in
List.fold_left (fun r jobid ->
let* () = r in
vacuum datadir (module Db) platform_opt jobid predicate)
(Ok ())
jobs
in
or_die 1 r
let input_ids =
Caqti_type.unit ->* Builder_db.Rep.cstruct @@
Caqti_type.unit ->* Caqti_type.octets @@
"SELECT DISTINCT input_id FROM build WHERE input_id IS NOT NULL"
let main_artifact_hash =
Builder_db.Rep.cstruct ->*
Caqti_type.tup3 Builder_db.Rep.cstruct Builder_db.Rep.uuid Caqti_type.string @@
Caqti_type.octets ->*
Caqti_type.t3 Caqti_type.octets Builder_db.Rep.uuid Caqti_type.string @@
{|
SELECT a.sha256, b.uuid, j.name FROM build_artifact a, build b, job j
WHERE b.input_id = ? AND a.id = b.main_binary AND b.job = j.id
@ -197,12 +319,12 @@ let verify_input_id () dbpath =
match hashes with
| (h, uuid, jobname) :: tl ->
List.iter (fun (h', uuid', _) ->
if Cstruct.equal h h' then
if String.equal h h' then
()
else
Logs.warn (fun m -> m "job %s input id %a with two different hashes (%a, %a), build %a and %a"
jobname Cstruct.hexdump_pp input_id
Cstruct.hexdump_pp h Cstruct.hexdump_pp h'
jobname Ohex.pp input_id
Ohex.pp h Ohex.pp h'
Uuidm.pp uuid Uuidm.pp uuid'))
tl
| [] -> ())
@ -214,18 +336,17 @@ let num_build_artifacts =
Caqti_type.unit ->! Caqti_type.int @@
"SELECT count(*) FROM build_artifact"
let build_artifacts : (unit, string * Uuidm.t * (Fpath.t * Fpath.t * Cstruct.t * int64), [ `One | `Zero | `Many ]) Caqti_request.t =
let build_artifacts : (unit, string * Uuidm.t * Fpath.t * string * int64, [ `One | `Zero | `Many ]) Caqti_request.t =
Caqti_type.unit ->*
Caqti_type.(tup3 string Builder_db.Rep.uuid
(tup4 Builder_db.Rep.fpath Builder_db.Rep.fpath Builder_db.Rep.cstruct int64))
Caqti_type.(t5 string Builder_db.Rep.uuid Builder_db.Rep.fpath octets int64)
@@
{| SELECT job.name, b.uuid, a.filepath, a.localpath, a.sha256, a.size
{| SELECT job.name, b.uuid, a.filepath, a.sha256, a.size
FROM build_artifact a, build b, job
WHERE a.build = b.id AND b.job = job.id |}
let script_and_console : (unit, _, [`One | `Zero | `Many ]) Caqti_request.t =
Caqti_type.unit ->*
Caqti_type.(tup4 string Builder_db.Rep.uuid Builder_db.Rep.fpath Builder_db.Rep.fpath)
Caqti_type.(t4 string Builder_db.Rep.uuid Builder_db.Rep.fpath Builder_db.Rep.fpath)
@@
{| SELECT job.name, b.uuid, b.console, b.script
FROM build b, job
@ -257,36 +378,33 @@ let verify_data_dir () datadir =
let idx = ref 0 in
fun () -> incr idx; if !idx mod 100 = 0 then Logs.info (fun m -> m "%d" !idx);
in
let verify_job_and_uuid ?fpath job uuid path =
let verify_job_and_uuid job uuid path =
match Fpath.segs path with
| job' :: uuid' :: tl ->
| job' :: uuid' :: _tl ->
if String.equal job job' then () else Logs.warn (fun m -> m "job names do not match: %s vs %s" job job');
if String.equal (Uuidm.to_string uuid) uuid' then () else Logs.warn (fun m -> m "uuid does not match: %s vs %s" (Uuidm.to_string uuid) uuid');
(match fpath, tl with
| None, _ -> ()
| Some f, "output" :: tl ->
if Fpath.equal (Fpath.v (String.concat "/" tl)) f then
()
else
Logs.err (fun m -> m "path (%a) and fpath (%a) do not match" Fpath.pp path Fpath.pp f)
| Some _, _ ->
Logs.err (fun m -> m "path is not of form <job>/<uuid>/output/<filename>: %a" Fpath.pp path))
| _ -> Logs.err (fun m -> m "path is not of form <job>/<uuid>/...: %a" Fpath.pp path)
in
let* () =
Db.iter_s build_artifacts (fun (job, uuid, (fpath, lpath, sha, size)) ->
Db.iter_s build_artifacts (fun (_job, _uuid, _fpath, sha256, size) ->
progress ();
verify_job_and_uuid ~fpath job uuid lpath;
let abs_path = Fpath.(v datadir // lpath) in
(match Bos.OS.File.read abs_path with
| Error (`Msg msg) -> Logs.err (fun m -> m "file %a not present: %s" Fpath.pp abs_path msg)
| Ok data ->
files_tracked := FpathSet.add lpath !files_tracked;
let s = Int64.of_int (String.length data) in
if s <> size then Logs.err (fun m -> m "File %a has different size (in DB %Lu on disk %Lu)" Fpath.pp abs_path size s);
let sh = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
if not (Cstruct.equal sha sh) then Logs.err (fun m -> m "File %a has different hash (in DB %a on disk %a" Fpath.pp abs_path Cstruct.hexdump_pp sha Cstruct.hexdump_pp sh)) ;
Ok ()
if not (FpathSet.mem (artifact_path sha256) !files_tracked) then
let abs_path = Fpath.(v datadir // artifact_path sha256) in
(match Bos.OS.File.read abs_path with
| Error (`Msg msg) -> Logs.err (fun m -> m "file %a not present: %s" Fpath.pp abs_path msg)
| Ok data ->
files_tracked := FpathSet.add (artifact_path sha256) !files_tracked;
let s = Int64.of_int (String.length data) in
if s <> size then Logs.err (fun m -> m "File %a has different size (in DB %Lu on disk %Lu)" Fpath.pp abs_path size s);
let sha256' = Digestif.SHA256.(to_raw_string (digest_string data)) in
if not (String.equal sha256 sha256') then
Logs.err (fun m -> m "File %a has different hash (in DB %a on disk %a)"
Fpath.pp abs_path
Ohex.pp sha256
Ohex.pp sha256')) ;
Ok ()
else
Ok ()
) ()
in
Db.iter_s script_and_console (fun (job, uuid, console, script) ->
@ -306,7 +424,7 @@ let verify_data_dir () datadir =
files_untracked;
or_die 1 r
module Verify_cache_dir = struct
module Verify_cache_dir = struct
let verify_dir_exists d =
let* dir_exists = Bos.OS.Dir.exists d in
@ -322,7 +440,7 @@ module Verify_cache_dir = struct
let string_is_int s = match int_of_string_opt s with
| None -> false
| Some _ -> true
let verify_cache_subdir ~cachedir d =
match Bos.OS.Dir.exists Fpath.(cachedir // d) with
| Ok false -> ()
@ -337,7 +455,7 @@ module Verify_cache_dir = struct
let prefix = viz_prefix ^ "_" in
let has_prefix = String.starts_with ~prefix dir_str in
let has_valid_ending =
if not has_prefix then false else
if not has_prefix then false else
let ending =
String.(sub dir_str
(length prefix)
@ -353,7 +471,7 @@ module Verify_cache_dir = struct
m "Invalid cache subdirectory name: '%s'" dir_str)
let get_latest_viz_version viz_typ =
let* v_str, run_status = begin match viz_typ with
let* v_str, run_status = begin match viz_typ with
| `Treemap ->
let cmd = Bos.Cmd.(v "modulectomy" % "--version") in
Bos.OS.Cmd.(cmd |> run_out |> out_string)
@ -362,7 +480,7 @@ module Verify_cache_dir = struct
Bos.OS.Cmd.(cmd |> run_out |> out_string)
end in
match run_status with
| (cmd_info, `Exited 0) ->
| (cmd_info, `Exited 0) ->
begin try Ok (int_of_string v_str) with Failure _ ->
let msg =
Fmt.str "Couldn't parse latest version from %a: '%s'"
@ -372,7 +490,7 @@ module Verify_cache_dir = struct
Error (`Msg msg)
end
| (cmd_info, _) ->
let msg =
let msg =
Fmt.str "Error running visualization cmd: '%a'"
Bos.Cmd.pp (Bos.OS.Cmd.run_info_cmd cmd_info)
in
@ -426,8 +544,8 @@ module Verify_cache_dir = struct
type t = {
uuid : Uuidm.t;
job_name : string;
hash_opam_switch : Cstruct.t option;
hash_debug_bin : Cstruct.t option;
hash_opam_switch : string option;
hash_debug_bin : string option;
}
let repr =
@ -438,11 +556,11 @@ module Verify_cache_dir = struct
in
Caqti_type.custom ~encode ~decode
Caqti_type.(
tup4
t4
Builder_db.Rep.uuid
string
(option Builder_db.Rep.cstruct)
(option Builder_db.Rep.cstruct))
(option octets)
(option octets))
end
@ -454,12 +572,13 @@ module Verify_cache_dir = struct
ba_opam_switch.sha256 hash_opam_switch,
ba_debug_bin.sha256 hash_debug_bin
FROM build AS b
WHERE b.main_binary IS NOT NULL
LEFT JOIN build_artifact AS ba_opam_switch ON
ba_opam_switch.build = b.id
AND ba_opam_switch.filepath = 'opam-switch'
LEFT JOIN build_artifact AS ba_debug_bin ON
ba_debug_bin.build = b.id
AND ba_debug_bin.localpath LIKE '%.debug'
AND ba_debug_bin.filepath LIKE '%.debug'
|}
let check_viz_nonempty ~cachedir ~viz_typ ~hash =
@ -467,7 +586,7 @@ module Verify_cache_dir = struct
let* latest_version =
Viz_aux.get_viz_version_from_dirs ~cachedir ~viz_typ
in
let `Hex viz_input_hash = Hex.of_cstruct hash in
let viz_input_hash = Ohex.encode hash in
let* viz_path =
Viz_aux.choose_versioned_viz_path
~cachedir
@ -482,7 +601,7 @@ module Verify_cache_dir = struct
let verify_viz_file_vizdeps ~cachedir build =
match build.Build.hash_opam_switch with
| None ->
| None ->
Logs.warn (fun m ->
m "%s: uuid '%a': Doesn't support dependencies viz because of \
missing 'opam-switch'"
@ -491,7 +610,7 @@ module Verify_cache_dir = struct
| Some hash_opam_switch ->
match
check_viz_nonempty
~cachedir
~cachedir
~viz_typ:`Dependencies
~hash:hash_opam_switch
with
@ -512,7 +631,7 @@ module Verify_cache_dir = struct
~cachedir
~viz_typ:`Treemap
~hash:hash_debug_bin
with
with
| Ok () -> ()
| Error (`Msg err) ->
Logs.warn (fun m ->
@ -547,7 +666,7 @@ module Verify_cache_dir = struct
match extract_hash ~viz_typ build with
| None -> ()
| Some input_hash ->
let `Hex input_hash = Hex.of_cstruct input_hash in
let input_hash = Ohex.encode input_hash in
let viz_path = Viz_aux.viz_path
~cachedir
~viz_typ
@ -567,18 +686,18 @@ module Verify_cache_dir = struct
Fpath.pp viz_path)
type msg = [ `Msg of string ]
let open_error_msg : ('a, msg) result -> ('a, [> msg]) result =
function
| Ok _ as v -> v
| Error e -> Error (e : msg :> [> msg])
let verify () datadir cachedir =
let module Viz_aux = Builder_web.Viz_aux in
begin
let* datadir = Fpath.of_string datadir |> open_error_msg in
let* cachedir = match cachedir with
| Some d -> Fpath.of_string d |> open_error_msg
| Some d -> Fpath.of_string d |> open_error_msg
| None -> Ok Fpath.(datadir / "_cache")
in
let* () = verify_dir_exists cachedir in
@ -621,8 +740,8 @@ end
module Asn = struct
let decode_strict codec cs =
match Asn.decode codec cs with
| Ok (a, cs) ->
if Cstruct.length cs = 0
| Ok (a, rest) ->
if String.length rest = 0
then Ok a
else Error "trailing bytes"
| Error (`Parse msg) -> Error ("parse error: " ^ msg)
@ -680,16 +799,16 @@ let extract_full () datadir dest uuid =
let out = console_of_string console in
let* artifacts = Db.collect_list Builder_db.Build_artifact.get_all_by_build build_id in
let* data =
List.fold_left (fun acc (_, { Builder_db.filepath; localpath; _ }) ->
List.fold_left (fun acc (_, { Builder_db.filepath; sha256; _ }) ->
let* acc = acc in
let* data = Bos.OS.File.read Fpath.(v datadir // localpath) in
let* data = Bos.OS.File.read Fpath.(v datadir // artifact_path sha256) in
Ok ((filepath, data) :: acc))
(Ok [])
artifacts
in
let exec = (job, uuid, out, start, finish, result, data) in
let cs = Builder.Asn.exec_to_cs exec in
Bos.OS.File.write (Fpath.v dest) (Cstruct.to_string cs)
let data = Builder.Asn.exec_to_str exec in
Bos.OS.File.write (Fpath.v dest) data
in
or_die 1 r
@ -701,81 +820,89 @@ let help man_format cmds = function
else `Error (true, "Unknown command: " ^ cmd)
let dbpath =
let doc = "sqlite3 database path" in
let doc = "sqlite3 database path." in
Cmdliner.Arg.(value &
opt non_dir_file (Builder_system.default_datadir ^ "/builder.sqlite3") &
info ~doc ["dbpath"])
let dbpath_new =
let doc = "sqlite3 database path" in
let doc = "sqlite3 database path." in
Cmdliner.Arg.(value &
opt string (Builder_system.default_datadir ^ "/builder.sqlite3") &
info ~doc ["dbpath"])
let datadir =
let doc = "data directory" in
let doc = "Data directory." in
let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
Cmdliner.Arg.(value &
opt dir Builder_system.default_datadir &
info ~doc ["datadir"; "d"])
info ~doc ~env ["datadir"; "d"])
let cachedir =
let doc = "cache directory" in
let doc = "Cache directory." in
let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_CACHEDIR" in
Cmdliner.Arg.(value &
opt (some dir) None &
info ~doc ["cachedir"])
info ~doc ~env ["cachedir"])
let jobname =
let doc = "jobname" in
let doc = "Jobname." in
Cmdliner.Arg.(required &
pos 0 (some string) None &
info ~doc ~docv:"JOBNAME" [])
let username =
let doc = "username" in
let doc = "Username." in
Cmdliner.Arg.(required &
pos 0 (some string) None &
info ~doc ~docv:"USERNAME" [])
let password_iter =
let doc = "password hash count" in
let doc = "Password hash count." in
Cmdliner.Arg.(value &
opt (some int) None &
info ~doc ["hash-count"])
let scrypt_n =
let doc = "scrypt n parameter" in
let doc = "scrypt n parameter." in
Cmdliner.Arg.(value &
opt (some int) None &
info ~doc ["scrypt-n"])
let scrypt_r =
let doc = "scrypt r parameter" in
let doc = "scrypt r parameter." in
Cmdliner.Arg.(value &
opt (some int) None &
info ~doc ["scrypt-r"])
let scrypt_p =
let doc = "scrypt p parameter" in
let doc = "scrypt p parameter." in
Cmdliner.Arg.(value &
opt (some int) None &
info ~doc ["scrypt-p"])
let unrestricted =
let doc = "unrestricted user" in
let doc = "Unrestricted user." in
Cmdliner.Arg.(value & flag & info ~doc [ "unrestricted" ])
let job =
let doc = "job" in
let doc = "Job." in
Cmdliner.Arg.(required &
pos 1 (some string) None &
info ~doc ~docv:"JOB" [])
let build =
let doc = "build uuid" in
let doc = "Build uuid." in
Cmdliner.Arg.(required &
pos 0 (some string) None &
info ~doc ~docv:"BUILD" [])
let platform =
let doc = "Platform." in
Cmdliner.Arg.(value &
opt (some string) None &
info ~doc ~docv:"PLATFORM" ["platform"])
let full_dest =
let doc = "path to write build file" in
Cmdliner.Arg.(value & opt string "full" &
@ -849,6 +976,102 @@ let job_remove_cmd =
let info = Cmd.info ~doc "job-remove" in
Cmd.v info term
let vacuum_cmd =
let jobs =
Arg.(value & opt_all string [] & info ~doc:"Job(s). Can be passed multiple times." ~docv:"JOB" ["job"])
in
let ptime_conv =
let parse s =
match Ptime.of_rfc3339 s with
| Ok (ptime, (None | Some 0), _) ->
Ok (`Date ptime)
| Ok _ -> Error (`Msg "only UTC timezone is allowed")
| Error `RFC3339 (_range, e) ->
Error (`Msg (Format.asprintf "bad RFC3339 date-time: %a" Ptime.pp_rfc3339_error e))
and pp ppf (`Date ptime) =
Ptime.pp_rfc3339 () ppf ptime
in
Arg.conv (parse, pp)
in
let older_than =
let doc = "cut-off date-time" in
Arg.(required & pos 0 (some ptime_conv) None & info ~doc ~docv:"OLDER-THAN" [])
in
(* TODO(reynir): for now we disallow 0 so as to avoid ending up with jobs
without builds. I'm unsure how well builder-web works with empty jobs.
Then again we don't do this check for older-than... *)
let latest_n =
let doc = "latest N" in
let latest_n =
let parse s =
match Arg.(conv_parser int) s with
| Ok n when n > 0 -> Ok (`Latest n)
| Ok _ -> Error (`Msg "must be positive integer")
| Error _ as e -> e
and pp ppf (`Latest n) =
Arg.(conv_printer int) ppf n
in
Arg.conv (parse, pp)
in
Arg.(required & pos 0 (some latest_n) None & info ~doc ~docv:"LATEST-N" [])
in
let latest_n_succesful =
let doc = "latest N successful" in
let latest_n =
let parse s =
match Arg.(conv_parser int) s with
| Ok n when n > 0 -> Ok (`Latest_successful n)
| Ok _ -> Error (`Msg "must be positive integer")
| Error _ as e -> e
and pp ppf (`Latest_successful n) =
Arg.(conv_printer int) ppf n
in
Arg.conv (parse, pp)
in
Arg.(required & pos 0 (some latest_n) None & info ~doc ~docv:"LATEST-N" [])
in
let job_default_txt =
"By default all jobs are vacuumed, unless any jobs are specified using --job."
in
let vacuum_older_than =
let doc =
Printf.sprintf "Remove builds older than a date. %s" job_default_txt
in
let info = Cmd.info ~doc "older-than" in
let term =
Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ older_than)
in
Cmd.v info term
in
let vacuum_except_latest_n =
let doc =
Printf.sprintf "Remove all builds except for the latest N builds (successful or not). %s"
job_default_txt
in
let info = Cmd.info ~doc "except-latest" in
let term =
Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ latest_n)
in
Cmd.v info term
in
let vacuum_except_latest_n_successful =
let doc =
Printf.sprintf "Remove all builds except for builds newer than the Nth latest successful build. %s"
job_default_txt
in
let info = Cmd.info ~doc "except-latest-successful" in
let term =
Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ latest_n_succesful)
in
Cmd.v info term
in
let doc = "Remove old builds" in
Cmd.group (Cmd.info ~doc "vacuum") [
vacuum_older_than;
vacuum_except_latest_n;
vacuum_except_latest_n_successful;
]
let extract_full_cmd =
let doc = "extract a build from the database" in
let term = Term.(
@ -892,7 +1115,7 @@ let default_cmd, default_info =
Cmd.info ~doc "builder-db"
let () =
Mirage_crypto_rng_unix.initialize ();
Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna);
Cmdliner.Cmd.group
~default:default_cmd default_info
[ help_cmd; migrate_cmd;
@ -902,6 +1125,7 @@ let () =
verify_input_id_cmd;
verify_data_dir_cmd;
verify_cache_dir_cmd;
extract_full_cmd ]
extract_full_cmd;
vacuum_cmd ]
|> Cmdliner.Cmd.eval
|> exit

View file

@ -30,7 +30,7 @@ let write_raw s buf =
safe_close s >|= fun () ->
Error `Exception)
in
(* Logs.debug (fun m -> m "writing %a" Cstruct.hexdump_pp (Cstruct.of_bytes buf)) ; *)
(* Logs.debug (fun m -> m "writing %a" (Ohex.pp_hexdump ()) (Bytes.unsafe_to_string buf)) ; *)
w 0 (Bytes.length buf)
let process =
@ -81,28 +81,28 @@ let init_influx name data =
let run_batch_viz ~cachedir ~datadir ~configdir =
let open Rresult.R.Infix in
begin
let script = Fpath.(configdir / "batch-viz.sh")
let script = Fpath.(configdir / "batch-viz.sh")
and script_log = Fpath.(cachedir / "batch-viz.log")
and viz_script = Fpath.(configdir / "upload-hooks" / "visualizations.sh")
and viz_script = Fpath.(configdir / "upload-hooks" / "visualizations.sh")
in
Bos.OS.File.exists script >>= fun script_exists ->
if not script_exists then begin
Logs.warn (fun m -> m "Didn't find %s" (Fpath.to_string script));
Ok ()
end else
let args =
let args =
[ "--cache-dir=" ^ Fpath.to_string cachedir;
"--data-dir=" ^ Fpath.to_string datadir;
"--viz-script=" ^ Fpath.to_string viz_script ]
|> List.map (fun s -> "\"" ^ String.escaped s ^ "\"")
|> String.concat " "
in
(*> Note: The reason for appending, is that else a new startup could
(*> Note: The reason for appending, is that else a new startup could
overwrite an existing running batch's log*)
(Fpath.to_string script ^ " " ^ args
^ " 2>&1 >> " ^ Fpath.to_string script_log
^ " &")
|> Sys.command
|> Sys.command
|> ignore
|> Result.ok
end
@ -113,13 +113,14 @@ let run_batch_viz ~cachedir ~datadir ~configdir =
m "Error while starting batch-viz.sh: %a"
Rresult.R.pp_msg err)
let setup_app level influx port host datadir cachedir configdir run_batch_viz_flag =
let setup_app level influx port host datadir cachedir configdir run_batch_viz_flag expired_jobs =
let dbpath = Printf.sprintf "%s/builder.sqlite3" datadir in
let datadir = Fpath.v datadir in
let cachedir =
cachedir |> Option.fold ~none:Fpath.(datadir / "_cache") ~some:Fpath.v
in
let configdir = Fpath.v configdir in
let () = Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna) in
let () = init_influx "builder-web" influx in
let () =
if run_batch_viz_flag then
@ -158,7 +159,7 @@ let setup_app level influx port host datadir cachedir configdir run_batch_viz_fl
let error_handler = Dream.error_template Builder_web.error_template in
Dream.initialize_log ?level ();
let dream_routes = Builder_web.(
routes ~datadir ~cachedir ~configdir
routes ~datadir ~cachedir ~configdir ~expired_jobs
|> to_dream_routes
)
in
@ -195,10 +196,11 @@ let ip_port : (Ipaddr.V4.t * int) Arg.conv =
let datadir =
let doc = "data directory" in
let docv = "DATA_DIR" in
let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
Arg.(
value &
opt dir Builder_system.default_datadir &
info [ "d"; "datadir" ] ~doc ~docv
info ~env [ "d"; "datadir" ] ~doc ~docv
)
let cachedir =
@ -240,11 +242,15 @@ let run_batch_viz =
log is written to CACHE_DIR/batch-viz.log" in
Arg.(value & flag & info [ "run-batch-viz" ] ~doc)
let expired_jobs =
let doc = "Amount of days after which a job is considered to be inactive if \
no successful build has been achieved (use 0 for infinite)" in
Arg.(value & opt int 30 & info [ "expired-jobs" ] ~doc)
let () =
let term =
Term.(const setup_app $ Logs_cli.level () $ influx $ port $ host $ datadir $
cachedir $ configdir $ run_batch_viz)
cachedir $ configdir $ run_batch_viz $ expired_jobs)
in
let info = Cmd.info "Builder web" ~doc:"Builder web" ~man:[] in
Cmd.v info term

View file

@ -7,10 +7,13 @@
(public_name builder-web)
(name builder_web_app)
(modules builder_web_app)
(libraries builder_web builder_system mirage-crypto-rng.unix cmdliner logs.cli metrics metrics-lwt metrics-influx metrics-rusage ipaddr ipaddr.unix http_status_metrics))
(libraries builder_web builder_system mirage-crypto-rng.unix cmdliner
logs.cli metrics metrics-lwt metrics-influx metrics-rusage ipaddr
ipaddr.unix http_status_metrics))
(executable
(public_name builder-db)
(name builder_db_app)
(modules builder_db_app)
(libraries builder_web builder_db builder_system caqti.blocking uri bos fmt logs logs.cli logs.fmt cmdliner mirage-crypto-rng.unix builder))
(libraries builder_web builder_db builder_system caqti.blocking uri bos fmt
logs logs.cli logs.fmt cmdliner mirage-crypto-rng.unix builder))

View file

@ -61,9 +61,10 @@ let help man_format migrations = function
let datadir =
let doc = "data directory containing builder.sqlite3 and data files" in
let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
Cmdliner.Arg.(value &
opt dir Builder_system.default_datadir &
info ~doc ["datadir"; "d"])
info ~env ~doc ["datadir"; "d"])
let setup_log =
let setup_log level =
@ -179,6 +180,8 @@ let () =
[ f20210910 ];
actions (module M20211105);
actions (module M20220509);
actions (module M20230911);
actions (module M20230914);
])
|> Cmd.eval
|> exit

View file

@ -1,4 +1,5 @@
(executable
(public_name builder-migrations)
(name builder_migrations)
(libraries builder_system builder_db caqti caqti-driver-sqlite3 caqti.blocking cmdliner logs logs.cli logs.fmt opam-format bos duration))
(libraries builder_system builder_db caqti caqti-driver-sqlite3
caqti.blocking cmdliner logs logs.cli logs.fmt opam-format bos duration))

View file

@ -18,11 +18,11 @@ let all_builds =
"SELECT id FROM build"
let bin_artifact =
Caqti_type.int64 ->* Caqti_type.(tup2 int64 string) @@
Caqti_type.int64 ->* Caqti_type.(t2 int64 string) @@
"SELECT id, filepath FROM build_artifact WHERE build = ? AND filepath LIKE 'bin/%'"
let set_main_binary =
Caqti_type.(tup2 int64 (option string)) ->. Caqti_type.unit @@
Caqti_type.(t2 int64 (option string)) ->. Caqti_type.unit @@
"UPDATE build SET main_binary = $2 WHERE id = $1"
let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =

View file

@ -37,21 +37,21 @@ let new_build_file =
|}
let collect_build_artifact =
Caqti_type.unit ->* Caqti_type.(tup3 int64 (tup3 string string octets) int64) @@
Caqti_type.unit ->* Caqti_type.(t3 int64 (t3 string string octets) int64) @@
"SELECT id, filepath, localpath, sha256, build FROM build_artifact"
let collect_build_file =
Caqti_type.unit ->* Caqti_type.(tup3 int64 (tup3 string string octets) int64) @@
Caqti_type.unit ->* Caqti_type.(t3 int64 (t3 string string octets) int64) @@
"SELECT id, filepath, localpath, sha256, build FROM build_file"
let insert_new_build_artifact =
Caqti_type.(tup3 int64 (tup4 string string octets int64) int64) ->. Caqti_type.unit @@
Caqti_type.(t3 int64 (t4 string string octets int64) int64) ->. Caqti_type.unit @@
{| INSERT INTO new_build_artifact (id, filepath, localpath, sha256, size, build)
VALUES (?, ?, ?, ?, ?, ?)
|}
let insert_new_build_file =
Caqti_type.(tup3 int64 (tup4 string string octets int64) int64) ->. Caqti_type.unit @@
Caqti_type.(t3 int64 (t4 string string octets int64) int64) ->. Caqti_type.unit @@
{| INSERT INTO new_build_file (id, filepath, localpath, sha256, size, build)
VALUES (?, ?, ?, ?, ?, ?)
|}

View file

@ -3,7 +3,7 @@ module Rep = Builder_db.Rep
open Grej.Infix
let broken_builds =
Caqti_type.unit ->* Caqti_type.tup3 (Rep.id `build) Rep.uuid Caqti_type.string @@
Caqti_type.unit ->* Caqti_type.t3 (Rep.id `build) Rep.uuid Caqti_type.string @@
{| SELECT b.id, b.uuid, job.name FROM build b, job
WHERE result_kind = 0 AND result_code = 0 AND main_binary IS NOT NULL AND job.id = b.job AND
(SELECT COUNT( * ) FROM build_artifact a

View file

@ -7,11 +7,11 @@ let rollback_doc = "add datadir prefix to build_artifact.localpath"
open Grej.Infix
let build_artifacts =
Caqti_type.unit ->* Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath @@
Caqti_type.unit ->* Caqti_type.t2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath @@
"SELECT id, localpath FROM build_artifact"
let build_artifact_update_localpath =
Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath ->. Caqti_type.unit @@
Caqti_type.t2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath ->. Caqti_type.unit @@
"UPDATE build_artifact SET localpath = $2 WHERE id = $1"
(* We are not migrating build_file because it is unused *)

View file

@ -54,20 +54,20 @@ let old_build =
let collect_old_build =
Caqti_type.unit ->*
Caqti_type.(tup3 Builder_db.Rep.untyped_id
(tup3 (tup4 string int64 int64 int64)
(tup4 int64 int (option int) (option string))
(tup3 octets string (option string)))
Caqti_type.(t3 Builder_db.Rep.untyped_id
(t3 (t4 string int64 int64 int64)
(t4 int64 int (option int) (option string))
(t3 octets string (option string)))
Builder_db.Rep.untyped_id) @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg,
console, script, main_binary, job
FROM build |}
let insert_new_build =
Caqti_type.(tup3 Builder_db.Rep.untyped_id
(tup3 (tup4 string int64 int64 int64)
(tup4 int64 int (option int) (option string))
(tup3 octets string (option Builder_db.Rep.untyped_id)))
Caqti_type.(t3 Builder_db.Rep.untyped_id
(t3 (t4 string int64 int64 int64)
(t4 int64 int (option int) (option string))
(t3 octets string (option Builder_db.Rep.untyped_id)))
Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
{| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind,
result_code, result_msg, console, script, main_binary, job)
@ -82,7 +82,7 @@ let rename_build =
"ALTER TABLE new_build RENAME TO build"
let find_main_artifact_id =
Caqti_type.(tup2 Builder_db.Rep.untyped_id string) ->! Builder_db.Rep.untyped_id @@
Caqti_type.(t2 Builder_db.Rep.untyped_id string) ->! Builder_db.Rep.untyped_id @@
"SELECT id FROM build_artifact WHERE build = $1 AND filepath = $2"
let find_main_artifact_filepath =
@ -91,20 +91,20 @@ let find_main_artifact_filepath =
let collect_new_build =
Caqti_type.unit ->*
Caqti_type.(tup3 Builder_db.Rep.untyped_id
(tup3 (tup4 string int64 int64 int64)
(tup4 int64 int (option int) (option string))
(tup3 octets string (option Builder_db.Rep.untyped_id)))
Caqti_type.(t3 Builder_db.Rep.untyped_id
(t3 (t4 string int64 int64 int64)
(t4 int64 int (option int) (option string))
(t3 octets string (option Builder_db.Rep.untyped_id)))
Builder_db.Rep.untyped_id) @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg,
console, script, main_binary, job
FROM build |}
let insert_old_build =
Caqti_type.(tup3 Builder_db.Rep.untyped_id
(tup3 (tup4 string int64 int64 int64)
(tup4 int64 int (option int) (option string))
(tup3 octets string (option string)))
Caqti_type.(t3 Builder_db.Rep.untyped_id
(t3 (t4 string int64 int64 int64)
(t4 int64 int (option int) (option string))
(t3 octets string (option string)))
Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
{| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind,
result_code, result_msg, console, script, main_binary, job)

View file

@ -34,21 +34,21 @@ let old_user =
let collect_old_user =
Caqti_type.unit ->*
Caqti_type.(tup4 int64 string (tup2 octets octets) (tup3 int64 int64 int64)) @@
Caqti_type.(t4 int64 string (t2 octets octets) (t3 int64 int64 int64)) @@
"SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p FROM user"
let collect_new_user =
Caqti_type.unit ->*
Caqti_type.(tup4 int64 string (tup2 octets octets) (tup4 int64 int64 int64 bool)) @@
Caqti_type.(t4 int64 string (t2 octets octets) (t4 int64 int64 int64 bool)) @@
"SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted FROM user"
let insert_new_user =
Caqti_type.(tup4 int64 string (tup2 octets octets) (tup4 int64 int64 int64 bool)) ->.
Caqti_type.(t4 int64 string (t2 octets octets) (t4 int64 int64 int64 bool)) ->.
Caqti_type.unit @@
"INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"
let insert_old_user =
Caqti_type.(tup4 int64 string (tup2 octets octets) (tup3 int64 int64 int64)) ->.
Caqti_type.(t4 int64 string (t2 octets octets) (t3 int64 int64 int64)) ->.
Caqti_type.unit @@
"INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p) VALUES (?, ?, ?, ?, ?, ?, ?)"

View file

@ -42,7 +42,7 @@ let latest_successful_build =
let build_artifacts =
Builder_db.Rep.untyped_id ->*
Caqti_type.tup2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
Caqti_type.t2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
{| SELECT a.filepath, a.localpath
FROM build_artifact a
WHERE a.build = ?
@ -106,7 +106,7 @@ let insert_tag =
"INSERT INTO tag (tag) VALUES (?)"
let insert_job_tag =
Caqti_type.(tup3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
Caqti_type.(t3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
Caqti_type.unit @@
"INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)"

View file

@ -20,7 +20,7 @@ let latest_successful_build =
let build_artifacts =
Builder_db.Rep.untyped_id ->*
Caqti_type.tup2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
Caqti_type.t2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
{| SELECT a.filepath, a.localpath
FROM build_artifact a
WHERE a.build = ?
@ -31,7 +31,7 @@ let insert_tag =
"INSERT INTO tag (tag) VALUES (?)"
let insert_job_tag =
Caqti_type.(tup3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
Caqti_type.(t3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
Caqti_type.unit @@
"INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)"

View file

@ -55,11 +55,11 @@ let drop_input_id_from_build =
let builds =
Caqti_type.unit ->*
Caqti_type.tup4
Caqti_type.t4
Builder_db.Rep.untyped_id
Builder_db.Rep.cstruct
Builder_db.Rep.cstruct
Builder_db.Rep.cstruct @@
Caqti_type.octets
Caqti_type.octets
Caqti_type.octets @@
{| SELECT b.id, opam.sha256, env.sha256, system.sha256
FROM build b, build_artifact opam, build_artifact env, build_artifact system
WHERE opam.filepath = 'opam-switch' AND env.filepath = 'build-environment'
@ -68,7 +68,7 @@ let builds =
|}
let set_input_id =
Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.cstruct ->. Caqti_type.unit @@
Caqti_type.t2 Builder_db.Rep.untyped_id Caqti_type.octets ->. Caqti_type.unit @@
"UPDATE build SET input_id = $2 WHERE id = $1"
let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
@ -76,7 +76,7 @@ let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
Db.exec add_input_id_to_build () >>= fun () ->
Db.collect_list builds () >>= fun builds ->
Grej.list_iter_result (fun (id, opam_sha, env_sha, pkg_sha) ->
let input_id = Mirage_crypto.Hash.SHA256.digest (Cstruct.concat [ opam_sha ; env_sha ; pkg_sha ]) in
let input_id = Digestif.SHA256.(to_raw_string (digestv_string [ opam_sha ; env_sha ; pkg_sha ])) in
Db.exec set_input_id (id, input_id))
builds >>= fun () ->
Db.exec (Grej.set_version new_version) ()

View file

@ -2,7 +2,7 @@ open Grej.Infix
let orb_left_in_builds =
Caqti_type.unit ->*
Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
{| SELECT id, localpath FROM build_artifact
WHERE filepath = 'orb.deb' OR filepath = 'orb.txz'
|}

View file

@ -2,7 +2,7 @@ open Grej.Infix
let deb_debug_left_in_builds =
Caqti_type.unit ->*
Caqti_type.tup4 (Builder_db.Rep.id `build_artifact) (Builder_db.Rep.id `build)
Caqti_type.t4 (Builder_db.Rep.id `build_artifact) (Builder_db.Rep.id `build)
Builder_db.Rep.fpath Builder_db.Rep.fpath @@
{| SELECT id, build, localpath, filepath FROM build_artifact
WHERE filepath LIKE '%.deb.debug'
@ -17,7 +17,7 @@ let get_localpath =
"SELECT localpath FROM build_artifact WHERE id = ?"
let update_paths =
Caqti_type.tup3 (Builder_db.Rep.id `build_artifact)
Caqti_type.t3 (Builder_db.Rep.id `build_artifact)
Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
Caqti_type.unit @@
"UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1"

View file

@ -2,7 +2,7 @@ open Grej.Infix
let all_builds_with_binary : (unit, [`build] Builder_db.Rep.id * [`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
Caqti_type.unit ->*
Caqti_type.tup4 (Builder_db.Rep.id `build) (Builder_db.Rep.id `build_artifact)
Caqti_type.t4 (Builder_db.Rep.id `build) (Builder_db.Rep.id `build_artifact)
Builder_db.Rep.fpath Builder_db.Rep.fpath @@
"SELECT b.id, b.main_binary, a.filepath, a.localpath FROM build b, build_artifact a WHERE b.main_binary = a.id AND b.main_binary IS NOT NULL"
@ -11,14 +11,14 @@ let build_not_stripped : ([`build] Builder_db.Rep.id, [`build_artifact] Builder_
"SELECT id FROM build_artifact WHERE build = ? AND filepath LIKE '%.debug'"
let update_paths : ([`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, unit, [ `Zero ]) Caqti_request.t =
Caqti_type.tup3 (Builder_db.Rep.id `build_artifact)
Caqti_type.t3 (Builder_db.Rep.id `build_artifact)
Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
Caqti_type.unit @@
"UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1"
let add_artifact : ((Fpath.t * Fpath.t * Cstruct.t) * (int64 * [`build] Builder_db.Rep.id), unit, [ `Zero]) Caqti_request.t =
Caqti_type.(tup2 (tup3 Builder_db.Rep.fpath Builder_db.Rep.fpath Builder_db.Rep.cstruct)
(tup2 Caqti_type.int64 (Builder_db.Rep.id `build))) ->.
let add_artifact : ((Fpath.t * Fpath.t * string) * (int64 * [`build] Builder_db.Rep.id), unit, [ `Zero]) Caqti_request.t =
Caqti_type.(t2 (t3 Builder_db.Rep.fpath Builder_db.Rep.fpath Caqti_type.octets)
(t2 Caqti_type.int64 (Builder_db.Rep.id `build))) ->.
Caqti_type.unit @@
"INSERT INTO build_artifact (filepath, localpath, sha256, size, build) VALUES (?, ?, ?, ?, ?)"
@ -48,7 +48,8 @@ let fixup datadir (module Db : Caqti_blocking.CONNECTION) =
in
assert (r = 0);
Bos.OS.File.read (Fpath.append datadir artifact_lpath) >>= fun data ->
let size = Int64.of_int (String.length data) and sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
let size = Int64.of_int (String.length data)
and sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
Db.exec update_paths (artifact_id, new_artifact_lpath, path artifact_fpath) >>= fun () ->
Db.exec add_artifact ((artifact_fpath, artifact_lpath, sha256), (size, build_id)) >>= fun () ->
Db.find Builder_db.last_insert_rowid () >>= fun new_build_artifact_id ->

View file

@ -2,11 +2,11 @@ open Grej.Infix
let all_build_artifacts_with_dot_slash : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
Caqti_type.unit ->*
Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
"SELECT id, filepath FROM build_artifact WHERE filepath LIKE './%'"
let update_path : ([`build_artifact] Builder_db.Rep.id * Fpath.t, unit, [< `Zero | `One | `Many > `Zero ]) Caqti_request.t =
Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath ->.
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath ->.
Caqti_type.unit @@
"UPDATE build_artifact SET filepath = $2 WHERE id = $1"

View file

@ -40,11 +40,11 @@ let copy_old_build =
let old_build_execution_result =
Caqti_type.unit ->*
Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) @@
Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) @@
"SELECT id, result_kind, result_code FROM build"
let update_new_build_execution_result =
Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int) ->. Caqti_type.unit @@
Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int) ->. Caqti_type.unit @@
"UPDATE new_build SET result_code = $2 WHERE id = $1"
let old_build =
@ -83,11 +83,11 @@ let copy_new_build =
let new_build_execution_result =
Caqti_type.unit ->*
Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int) @@
Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int) @@
"SELECT id, result_code FROM build"
let update_old_build_execution_result =
Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) ->.
Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) ->.
Caqti_type.unit @@
"UPDATE new_build SET result_kind = $2, result_code = $3 WHERE id = $1"

View file

@ -2,12 +2,12 @@ open Grej.Infix
let all_build_artifacts_like_hashes : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
Caqti_type.unit ->*
Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
"SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%.build-hashes'"
let all_build_artifacts_like_readme : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
Caqti_type.unit ->*
Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
"SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%README.md'"
let fixup datadir (module Db : Caqti_blocking.CONNECTION) =

View file

@ -8,8 +8,8 @@ open Grej.Infix
module Asn = struct
let decode_strict codec cs =
match Asn.decode codec cs with
| Ok (a, cs) ->
if Cstruct.length cs = 0
| Ok (a, rest) ->
if String.length rest = 0
then Ok a
else Error "trailing bytes"
| Error (`Parse msg) -> Error ("parse error: " ^ msg)
@ -95,24 +95,24 @@ let copy_from_new_build =
let old_build_console_script =
Caqti_type.unit ->*
Caqti_type.(tup4 (Builder_db.Rep.id (`build : [ `build ]))
(tup2 string Builder_db.Rep.uuid) Builder_db.Rep.cstruct string) @@
Caqti_type.(t4 (Builder_db.Rep.id (`build : [ `build ]))
(t2 string Builder_db.Rep.uuid) octets string) @@
"SELECT b.id, job.name, b.uuid, b.console, b.script FROM build b, job WHERE b.job = job.id"
let update_new_build_console_script =
Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ]))
Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ]))
Builder_db.Rep.fpath Builder_db.Rep.fpath) ->.
Caqti_type.unit @@
"UPDATE new_build SET console = $2, script = $3 WHERE id = $1"
let new_build_console_script =
Caqti_type.unit ->*
Caqti_type.tup3 (Builder_db.Rep.id (`build : [ `build ]))
Caqti_type.t3 (Builder_db.Rep.id (`build : [ `build ]))
Builder_db.Rep.fpath Builder_db.Rep.fpath @@
"SELECT id, console, script FROM build"
let update_old_build_console_script =
Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) Builder_db.Rep.cstruct string) ->.
Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) octets string) ->.
Caqti_type.unit @@
"UPDATE new_build SET console = $2, script = $3 WHERE id = $1"

View file

@ -2,13 +2,13 @@ open Grej.Infix
let mixups =
Caqti_type.unit ->*
Caqti_type.tup3 (Builder_db.Rep.id (`build : [`build]))
Caqti_type.t3 (Builder_db.Rep.id (`build : [`build]))
Builder_db.Rep.fpath Builder_db.Rep.fpath @@
"SELECT id, console, script FROM build \
WHERE console LIKE '%/script.txt' AND script LIKE '%/console.txt'"
let fixup =
Caqti_type.tup3 (Builder_db.Rep.id (`build : [`build]))
Caqti_type.t3 (Builder_db.Rep.id (`build : [`build]))
Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
Caqti_type.unit @@
"UPDATE build SET console = $2, script = $3 WHERE id = $1"

View file

@ -73,11 +73,11 @@ let copy_from_new_build =
|}
let build_id_and_user =
Caqti_type.unit ->* Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int64) @@
Caqti_type.unit ->* Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int64) @@
"SELECT id, user FROM build"
let update_new_build_platform =
Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) string) ->. Caqti_type.unit @@
Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) string) ->. Caqti_type.unit @@
"UPDATE new_build SET platform = $2 WHERE id = $1"
let drop_build =

View file

@ -23,21 +23,21 @@ let new_uuid_rep =
let uuids_byte_encoded_q =
Caqti_type.unit ->*
Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep @@
Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep @@
"SELECT id, uuid FROM build"
let uuids_hex_encoded_q =
Caqti_type.unit ->*
Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep @@
Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep @@
"SELECT id, uuid FROM build"
let migrate_q =
Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep ->.
Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep ->.
Caqti_type.unit @@
"UPDATE build SET uuid = $2 WHERE id = $1"
let rollback_q =
Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep ->.
Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep ->.
Caqti_type.unit @@
"UPDATE build SET uuid = $2 WHERE id = $1"

View file

@ -0,0 +1,32 @@
let new_version = 17L and old_version = 16L
and identifier = "2023-09-11"
and migrate_doc = "index failed builds on main binary is null"
and rollback_doc = "index failed builds on exit code"
open Grej.Syntax
let drop_idx_build_failed =
Caqti_type.(unit ->. unit) @@
"DROP INDEX idx_build_failed"
let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
let* () = Grej.check_version ~user_version:old_version (module Db) in
let* () = Db.exec drop_idx_build_failed () in
let* () =
Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
WHERE main_binary IS NULL")
()
in
Db.exec (Grej.set_version new_version) ()
let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
let* () = Grej.check_version ~user_version:new_version (module Db) in
let* () = Db.exec drop_idx_build_failed () in
let* () =
Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
WHERE result_code <> 0")
()
in
Db.exec (Grej.set_version old_version) ()

162
bin/migrations/m20230914.ml Normal file
View file

@ -0,0 +1,162 @@
let new_version = 18L and old_version = 17L
and identifier = "2023-09-14"
and migrate_doc = "Artifacts are stored content-addressed in the filesystem"
and rollback_doc = "Artifacts are stored under their build's job name and uuid"
open Grej.Syntax
let new_build_artifact =
Caqti_type.unit ->. Caqti_type.unit @@
{| CREATE TABLE new_build_artifact (
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
filepath TEXT NOT NULL,
sha256 BLOB NOT NULL,
size INTEGER NOT NULL,
build INTEGER NOT NULL,
FOREIGN KEY(build) REFERENCES build(id),
UNIQUE(build, filepath)
)
|}
let old_build_artifact =
Caqti_type.unit ->. Caqti_type.unit @@
{| CREATE TABLE new_build_artifact (
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
filepath TEXT NOT NULL, -- the path as in the build
localpath TEXT NOT NULL, -- local path to the file on disk
sha256 BLOB NOT NULL,
size INTEGER NOT NULL,
build INTEGER NOT NULL,
FOREIGN KEY(build) REFERENCES build(id),
UNIQUE(build, filepath)
)
|}
let idx_build_artifact_sha256 =
Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)"
let idx_build_artifact_build =
Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_artifact_build ON build_artifact(build)"
let copy_new_build_artifact =
Caqti_type.(unit ->. unit) @@
{| INSERT INTO new_build_artifact(id, filepath, sha256, size, build)
SELECT id, filepath, sha256, size, build
FROM build_artifact
|}
let copy_old_build_artifact =
Caqti_type.(unit ->. unit) @@
{| INSERT INTO new_build_artifact(id, filepath, localpath, sha256, size, build)
SELECT a.id, a.filepath,
j.name || '/' || b.uuid || '/output/' || a.filepath,
a.sha256, a.size, a.build
FROM build_artifact a, job j, build b
WHERE b.id = a.build AND j.id = b.job
|}
let new_build_artifact_paths =
Caqti_type.unit ->* Caqti_type.(t2 string string) @@
{| SELECT localpath, '_artifacts/' || substr(lower(hex(sha256)), 1, 2) || '/' || lower(hex(sha256))
FROM build_artifact
|}
let old_build_artifact_paths =
Caqti_type.unit ->* Caqti_type.(t2 string string) @@
{| SELECT '_artifacts/' || substr(lower(hex(a.sha256)), 1, 2) || '/' || lower(hex(a.sha256)),
j.name || '/' || b.uuid || '/output/' || a.filepath
FROM build_artifact a, job j, build b
WHERE b.id = a.build AND j.id = b.job
|}
let drop_build_artifact =
Caqti_type.(unit ->. unit) @@
"DROP TABLE build_artifact"
let rename_build_artifact =
Caqti_type.(unit ->. unit) @@
"ALTER TABLE new_build_artifact RENAME TO build_artifact"
let move_paths ?force datadir (old_path, new_path) =
let old_path = Fpath.(datadir // v old_path) and new_path = Fpath.(datadir // v new_path) in
let* _created = Bos.OS.Dir.create (Fpath.parent new_path) in
Bos.OS.Path.move ?force old_path new_path
let copy_paths datadir (old_path, new_path) =
let old_path = Fpath.(datadir // v old_path) and new_path = Fpath.(datadir // v new_path) in
let new_path_tmp = Fpath.(new_path + "tmp") in
let* _created = Bos.OS.Dir.create (Fpath.parent new_path) in
let cmd = Bos.Cmd.(v "cp" % p old_path % p new_path_tmp) in
let* () =
match Bos.OS.Cmd.run_status cmd with
| Ok `Exited 0 ->
Ok ()
| Ok status ->
let _ = Bos.OS.Path.delete new_path_tmp in
Error (`Msg (Fmt.str "cp failed: %a" Bos.OS.Cmd.pp_status status))
| Error _ as e ->
let _ = Bos.OS.Path.delete new_path_tmp in
e
in
Bos.OS.Path.move ~force:true new_path_tmp new_path
let migrate datadir (module Db : Caqti_blocking.CONNECTION) =
let* () = Grej.check_version ~user_version:old_version (module Db) in
let* () = Db.exec new_build_artifact () in
let* () = Db.exec copy_new_build_artifact () in
let* () = Db.iter_s new_build_artifact_paths (move_paths ~force:true datadir) () in
let* () = Db.exec drop_build_artifact () in
let* () = Db.exec rename_build_artifact () in
let* () = Db.exec idx_build_artifact_sha256 () in
let* () = Db.exec idx_build_artifact_build () in
Db.exec (Grej.set_version new_version) ()
let rollback datadir (module Db : Caqti_blocking.CONNECTION) =
let* () = Grej.check_version ~user_version:new_version (module Db) in
let* () = Db.exec old_build_artifact () in
let* () = Db.exec copy_old_build_artifact () in
let* () = Db.iter_s old_build_artifact_paths (copy_paths datadir) () in
let* () =
Db.iter_s old_build_artifact_paths
(fun (old_path, _new_path) ->
Bos.OS.Path.delete Fpath.(datadir // v old_path))
()
in
let* () = Db.exec drop_build_artifact () in
let* () = Db.exec rename_build_artifact () in
let* () = Db.exec idx_build_artifact_sha256 () in
Db.exec (Grej.set_version old_version) ()
(* migration failed but managed to move *some* files *)
let fixup_migrate datadir (module Db : Caqti_blocking.CONNECTION) =
let* () = Grej.check_version ~user_version:old_version (module Db) in
let* () =
Db.iter_s new_build_artifact_paths
(fun (old_path, new_path) ->
let* old_exists = Bos.OS.Path.exists Fpath.(datadir // v old_path) in
let* new_exists = Bos.OS.Path.exists Fpath.(datadir // v new_path) in
if new_exists && not old_exists then
copy_paths datadir (new_path, old_path)
else Ok ())
()
in
Db.iter_s new_build_artifact_paths
(fun (_old_path, new_path) ->
Bos.OS.Path.delete Fpath.(datadir // v new_path))
()
(* rollback failed but some or all artifacts were copied *)
let fixup_rollback datadir (module Db : Caqti_blocking.CONNECTION) =
let* () = Grej.check_version ~user_version:new_version (module Db) in
Db.iter_s old_build_artifact_paths
(fun (old_path, new_path) ->
let* old_exists = Bos.OS.Path.exists Fpath.(datadir // v old_path) in
if old_exists then
Bos.OS.Path.delete Fpath.(datadir // v new_path)
else
move_paths datadir (new_path, old_path))
()

View file

@ -1,9 +1,9 @@
opam-version: "2.0"
maintainer: "Reynir Björnsson <reynir@reynir.dk>"
authors: ["Reynir Björnsson <reynir@reynir.dk>"]
homepage: "https://github.com/roburio/builder-web"
dev-repo: "git+https://github.com/roburio/builder-web.git"
bug-reports: "https://github.com/roburio/builder-web/issues"
homepage: "https://github.com/robur-coop/builder-web"
dev-repo: "git+https://github.com/robur-coop/builder-web.git"
bug-reports: "https://github.com/robur-coop/builder-web/issues"
license: "ISC"
build: [
@ -17,18 +17,16 @@ build: [
depends: [
"ocaml" {>= "4.13.0"}
"dune" {>= "2.7.0"}
"builder" {>= "0.2.0"}
"dream" {= "1.0.0~alpha4"}
"cstruct" {>= "6.0.0"}
"builder" {>= "0.4.0"}
"dream" {>= "1.0.0~alpha7"}
"bos"
"hex"
"lwt" {>= "5.6.0"}
"caqti" {>= "1.8.0"}
"ohex" {>= "0.2.0"}
"lwt" {>= "5.7.0"}
"caqti" {>= "2.1.2"}
"caqti-lwt"
"caqti-driver-sqlite3"
"pbkdf"
"mirage-crypto-rng"
"scrypt-kdf"
"mirage-crypto-rng" {>= "0.11.0"}
"kdf"
"opam-core"
"opam-format" {>= "2.1.0"}
"metrics" {>= "0.3.0"}
@ -39,25 +37,27 @@ depends: [
"tyxml" {>= "4.3.0"}
"ptime"
"duration"
"mirage-crypto"
"asn1-combinators"
"asn1-combinators" {>= "0.3.0"}
"logs"
"cmdliner" {>= "1.1.0"}
"uri"
"fmt" {>= "0.8.7"}
"omd"
"tar"
"cmarkit" {>= "0.3.0"}
"tar" {>= "3.0.0"}
"tar-unix" {>= "3.0.0"}
"owee"
"solo5-elftool" {>= "0.3.0"}
"decompress"
"alcotest" {with-test}
"decompress" {>= "1.5.0"}
"digestif" {>= "1.2.0"}
"alcotest" {>= "1.2.0" & with-test}
"ppx_deriving" {with-test}
"ppx_deriving_yojson" {with-test}
"yojson" {with-test}
]
synopsis: "Web interface for builder"
description: """
Builder-web takes in submissions of builds, typically from [builder](https://github.com/roburio/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
Builder-web takes in submissions of builds, typically from [builder](https://github.com/robur-coop/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
Produced binaries can be downloaded and executed.
[builds.robur.coop](https://builds.robur.coop/) itself runs builder-web.
"""

View file

@ -4,15 +4,15 @@ open Caqti_request.Infix
let application_id = 1234839235l
(* Please update this when making changes! *)
let current_version = 16L
(* Please update this when making changes! And also update
packaging/batch-viz.sh and packaging/visualizations.sh. *)
let current_version = 18L
type 'a id = 'a Rep.id
type file = Rep.file = {
filepath : Fpath.t;
localpath : Fpath.t;
sha256 : Cstruct.t;
sha256 : string;
size : int;
}
@ -57,7 +57,7 @@ module Job = struct
let get_all_with_section_synopsis =
Caqti_type.unit ->*
Caqti_type.(tup4 (id `job) string (option string) (option string)) @@
Caqti_type.(t4 (id `job) string (option string) (option string)) @@
{| SELECT j.id, j.name, section.value, synopsis.value
FROM job j, tag section_tag, tag synopsis_tag
LEFT JOIN job_tag section ON section.job = j.id AND section.tag = section_tag.id
@ -117,15 +117,15 @@ module Job_tag = struct
"DROP TABLE IF EXISTS job_tag"
let add =
Caqti_type.(tup3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
Caqti_type.(t3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
"INSERT INTO job_tag (tag, value, job) VALUES ($1, $2, $3)"
let update =
Caqti_type.(tup3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
Caqti_type.(t3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
"UPDATE job_tag SET value = $2 WHERE tag = $1 AND job = $3"
let get_value =
Caqti_type.(tup2 (id `tag) (id `job)) ->? Caqti_type.string @@
Caqti_type.(t2 (id `tag) (id `job)) ->? Caqti_type.string @@
"SELECT value FROM job_tag WHERE tag = ? AND job = ?"
let remove_by_job =
@ -140,7 +140,6 @@ module Build_artifact = struct
{| CREATE TABLE build_artifact (
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
filepath TEXT NOT NULL, -- the path as in the build
localpath TEXT NOT NULL, -- local path to the file on disk
sha256 BLOB NOT NULL,
size INTEGER NOT NULL,
build INTEGER NOT NULL,
@ -156,26 +155,30 @@ module Build_artifact = struct
let get =
id `build_artifact ->! file @@
{| SELECT filepath, localpath, sha256, size
{| SELECT filepath, sha256, size
FROM build_artifact WHERE id = ? |}
let get_by_build_uuid =
Caqti_type.tup2 uuid fpath ->? Caqti_type.tup2 (id `build_artifact) file @@
Caqti_type.t2 uuid fpath ->? Caqti_type.t2 (id `build_artifact) file @@
{| SELECT build_artifact.id, build_artifact.filepath,
build_artifact.localpath, build_artifact.sha256, build_artifact.size
build_artifact.sha256, build_artifact.size
FROM build_artifact
INNER JOIN build ON build.id = build_artifact.build
WHERE build.uuid = ? AND build_artifact.filepath = ?
|}
let get_all_by_build =
id `build ->* Caqti_type.(tup2 (id `build_artifact) file) @@
"SELECT id, filepath, localpath, sha256, size FROM build_artifact WHERE build = ?"
id `build ->* Caqti_type.(t2 (id `build_artifact) file) @@
"SELECT id, filepath, sha256, size FROM build_artifact WHERE build = ?"
let exists =
Caqti_type.octets ->! Caqti_type.bool @@
"SELECT EXISTS(SELECT 1 FROM build_artifact WHERE sha256 = ?)"
let add =
Caqti_type.(tup2 file (id `build)) ->. Caqti_type.unit @@
"INSERT INTO build_artifact (filepath, localpath, sha256, size, build) \
VALUES (?, ?, ?, ?, ?)"
Caqti_type.(t2 file (id `build)) ->. Caqti_type.unit @@
"INSERT INTO build_artifact (filepath, sha256, size, build) \
VALUES (?, ?, ?, ?)"
let remove_by_build =
id `build ->. Caqti_type.unit @@
@ -196,34 +199,55 @@ module Build = struct
script : Fpath.t;
platform : string;
main_binary : [`build_artifact] id option;
input_id : Cstruct.t option;
input_id : string option;
user_id : [`user] id;
job_id : [`job] id;
}
let pp ppf t =
Fmt.pf ppf "@[<hov>{ uuid=@ %a;@ \
start=@ %a;@ \
finish=@ %a;@ \
result=@ @[<hov>%a@];@ \
console=@ %a;@ \
script=@ %a;@ \
platform=@ %S;@ \
main_binary=@ @[<hov>%a@];@ \
input_id=@ @[<hov>%a@];@ \
user_id=@ %Lx;@ \
job_id=@ %Lx;@ }@]"
Uuidm.pp t.uuid
Ptime.pp t.start
Ptime.pp t.finish
Builder.pp_execution_result t.result
Fpath.pp t.console
Fpath.pp t.script
t.platform
Fmt.(Dump.option int64) t.main_binary
Fmt.(Dump.option string) t.input_id
t.user_id
t.job_id
let t =
let rep =
Caqti_type.(tup3
(tup4
uuid
(tup2
Rep.ptime
Rep.ptime)
(tup2
execution_result
fpath)
(tup4
fpath
string
(option (Rep.id `build_artifact))
(option Rep.cstruct)))
Caqti_type.(t11
uuid
Rep.ptime
Rep.ptime
execution_result
fpath
fpath
string
(option (Rep.id `build_artifact))
(option octets)
(id `user)
(id `job))
in
let encode { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id } =
Ok ((uuid, (start, finish), (result, console), (script, platform, main_binary, input_id)), user_id, job_id)
Ok (uuid, start, finish, result, console, script, platform, main_binary, input_id, user_id, job_id)
in
let decode ((uuid, (start, finish), (result, console), (script, platform, main_binary, input_id)), user_id, job_id) =
let decode (uuid, start, finish, result, console, script, platform, main_binary, input_id, user_id, job_id) =
Ok { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id }
in
Caqti_type.custom ~encode ~decode rep
@ -258,7 +282,7 @@ module Build = struct
"DROP TABLE IF EXISTS build"
let get_by_uuid =
Rep.uuid ->? Caqti_type.tup2 (id `build) t @@
Rep.uuid ->? Caqti_type.t2 (id `build) t @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg,
console, script, platform, main_binary, input_id, user, job
@ -267,7 +291,7 @@ module Build = struct
|}
let get_all =
id `job ->* Caqti_type.tup2 (id `build) t @@
id `job ->* Caqti_type.t2 (id `build) t @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console,
script, platform, main_binary, input_id, user, job
@ -277,20 +301,20 @@ module Build = struct
|}
let get_all_failed =
Caqti_type.(tup3 int int (option string)) ->* Caqti_type.tup2 Caqti_type.string t @@
Caqti_type.(t3 int int (option string)) ->* Caqti_type.t2 Caqti_type.string t @@
{| SELECT job.name, b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg, b.console, b.script, b.platform,
b.main_binary, b.input_id, b.user, b.job
FROM build b
INNER JOIN job ON job.id = b.job
WHERE b.result_code <> 0 AND ($3 IS NULL OR b.platform = $3)
WHERE b.main_binary IS NULL AND ($3 IS NULL OR b.platform = $3)
ORDER BY start_d DESC, start_ps DESC
LIMIT $2
OFFSET $1
|}
let get_all_artifact_sha =
Caqti_type.(tup2 (id `job) (option string)) ->* Rep.cstruct @@
Caqti_type.(t2 (id `job) (option string)) ->* Caqti_type.octets @@
{| SELECT DISTINCT a.sha256
FROM build_artifact a, build b
WHERE b.job = $1 AND b.main_binary = a.id
@ -299,40 +323,79 @@ module Build = struct
|}
let get_failed_builds =
Caqti_type.(tup2 (id `job) (option string)) ->* t @@
Caqti_type.(t2 (id `job) (option string)) ->* t @@
{| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console, script,
platform, main_binary, input_id, user, job
FROM build
WHERE job = $1 AND result_code <> 0
WHERE job = $1
AND main_binary IS NULL
AND ($2 IS NULL OR platform = $2)
ORDER BY start_d DESC, start_ps DESC
|}
let get_latest_successful_with_binary =
Caqti_type.(tup2 (id `job) string) ->? Caqti_type.tup3 (id `build) t file_opt @@
Caqti_type.(t2 (id `job) string) ->? Caqti_type.t3 (id `build) t file @@
{| SELECT b.id,
b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg, b.console, b.script,
b.platform, b.main_binary, b.input_id, b.user, b.job,
a.filepath, a.localpath, a.sha256, a.size
FROM build b
LEFT JOIN build_artifact a ON
b.main_binary = a.id
WHERE b.job = $1 AND b.platform = $2 AND b.result_code = 0
a.filepath, a.sha256, a.size
FROM build b, build_artifact a
WHERE b.main_binary = a.id AND b.job = $1 AND b.platform = $2
AND b.main_binary IS NOT NULL
ORDER BY b.start_d DESC, b.start_ps DESC
LIMIT 1
|}
let get_builds_older_than =
Caqti_type.(t3 (id `job) (option string) Rep.ptime) ->* Caqti_type.t2 (id `build) t @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console, script,
platform, main_binary, input_id, user, job
FROM build
WHERE job = $1
AND ($2 IS NULL OR platform = $2)
AND (finish_d < $3 OR (finish_d = $3 AND finish_ps <= $4))
ORDER BY start_d DESC, start_ps DESC
|}
let get_builds_excluding_latest_n =
Caqti_type.(t3 (id `job) (option string) int) ->* Caqti_type.t2 (id `build) t @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console, script,
platform, main_binary, input_id, user, job
FROM build
WHERE job = $1
AND ($2 IS NULL OR platform = $2)
ORDER BY start_d DESC, start_ps DESC
LIMIT -1 OFFSET $3
|}
(* "LIMIT -1 OFFSET n" is all rows except the first n *)
let get_nth_latest_successful =
Caqti_type.(t3 (id `job) (option string) int) ->? Caqti_type.t2 (id `build) t @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console, script,
platform, main_binary, input_id, user, job
FROM build
WHERE job = $1
AND ($2 IS NULL OR platform = $2)
AND main_binary IS NOT NULL
ORDER BY start_d DESC, start_ps DESC
LIMIT 1 OFFSET $3
|}
let get_latest_successful =
Caqti_type.(tup2 (id `job) (option string)) ->? t @@
Caqti_type.(t2 (id `job) (option string)) ->? t @@
{| SELECT
b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg, b.console, b.script,
b.platform, b.main_binary, b.input_id, b.user, b.job
FROM build b
WHERE b.job = $1 AND b.result_code = 0
WHERE b.job = $1
AND ($2 IS NULL OR b.platform = $2)
AND b.main_binary IS NOT NULL
ORDER BY b.start_d DESC, b.start_ps DESC
LIMIT 1
|}
@ -346,7 +409,7 @@ module Build = struct
FROM build b, build b0, build_artifact a, build_artifact a0
WHERE b0.id = ? AND b0.job = b.job AND
b.platform = b0.platform AND
b.result_code = 0 AND
b.main_binary IS NOT NULL AND
a.id = b.main_binary AND a0.id = b0.main_binary AND
a.sha256 <> a0.sha256 AND
(b0.start_d > b.start_d OR b0.start_d = b.start_d AND b0.start_ps > b.start_ps)
@ -363,7 +426,7 @@ module Build = struct
FROM build b, build b0, build_artifact a, build_artifact a0
WHERE b0.id = ? AND b0.job = b.job AND
b.platform = b0.platform AND
b.result_code = 0 AND
b.main_binary IS NOT NULL AND
a.id = b.main_binary AND a0.id = b0.main_binary AND
a.sha256 <> a0.sha256 AND
(b0.start_d < b.start_d OR b0.start_d = b.start_d AND b0.start_ps < b.start_ps)
@ -383,7 +446,7 @@ module Build = struct
|}
let get_same_input_different_output_hashes =
id `build ->* Rep.cstruct @@
id `build ->* Caqti_type.octets @@
{| SELECT DISTINCT a.sha256
FROM build b0, build_artifact a0, build b, build_artifact a
WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 <> a.sha256
@ -392,7 +455,7 @@ module Build = struct
|}
let get_different_input_same_output_input_ids =
id `build ->* Rep.cstruct @@
id `build ->* Caqti_type.octets @@
{| SELECT DISTINCT b.input_id
FROM build b0, build_artifact a0, build b, build_artifact a
WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 = a.sha256
@ -400,7 +463,7 @@ module Build = struct
|}
let get_one_by_input_id =
Rep.cstruct ->! t @@
Caqti_type.octets ->! t @@
{| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console, script,
platform, main_binary, input_id, user, job
@ -424,7 +487,7 @@ module Build = struct
|}
let get_by_hash =
Rep.cstruct ->! t @@
Caqti_type.octets ->! t @@
{| SELECT
b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg, b.console, b.script,
@ -437,11 +500,11 @@ module Build = struct
|}
let get_with_main_binary_by_hash =
Rep.cstruct ->! Caqti_type.tup2 t file_opt @@
Caqti_type.octets ->! Caqti_type.t2 t file_opt @@
{| SELECT b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg, b.console, b.script,
b.platform, b.main_binary, b.input_id, b.user, b.job,
a.filepath, a.localpath, a.sha256, a.size
a.filepath, a.sha256, a.size
FROM build_artifact a
INNER JOIN build b ON b.id = a.build
WHERE a.sha256 = ?
@ -450,7 +513,7 @@ module Build = struct
|}
let get_with_jobname_by_hash =
Rep.cstruct ->? Caqti_type.tup2 Caqti_type.string t @@
Caqti_type.octets ->? Caqti_type.t2 Caqti_type.string t @@
{| SELECT job.name,
b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg,
@ -464,7 +527,7 @@ module Build = struct
|}
let set_main_binary =
Caqti_type.tup2 (id `build) (id `build_artifact) ->. Caqti_type.unit @@
Caqti_type.t2 (id `build) (id `build_artifact) ->. Caqti_type.unit @@
"UPDATE build SET main_binary = $2 WHERE id = $1"
let remove =
@ -492,7 +555,7 @@ module User = struct
"DROP TABLE IF EXISTS user"
let get_user =
Caqti_type.string ->? Caqti_type.tup2 (id `user) user_info @@
Caqti_type.string ->? Caqti_type.t2 (id `user) user_info @@
{| SELECT id, username, password_hash, password_salt,
scrypt_n, scrypt_r, scrypt_p, restricted
FROM user
@ -546,15 +609,15 @@ module Access_list = struct
"DROP TABLE IF EXISTS access_list"
let get =
Caqti_type.tup2 (id `user) (id `job) ->! id `access_list @@
Caqti_type.t2 (id `user) (id `job) ->! id `access_list @@
"SELECT id FROM access_list WHERE user = ? AND job = ?"
let add =
Caqti_type.tup2 (id `user) (id `job) ->. Caqti_type.unit @@
Caqti_type.t2 (id `user) (id `job) ->. Caqti_type.unit @@
"INSERT INTO access_list (user, job) VALUES (?, ?)"
let remove =
Caqti_type.tup2 (id `user) (id `job) ->. Caqti_type.unit @@
Caqti_type.t2 (id `user) (id `job) ->. Caqti_type.unit @@
"DELETE FROM access_list WHERE user = ? AND job = ?"
let remove_by_job =
@ -585,13 +648,15 @@ let migrate = [
Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)";
Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) WHERE result_code <> 0";
"CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) WHERE main_binary IS NULL";
Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_input_id ON build(input_id)";
Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_main_binary ON build(main_binary)";
Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)";
Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_artifact_build ON build_artifact(build)";
set_current_version;
set_application_id;
]
@ -605,6 +670,8 @@ let rollback = [
Build.rollback;
Job.rollback;
Caqti_type.unit ->. Caqti_type.unit @@
"DROP INDEX IF EXISTS idx_build_artifact_build";
Caqti_type.unit ->. Caqti_type.unit @@
"DROP INDEX IF EXISTS idx_build_artifact_sha256";
Caqti_type.unit ->. Caqti_type.unit @@
"DROP INDEX IF EXISTS idx_build_failed";

View file

@ -3,8 +3,7 @@ module Rep : sig
type 'a id
type file = {
filepath : Fpath.t;
localpath : Fpath.t;
sha256 : Cstruct.t;
sha256 : string;
size : int;
}
@ -14,7 +13,6 @@ module Rep : sig
val uuid : Uuidm.t Caqti_type.t
val ptime : Ptime.t Caqti_type.t
val fpath : Fpath.t Caqti_type.t
val cstruct : Cstruct.t Caqti_type.t
val file : file Caqti_type.t
val execution_result : Builder.execution_result Caqti_type.t
val console : (int * string) list Caqti_type.t
@ -23,8 +21,7 @@ type 'a id = 'a Rep.id
type file = Rep.file = {
filepath : Fpath.t;
localpath : Fpath.t;
sha256 : Cstruct.t;
sha256 : string;
size : int;
}
@ -87,6 +84,7 @@ module Build_artifact : sig
Caqti_request.t
val get_all_by_build :
([`build] id, [`build_artifact] id * file, [ `Many | `One | `Zero ]) Caqti_request.t
val exists : (string, bool, [ `One ]) Caqti_request.t
val add :
(file * [`build] id, unit, [ `Zero ]) Caqti_request.t
val remove_by_build :
@ -106,11 +104,13 @@ sig
script : Fpath.t;
platform : string;
main_binary : [`build_artifact] id option;
input_id : Cstruct.t option;
input_id : string option;
user_id : [`user] id;
job_id : [`job] id;
}
val pp : t Fmt.t
val get_by_uuid :
(Uuidm.t, [`build] id * t, [ `One | `Zero ])
Caqti_request.t
@ -119,15 +119,21 @@ sig
val get_all_failed :
(int * int * string option, string * t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_all_artifact_sha :
([`job] id * string option, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
([`job] id * string option, string, [ `Many | `One | `Zero ]) Caqti_request.t
val get_latest_successful_with_binary :
([`job] id * string, [`build] id * t * file option, [ `One | `Zero ])
([`job] id * string, [`build] id * t * file, [ `One | `Zero ])
Caqti_request.t
val get_failed_builds :
([`job] id * string option, t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_latest_successful :
([`job] id * string option, t, [ `One | `Zero ])
Caqti_request.t
val get_builds_older_than :
([`job] id * string option * Ptime.t, [`build] id * t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_builds_excluding_latest_n :
([`job] id * string option * int, [`build] id * t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_nth_latest_successful :
([`job] id * string option * int, [`build] id * t, [ `One | `Zero ]) Caqti_request.t
val get_previous_successful_different_output :
([`build] id, t, [ `One | `Zero ])
Caqti_request.t
@ -137,20 +143,20 @@ sig
val get_same_input_same_output_builds :
([`build] id, t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_same_input_different_output_hashes :
([`build] id, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
([`build] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
val get_different_input_same_output_input_ids :
([`build] id, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
([`build] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
val get_one_by_input_id :
(Cstruct.t, t, [ `One ]) Caqti_request.t
(string, t, [ `One ]) Caqti_request.t
val get_platforms_for_job :
([`job] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
val add : (t, unit, [ `Zero ]) Caqti_request.t
val get_by_hash :
(Cstruct.t, t, [ `One]) Caqti_request.t
(string, t, [ `One]) Caqti_request.t
val get_with_main_binary_by_hash :
(Cstruct.t, t * file option, [ `One]) Caqti_request.t
(string, t * file option, [ `One]) Caqti_request.t
val get_with_jobname_by_hash :
(Cstruct.t, string * t, [ `One | `Zero]) Caqti_request.t
(string, string * t, [ `One | `Zero]) Caqti_request.t
val set_main_binary : ([`build] id * [`build_artifact] id, unit, [ `Zero ]) Caqti_request.t
val remove : ([`build] id, unit, [ `Zero]) Caqti_request.t
end

View file

@ -1,3 +1,3 @@
(library
(name builder_db)
(libraries builder caqti caqti-driver-sqlite3 asn1-combinators mirage-crypto builder_web_auth))
(libraries builder caqti caqti-driver-sqlite3 asn1-combinators builder_web_auth))

View file

@ -1,8 +1,8 @@
module Asn = struct
let decode_strict codec cs =
match Asn.decode codec cs with
| Ok (a, cs) ->
if Cstruct.length cs = 0
| Ok (a, rest) ->
if String.length rest = 0
then Ok a
else Error "trailing bytes"
| Error (`Parse msg) -> Error ("parse error: " ^ msg)
@ -17,7 +17,7 @@ module Asn = struct
(required ~label:"delta" int)
(required ~label:"data" utf8_string)))
let console_of_cs, console_to_cs = projections_of console
let console_of_str, console_to_str = projections_of console
end
type untyped_id = int64
@ -30,8 +30,7 @@ let id_to_int64 (id : 'a id) : int64 = id
type file = {
filepath : Fpath.t;
localpath : Fpath.t;
sha256 : Cstruct.t;
sha256 : string;
size : int;
}
@ -48,7 +47,7 @@ let ptime =
let encode t = Ok (Ptime.Span.to_d_ps (Ptime.to_span t)) in
let decode (d, ps) = Ok (Ptime.v (d, ps))
in
let rep = Caqti_type.(tup2 int int64) in
let rep = Caqti_type.(t2 int int64) in
Caqti_type.custom ~encode ~decode rep
let fpath =
@ -57,30 +56,25 @@ let fpath =
|> Result.map_error (fun (`Msg s) -> s) in
Caqti_type.custom ~encode ~decode Caqti_type.string
let cstruct =
let encode t = Ok (Cstruct.to_string t) in
let decode s = Ok (Cstruct.of_string s) in
Caqti_type.custom ~encode ~decode Caqti_type.octets
let file =
let encode { filepath; localpath; sha256; size } =
Ok (filepath, localpath, sha256, size) in
let decode (filepath, localpath, sha256, size) =
Ok { filepath; localpath; sha256; size } in
Caqti_type.custom ~encode ~decode Caqti_type.(tup4 fpath fpath cstruct int)
let encode { filepath; sha256; size } =
Ok (filepath, sha256, size) in
let decode (filepath, sha256, size) =
Ok { filepath; sha256; size } in
Caqti_type.custom ~encode ~decode Caqti_type.(t3 fpath octets int)
let file_opt =
let rep = Caqti_type.(tup4 (option fpath) (option fpath) (option cstruct) (option int)) in
let rep = Caqti_type.(t3 (option fpath) (option octets) (option int)) in
let encode = function
| Some { filepath; localpath; sha256; size } ->
Ok (Some filepath, Some localpath, Some sha256, Some size)
| Some { filepath; sha256; size } ->
Ok (Some filepath, Some sha256, Some size)
| None ->
Ok (None, None, None, None)
Ok (None, None, None)
in
let decode = function
| (Some filepath, Some localpath, Some sha256, Some size) ->
Ok (Some { filepath; localpath; sha256; size })
| (None, None, None, None) ->
| (Some filepath, Some sha256, Some size) ->
Ok (Some { filepath; sha256; size })
| (None, None, None) ->
Ok None
| _ ->
(* This should not happen if the database is well-formed *)
@ -109,25 +103,25 @@ let execution_result =
else
Error "bad encoding (unknown number)"
in
let rep = Caqti_type.(tup2 int (option string)) in
let rep = Caqti_type.(t2 int (option string)) in
Caqti_type.custom ~encode ~decode rep
let console =
let encode console = Ok (Asn.console_to_cs console) in
let decode data = Asn.console_of_cs data in
Caqti_type.custom ~encode ~decode cstruct
let encode console = Ok (Asn.console_to_str console) in
let decode data = Asn.console_of_str data in
Caqti_type.(custom ~encode ~decode octets)
let user_info =
let rep = Caqti_type.(tup4 string cstruct cstruct (tup4 int int int bool)) in
let rep = Caqti_type.(t7 string octets octets int int int bool) in
let encode { Builder_web_auth.username;
password_hash = `Scrypt (password_hash, password_salt, {
Builder_web_auth.scrypt_n; scrypt_r; scrypt_p
});
restricted; }
=
Ok (username, password_hash, password_salt, (scrypt_n, scrypt_r, scrypt_p, restricted))
Ok (username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted)
in
let decode (username, password_hash, password_salt, (scrypt_n, scrypt_r, scrypt_p, restricted)) =
let decode (username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) =
Ok { Builder_web_auth.username;
password_hash =
`Scrypt (password_hash, password_salt,

View file

@ -1,2 +1,3 @@
(lang dune 2.7)
(name builder-web)
(formatting disabled)

View file

@ -26,7 +26,7 @@ let init_datadir datadir =
let init dbpath datadir =
Result.bind (init_datadir datadir) @@ fun () ->
Lwt_main.run (
Caqti_lwt.connect
Caqti_lwt_unix.connect
(Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
>>= fun (module Db : Caqti_lwt.CONNECTION) ->
Db.find Builder_db.get_application_id () >>= fun application_id ->
@ -134,14 +134,14 @@ module Viz_aux = struct
viz_dir ~cachedir ~viz_typ ~version
/ input_hash + "html"
)
let choose_versioned_viz_path
~cachedir
~viz_typ
~viz_input_hash
~current_version =
let ( >>= ) = Result.bind in
let rec aux current_version =
let rec aux current_version =
let path =
viz_path ~cachedir
~viz_typ
@ -153,7 +153,7 @@ module Viz_aux = struct
Error (`Msg (Fmt.str "viz '%s': There exist no version of the requested \
visualization"
(viz_type_to_string viz_typ)))
else
else
aux @@ pred current_version
)
in
@ -162,7 +162,7 @@ module Viz_aux = struct
let get_viz_version_from_dirs ~cachedir ~viz_typ =
let ( >>= ) = Result.bind in
Bos.OS.Dir.contents cachedir >>= fun versioned_dirs ->
let max_cached_version =
let max_cached_version =
let viz_typ_str = viz_type_to_string viz_typ ^ "_" in
versioned_dirs
|> List.filter_map (fun versioned_dir ->
@ -171,7 +171,7 @@ module Viz_aux = struct
Logs.warn (fun m -> m "%s" err);
None
| Ok false -> None
| Ok true ->
| Ok true ->
let dir_str = Fpath.filename versioned_dir in
if not (String.starts_with ~prefix:viz_typ_str dir_str) then
None
@ -199,10 +199,6 @@ module Viz_aux = struct
let hash_viz_input ~uuid typ db =
let open Builder_db in
let hex cstruct =
let `Hex hex_str = Hex.of_cstruct cstruct in
hex_str
in
main_binary_of_uuid uuid db >>= fun main_binary ->
Model.build uuid db
|> if_error "Error getting build" >>= fun (build_id, _build) ->
@ -210,29 +206,29 @@ module Viz_aux = struct
|> if_error "Error getting build artifacts" >>= fun artifacts ->
match typ with
| `Treemap ->
let debug_binary =
let bin = Fpath.base main_binary.localpath in
let debug_binary =
let bin = Fpath.base main_binary.filepath in
List.find_opt
(fun p -> Fpath.(equal (bin + "debug") (base p.localpath)))
(fun p -> Fpath.(equal (bin + "debug") (base p.filepath)))
artifacts
in
begin
Model.not_found debug_binary
|> not_found_error >>= fun debug_binary ->
debug_binary.sha256
|> hex
|> Ohex.encode
|> Lwt_result.return
end
| `Dependencies ->
end
| `Dependencies ->
let opam_switch =
List.find_opt
(fun p -> Fpath.(equal (v "opam-switch") (base p.localpath)))
(fun p -> Fpath.(equal (v "opam-switch") (base p.filepath)))
artifacts
in
Model.not_found opam_switch
|> not_found_error >>= fun opam_switch ->
opam_switch.sha256
|> hex
|> Ohex.encode
|> Lwt_result.return
let try_load_cached_visualization ~cachedir ~uuid viz_typ db =
@ -247,7 +243,7 @@ module Viz_aux = struct
|> Lwt.return
|> if_error "Error finding a version of the requested visualization")
>>= fun viz_path ->
Lwt_result.catch (
Lwt_result.catch (fun () ->
Lwt_io.with_file ~mode:Lwt_io.Input
(Fpath.to_string viz_path)
Lwt_io.read
@ -258,8 +254,17 @@ module Viz_aux = struct
end
let routes ~datadir ~cachedir ~configdir =
let builds req =
let routes ~datadir ~cachedir ~configdir ~expired_jobs =
let builds ~all ?(filter_builds_later_than = 0) req =
let than =
if filter_builds_later_than = 0 then
Ptime.epoch
else
let n = Ptime.Span.v (filter_builds_later_than, 0L) in
let now = Ptime_clock.now () in
Ptime.Span.sub (Ptime.to_span now) n |> Ptime.of_span |>
Option.fold ~none:Ptime.epoch ~some:Fun.id
in
Dream.sql req Model.jobs_with_section_synopsis
|> if_error "Error getting jobs"
~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e))
@ -272,20 +277,26 @@ let routes ~datadir ~cachedir ~configdir =
r >>= fun acc ->
Dream.sql req (Model.build_with_main_binary job_id platform) >>= function
| Some (build, artifact) ->
Lwt_result.return ((platform, build, artifact) :: acc)
if Ptime.is_later ~than build.finish then
Lwt_result.return ((platform, build, artifact) :: acc)
else
Lwt_result.return acc
| None ->
Log.warn (fun m -> m "Job without builds: %s" job_name);
Lwt_result.return acc)
ps (Lwt_result.return []) >>= fun platform_builds ->
let v = (job_name, synopsis, platform_builds) in
let section = Option.value ~default:"Uncategorized" section in
Lwt_result.return (Utils.String_map.add_or_create section v acc))
if platform_builds = [] then
Lwt_result.return acc
else
let v = (job_name, synopsis, platform_builds) in
let section = Option.value ~default:"Uncategorized" section in
Lwt_result.return (Utils.String_map.add_or_create section v acc))
jobs
(Lwt_result.return Utils.String_map.empty)
|> if_error "Error getting jobs"
~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e))
>>= fun jobs ->
Views.Builds.make jobs |> string_of_html |> Dream.html |> Lwt_result.ok
Views.Builds.make ~all jobs |> string_of_html |> Dream.html |> Lwt_result.ok
in
let job req =
@ -413,15 +424,15 @@ let routes ~datadir ~cachedir ~configdir =
|> if_error ~status:`Not_Found "File not found" >>= fun filepath ->
Dream.sql req (Model.build_artifact build filepath)
|> if_error "Error getting build artifact" >>= fun file ->
let etag = Base64.encode_string (Cstruct.to_string file.Builder_db.sha256) in
let etag = Base64.encode_string file.Builder_db.sha256 in
match if_none_match with
| Some etag' when etag = etag' ->
Dream.empty `Not_Modified |> Lwt_result.ok
| _ ->
Model.build_artifact_data datadir file
|> if_error "Error getting build artifact"
~log:(fun e -> Log.warn (fun m -> m "Error getting build artifact data for file %a in %a: %a"
Fpath.pp file.Builder_db.filepath Fpath.pp file.Builder_db.localpath
~log:(fun e -> Log.warn (fun m -> m "Error getting build artifact data for file %a: %a"
Fpath.pp file.Builder_db.filepath
pp_error e)) >>= fun data ->
let headers = [
"Content-Type", mime_lookup file.Builder_db.filepath;
@ -471,13 +482,19 @@ let routes ~datadir ~cachedir ~configdir =
|> Option.to_result ~none:(`Msg "bad finish time") |> Result.map Int64.of_int
|> Lwt.return |> if_error "Internal server error" >>= fun finish ->
Dream.stream ~headers:["Content-Type", "application/tar+gzip"]
(Dream_tar.targz_response datadir finish artifacts)
(fun stream ->
let+ r = Dream_tar.targz_response datadir finish artifacts stream in
match r with
| Ok () -> ()
| Error _ ->
Log.warn (fun m -> m "error assembling gzipped tar archive");
())
|> Lwt_result.ok
in
let upload req =
let* body = Dream.body req in
Builder.Asn.exec_of_cs (Cstruct.of_string body) |> Lwt.return
Builder.Asn.exec_of_str body |> Lwt.return
|> if_error ~status:`Bad_Request "Bad request"
~log:(fun e ->
Log.warn (fun m -> m "Received bad builder ASN.1");
@ -509,7 +526,7 @@ let routes ~datadir ~cachedir ~configdir =
Dream.query req "sha256" |> Option.to_result ~none:(`Msg "Missing sha256 query parameter")
|> Lwt.return
|> if_error ~status:`Bad_Request "Bad request" >>= fun hash_hex ->
begin try Hex.to_cstruct (`Hex hash_hex) |> Lwt_result.return
begin try Ohex.decode hash_hex |> Lwt_result.return
with Invalid_argument e -> Lwt_result.fail (`Msg ("Bad hex: " ^ e))
end
|> if_error ~status:`Bad_Request "Bad request" >>= fun hash ->
@ -602,27 +619,11 @@ let routes ~datadir ~cachedir ~configdir =
>>= fun () -> Dream.respond "" |> Lwt_result.ok
in
let redirect_parent req =
let queries = Dream.all_queries req in
let parent_url =
let parent_path =
Dream.target req
|> Utils.Path.of_url
|> List.rev |> List.tl |> List.rev
in
Utils.Path.to_url ~path:parent_path ~queries
in
Dream.redirect ~status:`Temporary_Redirect req parent_url
|> Lwt_result.ok
in
let w f req = or_error_response (f req) in
[
`Get, "/", (w builds);
`Get, "/job", (w redirect_parent);
`Get, "/", (w (builds ~all:false ~filter_builds_later_than:expired_jobs));
`Get, "/job/:job", (w job);
`Get, "/job/:job/build", (w redirect_parent);
`Get, "/job/:job/failed", (w job_with_failed);
`Get, "/job/:job/build/latest/**", (w redirect_latest);
`Get, "/job/:job/build/latest", (w redirect_latest_no_slash);
@ -633,8 +634,9 @@ let routes ~datadir ~cachedir ~configdir =
`Get, "/job/:job/build/:build/vizdependencies", (w @@ job_build_viz `Dependencies);
`Get, "/job/:job/build/:build/script", (w (job_build_static_file `Script));
`Get, "/job/:job/build/:build/console", (w (job_build_static_file `Console));
`Get, "/failed-builds", (w failed_builds);
`Get, "/job/:job/build/:build/all.tar.gz", (w job_build_targz);
`Get, "/failed-builds", (w failed_builds);
`Get, "/all-builds", (w (builds ~all:true));
`Get, "/hash", (w hash);
`Get, "/compare/:build_left/:build_right", (w compare_builds);
`Post, "/upload", (Authorization.authenticate (w upload));
@ -670,7 +672,7 @@ module Middleware = struct
let queries = Dream.all_queries req in
let url = Utils.Path.to_url ~path ~queries in
(*> Note: See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Location*)
Dream.redirect ~status:`Permanent_Redirect req url
Dream.redirect ~status:`Moved_Permanently req url
| _ (* /... *) -> handler req
end

View file

@ -1,47 +1,36 @@
open Lwt.Infix
module High : sig
type t
type 'a s = 'a Lwt.t
module Writer = struct
type out_channel =
{ mutable gz : Gz.Def.encoder
; ic : Cstruct.t
; oc : Cstruct.t
; stream : Dream.stream }
external inj : 'a s -> ('a, t) Tar.io = "%identity"
external prj : ('a, t) Tar.io -> 'a s = "%identity"
end = struct
type t
type 'a s = 'a Lwt.t
type 'a t = 'a Lwt.t
let really_write ({ oc; stream; _ } as state) cs =
let rec until_await gz =
match Gz.Def.encode gz with
| `Await gz -> state.gz <- gz ; Lwt.return_unit
| `Flush gz ->
let max = Cstruct.length oc - Gz.Def.dst_rem gz in
let str = Cstruct.copy oc 0 max in
Dream.write stream str >>= fun () ->
let { Cstruct.buffer; off= cs_off; len= cs_len; } = oc in
until_await (Gz.Def.dst gz buffer cs_off cs_len)
| `End _gz -> assert false in
if Cstruct.length cs = 0
then Lwt.return_unit
else ( let { Cstruct.buffer; off; len; } = cs in
let gz = Gz.Def.src state.gz buffer off len in
until_await gz )
external inj : 'a -> 'b = "%identity"
external prj : 'a -> 'b = "%identity"
end
module HW = Tar.HeaderWriter(Lwt)(Writer)
let value v = Tar.High (High.inj v)
let write_block (header : Tar.Header.t) lpath ({ Writer.ic= buf; _ } as state) =
HW.write ~level:Tar.Header.Ustar header state >>= fun () ->
Lwt_io.open_file ~mode:Lwt_io.Input (Fpath.to_string lpath) >>= fun ic ->
let rec loop () =
let { Cstruct.buffer; off; len; } = buf in
Lwt_io.read_into_bigstring ic buffer off len >>= function
| 0 -> Lwt.return ()
| len' ->
Writer.really_write state (Cstruct.sub buf 0 len') >>= fun () ->
loop ()
let ok_value v = value (Lwt_result.ok v)
let run t stream =
let rec run : type a. (a, 'err, High.t) Tar.t -> (a, 'err) result Lwt.t =
function
| Tar.Write str ->
(* Can this not fail?!? Obviously, it can, but we never know?? *)
Lwt_result.ok (Dream.write stream str)
| Tar.Seek _ | Tar.Read _ | Tar.Really_read _ -> assert false
| Tar.Return value -> Lwt.return value
| Tar.High value -> High.prj value
| Tar.Bind (x, f) ->
let open Lwt_result.Syntax in
let* v = run x in
run (f v)
in
loop () >>= fun () ->
Writer.really_write state (Tar.Header.zero_padding header)
run t
let header_of_file mod_time (file : Builder_db.file) =
let file_mode = if Fpath.is_prefix Fpath.(v "bin/") file.filepath then
@ -51,38 +40,53 @@ let header_of_file mod_time (file : Builder_db.file) =
in
Tar.Header.make ~file_mode ~mod_time (Fpath.to_string file.filepath) (Int64.of_int file.size)
let targz_response datadir finish (files : Builder_db.file list) (stream : Dream.stream) =
let state =
let ic = Cstruct.create (4 * 4 * 1024) in
let oc = Cstruct.create 4096 in
let gz =
let w = De.Lz77.make_window ~bits:15 in
let q = De.Queue.create 0x1000 in
let mtime = Int32.of_float (Unix.gettimeofday ()) in
let gz = Gz.Def.encoder `Manual `Manual ~mtime Gz.Unix ~q ~w ~level:4 in
let { Cstruct.buffer; off; len; } = oc in
Gz.Def.dst gz buffer off len
let contents datadir file : unit -> (string option, _, _) Tar.t =
let state = ref `Initial in
let dispenser () =
let ( let* ) = Tar.( let* ) in
let src = Fpath.append datadir (Model.artifact_path file) in
let* state' =
match !state with
| `Initial ->
let* fd = ok_value (Lwt_io.open_file ~mode:Lwt_io.Input (Fpath.to_string src)) in
let s = `Active fd in
state := s; Tar.return (Ok s)
| `Active _ | `Closed as s -> Tar.return (Ok s)
in
{ Writer.gz; ic; oc; stream; }
match state' with
| `Closed -> Tar.return (Ok None)
| `Active fd ->
let* data = ok_value (Lwt_io.read ~count:65536 fd) in
if String.length data = 0 then begin
state := `Closed;
let* () = ok_value (Lwt_io.close fd) in
Tar.return (Ok None)
end else
Tar.return (Ok (Some data))
in
Lwt_list.iter_s (fun file ->
let hdr = header_of_file finish file in
write_block hdr Fpath.(datadir // file.localpath) state)
files >>= fun () ->
Writer.really_write state Tar.Header.zero_block >>= fun () ->
Writer.really_write state Tar.Header.zero_block >>= fun () ->
(* assert (Gz.Def.encode gz = `Await) *)
let rec until_end gz = match Gz.Def.encode gz with
| `Await _gz -> assert false
| `Flush gz | `End gz as flush_or_end ->
let max = Cstruct.length state.oc - Gz.Def.dst_rem gz in
let str = Cstruct.copy state.oc 0 max in
Dream.write stream str >>= fun () -> match flush_or_end with
| `Flush gz ->
let { Cstruct.buffer; off= cs_off; len= cs_len; } = state.oc in
until_end (Gz.Def.dst gz buffer cs_off cs_len)
| `End _ -> Lwt.return_unit
dispenser
let entries datadir finish files =
let files =
List.map (fun file ->
let hdr = header_of_file finish file in
let level = Some Tar.Header.Posix in
(level, hdr, contents datadir file)
)
files
in
until_end (Gz.Def.src state.gz De.bigstring_empty 0 0) >>= fun () ->
Dream.flush stream >>= fun () ->
Dream.close stream
let files = ref files in
fun () -> match !files with
| [] -> Tar.return (Ok None)
| f :: fs -> files := fs; Tar.return (Ok (Some f))
let targz_response datadir finish files stream =
let entries : (_, _) Tar.entries = entries datadir finish files in
let global_hdr =
Tar.Header.Extended.make
~comment:"Tar file produced by builder-web.%%VERSION_NUM%%"
()
in
let finish32 = Int64.to_int32 finish in
Logs.err (fun m -> m "finished at %ld (%Ld)" finish32 finish);
run (Tar_gz.out_gzipped ~level:9 ~mtime:finish32 Gz.Unix (Tar.out ~global_hdr entries)) stream

View file

@ -1,21 +1,5 @@
(library
(name builder_web)
(libraries
builder
builder_db
dream
tyxml
bos
duration
hex
caqti-lwt
opamdiff
ptime.clock.os
omd
tar
owee
solo5-elftool
decompress.de
decompress.gz
uri
))
(libraries builder builder_db dream tyxml bos duration ohex caqti-lwt
opamdiff ptime.clock.os cmarkit tar tar.gz tar-unix owee solo5-elftool decompress.de
decompress.gz uri digestif))

View file

@ -19,6 +19,14 @@ let not_found = function
| Some v -> Lwt_result.return v
let staging datadir = Fpath.(datadir / "_staging")
let artifact_path artifact =
let sha256 = Ohex.encode artifact.Builder_db.sha256 in
(* NOTE: [sha256] is 64 characters when it's a hex sha256 checksum *)
(* NOTE: We add the prefix to reduce the number of files in a directory - a
workaround for inferior filesystems. We can easily revert this by changing
this function and adding a migration. *)
let prefix = String.sub sha256 0 2 in
Fpath.(v "_artifacts" / prefix / sha256)
let read_file datadir filepath =
let filepath = Fpath.(datadir // filepath) in
@ -34,7 +42,7 @@ let read_file datadir filepath =
Log.warn (fun m -> m "Error reading local file %a: %s"
Fpath.pp filepath (Unix.error_message e));
Lwt.return_error (`File_error filepath)
| e -> Lwt.fail e)
| e -> Lwt.reraise e)
let build_artifact build filepath (module Db : CONN) =
Db.find_opt Builder_db.Build_artifact.get_by_build_uuid (build, filepath)
@ -44,14 +52,14 @@ let build_artifact_by_id id (module Db : CONN) =
Db.find Builder_db.Build_artifact.get id
let build_artifact_data datadir file =
read_file datadir file.Builder_db.localpath
read_file datadir (artifact_path file)
let build_artifacts build (module Db : CONN) =
Db.collect_list Builder_db.Build_artifact.get_all_by_build build >|=
List.map snd
let solo5_manifest datadir file =
let buf = Owee_buf.map_binary Fpath.(to_string (datadir // file.Builder_db.localpath)) in
let buf = Owee_buf.map_binary Fpath.(to_string (datadir // artifact_path file)) in
Solo5_elftool.query_manifest buf |> Result.to_option
let platforms_of_job id (module Db : CONN) =
@ -196,46 +204,42 @@ let cleanup_staging datadir (module Db : Caqti_lwt.CONNECTION) =
(cleanup_staged staged))
stageds
let save file data =
let save path data =
let open Lwt.Infix in
Lwt.catch
(fun () ->
Lwt_io.open_file ~mode:Lwt_io.Output (Fpath.to_string file) >>= fun oc ->
Lwt_io.open_file ~mode:Lwt_io.Output (Fpath.to_string path) >>= fun oc ->
Lwt_io.write oc data >>= fun () ->
Lwt_io.close oc
|> Lwt_result.ok)
(function
| Unix.Unix_error (e, _, _) ->
Lwt_result.fail (`Msg (Unix.error_message e))
| e -> Lwt.fail e)
| e -> Lwt.reraise e)
let save_file dir staging (filepath, data) =
let size = String.length data in
let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
let localpath = Fpath.append dir filepath in
let destpath = Fpath.append staging filepath in
Lwt_result.lift (Bos.OS.Dir.create (Fpath.parent destpath)) >>= fun _ ->
save destpath data >|= fun () ->
{ Builder_db.filepath; localpath; sha256; size }
let save_files dir staging files =
let save_artifacts staging artifacts =
List.fold_left
(fun r file ->
r >>= fun acc ->
save_file dir staging file >>= fun file ->
Lwt_result.return (file :: acc))
(Lwt_result.return [])
files
(fun r (file, data) ->
r >>= fun () ->
let sha256 = Ohex.encode file.Builder_db.sha256 in
let destpath = Fpath.(staging / sha256) in
save destpath data)
(Lwt_result.return ())
artifacts
let save_all staging_dir (job : Builder.script_job) uuid artifacts =
let build_dir = Fpath.(v job.Builder.name / Uuidm.to_string uuid) in
let output_dir = Fpath.(build_dir / "output")
and staging_output_dir = Fpath.(staging_dir / "output") in
Lwt.return (Bos.OS.Dir.create staging_output_dir) >>= fun _ ->
save_files output_dir staging_output_dir artifacts >>= fun artifacts ->
Lwt_result.return artifacts
let commit_files datadir staging_dir job_name uuid =
let commit_files datadir staging_dir job_name uuid artifacts =
(* First we move the artifacts *)
List.fold_left
(fun r artifact ->
r >>= fun () ->
let sha256 = Ohex.encode artifact.Builder_db.sha256 in
let src = Fpath.(staging_dir / sha256) in
let dest = Fpath.(datadir // artifact_path artifact) in
Lwt.return (Bos.OS.Dir.create (Fpath.parent dest)) >>= fun _created ->
Lwt.return (Bos.OS.Path.move ~force:true src dest))
(Lwt_result.return ())
artifacts >>= fun () ->
(* Now the staging dir only contains script & console *)
let job_dir = Fpath.(datadir / job_name) in
let dest = Fpath.(job_dir / Uuidm.to_string uuid) in
Lwt.return (Bos.OS.Dir.create job_dir) >>= fun _ ->
@ -249,8 +253,15 @@ let infer_section_and_synopsis artifacts =
in
let infer_section switch root =
let root_pkg = root.OpamPackage.name in
let is_unikernel =
(* since mirage 4.2.0, the x-mirage-opam-lock-location is emitted *)
Option.value ~default:false
(Option.map (fun opam ->
Option.is_some (OpamFile.OPAM.extended opam "x-mirage-opam-lock-location" Fun.id))
(OpamPackage.Name.Map.find_opt root_pkg switch.OpamFile.SwitchExport.overlays))
in
let root_pkg_name = OpamPackage.Name.to_string root_pkg in
if Astring.String.is_prefix ~affix:"mirage-unikernel-" root_pkg_name then
if is_unikernel || Astring.String.is_prefix ~affix:"mirage-unikernel-" root_pkg_name then
let metrics_influx =
let influx = OpamPackage.Name.of_string "metrics-influx" in
OpamPackage.Set.exists (fun p -> OpamPackage.Name.equal p.OpamPackage.name influx)
@ -295,7 +306,8 @@ let compute_input_id artifacts =
get_hash (Fpath.v "build-environment"),
get_hash (Fpath.v "system-packages")
with
| Some a, Some b, Some c -> Some (Mirage_crypto.Hash.SHA256.digest (Cstruct.concat [a;b;c]))
| Some a, Some b, Some c ->
Some Digestif.SHA256.(to_raw_string (digestv_string [a;b;c]))
| _ -> None
let save_console_and_script staging_dir job_name uuid console script =
@ -317,6 +329,25 @@ let prepare_staging staging_dir =
then Lwt_result.fail (`Msg "build directory already exists")
else Lwt_result.return ()
(* saving:
- for each artifact compute its sha256 checksum -- calling Lwt.pause in
between
- lookup artifact sha256 in the database and filter them out of the list: not_in_db
- mkdir -p _staging/uuid/
- save console & script to _staging/uuid/
- save each artifact in not_in_db as _staging/uuid/sha256
committing:
- for each artifact mv _staging/uuid/sha256 _artifacts/sha256
(or _artifacts/prefix(sha256)/sha256 where prefix(sha256) is the first two hex digits in sha256)
- now _staging/uuid only contains console & script so we mv _staging/uuid _staging/job/uuid
potential issues:
- race condition in uploading same artifact:
* if the artifact already exists in the database and thus filesystem then nothing is done
* if the artifact is added to the database and/or filesystem we atomically overwrite it
- input_id depends on a sort order?
*)
let add_build
~datadir
~cachedir
@ -337,16 +368,35 @@ let add_build
e)
x
in
let artifacts_to_preserve =
let not_interesting p =
String.equal (Fpath.basename p) "README.md" || String.equal (Fpath.get_ext p) ".build-hashes"
in
List.filter (fun (p, _) -> not (not_interesting p)) raw_artifacts
let not_interesting p =
String.equal (Fpath.basename p) "README.md" || String.equal (Fpath.get_ext p) ".build-hashes"
in
begin
List.fold_left
(fun r (filepath, data) ->
r >>= fun acc ->
if not_interesting filepath then
Lwt_result.return acc
else
let sha256 = Digestif.SHA256.(to_raw_string (digest_string data))
and size = String.length data in
Lwt_result.ok (Lwt.pause ()) >|= fun () ->
({ filepath; sha256; size }, data) :: acc)
(Lwt_result.return [])
raw_artifacts
end >>= fun artifacts ->
or_cleanup (prepare_staging staging_dir) >>= fun () ->
or_cleanup (save_console_and_script staging_dir job_name uuid console job.Builder.script)
>>= fun (console, script) ->
or_cleanup (save_all staging_dir job uuid artifacts_to_preserve) >>= fun artifacts ->
List.fold_left
(fun r ((f, _) as artifact) ->
r >>= fun acc ->
Db.find Builder_db.Build_artifact.exists f.sha256 >|= fun exists ->
if exists then acc else artifact :: acc)
(Lwt_result.return [])
artifacts >>= fun artifacts_to_save ->
or_cleanup (save_artifacts staging_dir artifacts_to_save) >>= fun () ->
let artifacts = List.map fst artifacts in
let r =
Db.start () >>= fun () ->
Db.exec Job.try_add job_name >>= fun () ->
@ -414,8 +464,8 @@ let add_build
Db.exec Build_artifact.add (file, id))
(Lwt_result.return ())
remaining_artifacts_to_add >>= fun () ->
Db.commit () >>= fun () ->
commit_files datadir staging_dir job_name uuid >|= fun () ->
commit_files datadir staging_dir job_name uuid (List.map fst artifacts_to_save) >>= fun () ->
Db.commit () >|= fun () ->
main_binary
in
Lwt_result.bind_lwt_error (or_cleanup r)
@ -434,7 +484,7 @@ let add_build
and uuid = Uuidm.to_string uuid
and job = job.name
and platform = job.platform
and `Hex sha256 = Hex.of_cstruct main_binary.sha256
and sha256 = Ohex.encode main_binary.sha256
in
let fp_str p = Fpath.(to_string (datadir // p)) in
let args =
@ -444,7 +494,8 @@ let add_build
"--uuid=" ^ uuid ; "--platform=" ^ platform ;
"--cache-dir=" ^ Fpath.to_string cachedir ;
"--data-dir=" ^ Fpath.to_string datadir ;
fp_str main_binary.localpath ])
"--main-binary-filepath=" ^ Fpath.to_string main_binary.filepath ;
fp_str Fpath.(datadir // artifact_path main_binary) ])
in
Log.debug (fun m -> m "executing hooks with %s" args);
let dir = Fpath.(configdir / "upload-hooks") in

View file

@ -5,6 +5,7 @@ val pp_error : Format.formatter -> error -> unit
val not_found : 'a option -> ('a, [> `Not_found ]) result Lwt.t
val staging : Fpath.t -> Fpath.t
val artifact_path : Builder_db.file -> Fpath.t
val cleanup_staging : Fpath.t -> Caqti_lwt.connection ->
(unit, [> `Msg of string ]) result Lwt.t
@ -30,9 +31,9 @@ val build : Uuidm.t -> Caqti_lwt.connection ->
([`build] Builder_db.id * Builder_db.Build.t, [> error ]) result Lwt.t
val build_with_main_binary : [`job] Builder_db.id -> string -> Caqti_lwt.connection ->
((Builder_db.Build.t * Builder_db.file option) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
((Builder_db.Build.t * Builder_db.file) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
val build_hash : Cstruct.t -> Caqti_lwt.connection ->
val build_hash : string -> Caqti_lwt.connection ->
((string * Builder_db.Build.t) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
val build_exists : Uuidm.t -> Caqti_lwt.connection ->

View file

@ -45,85 +45,30 @@ let compare_pkgs p1 p2 =
in
diff_map (parse_pkgs p1) (parse_pkgs p2)
module Omd = struct
let make_safe omd =
let rec safe_block = function
| Omd.Paragraph (attr, inline) ->
safe_inline inline
|> Option.map (fun inline -> Omd.Paragraph (attr, inline))
| Omd.List (attr, typ, spacing, blocks) ->
let blocks = List.filter_map (fun b ->
let b = List.filter_map safe_block b in
if b = [] then None else Some b)
blocks
in
if blocks = [] then None else
Some (Omd.List (attr, typ, spacing, blocks))
| Omd.Blockquote (attr, blocks) ->
let blocks = List.filter_map safe_block blocks in
if blocks = [] then None else
Some (Omd.Blockquote (attr, blocks))
| Omd.Heading (attr, level, inline) ->
safe_inline inline
|> Option.map (fun inline -> Omd.Heading (attr, level, inline))
| Omd.Html_block _ -> None
| Omd.Definition_list (attr, def_elts) ->
let def_elts = List.filter_map safe_def_elts def_elts in
if def_elts = [] then None else
Some (Omd.Definition_list (attr, def_elts))
| Omd.Code_block _
| Omd.Thematic_break _ as v -> Some v
and safe_def_elts { term ; defs } =
let defs = List.filter_map safe_inline defs in
safe_inline term
|> Option.map (fun term -> { Omd.term ; defs })
and safe_inline = function
| Concat (attr, inline) ->
Some (Concat (attr, List.filter_map safe_inline inline))
| Emph (attr, inline) ->
safe_inline inline
|> Option.map (fun inline -> Omd.Emph (attr, inline))
| Strong (attr, inline) ->
safe_inline inline
|> Option.map (fun inline -> Omd.Strong (attr, inline))
| Link (attr, link) ->
begin match safe_link link with
| `No_label | `Relative -> safe_inline link.Omd.label
| `Link l -> Some (Omd.Link (attr, l))
end
| Image (attr, link) ->
begin match safe_link link with
| `No_label | `Relative -> None
| `Link l -> Some (Omd.Image (attr, l))
end
| Html _ -> None
| Text _
| Code _
| Hard_break _
| Soft_break _ as v -> Some v
and safe_link ({ label ; destination ; _ } as l) =
let absolute_link =
String.(length destination >= 2 && equal (sub destination 0 2) "//") ||
String.(length destination >= 7 && equal (sub destination 0 7) "http://") ||
String.(length destination >= 8 && equal (sub destination 0 8) "https://")
in
if absolute_link then
match safe_inline label with
| None -> `No_label
| Some label -> `Link { l with label }
else
`Relative
in
List.filter_map safe_block omd
let html_of_string markdown =
markdown
|> Omd.of_string
|> make_safe
|> Omd.to_html
end
let md_to_html ?adjust_heading ?(safe = true) data =
let open Cmarkit in
let doc = Doc.of_string ~strict:false ~heading_auto_ids:true data in
let doc =
Option.fold ~none:doc
~some:(fun lvl ->
let block _m = function
| Block.Heading (h, meta) ->
let open Block.Heading in
let level = level h
and id = id h
and layout = layout h
and inline = inline h
in
let h' = make ?id ~layout ~level:(level + lvl) inline in
Mapper.ret (Block.Heading (h', meta))
| Block.Blocks _ -> Mapper.default
| x -> Mapper.ret x
in
let mapper = Mapper.make ~block () in
Mapper.map_doc mapper doc)
adjust_heading
in
Cmarkit_html.of_doc ~safe doc
module Path = struct

View file

@ -98,7 +98,7 @@ let make_breadcrumbs nav =
txtf "Job %s" job_name, Link.Job.make ~job_name ();
(
txtf "%a" pp_platform platform,
Link.Job.make ~job_name ~queries ()
Link.Job.make ~job_name ~queries ()
)
]
| `Build (job_name, build) ->
@ -122,7 +122,7 @@ let make_breadcrumbs nav =
txtf "Comparison between %s@%a and %s@%a"
job_left pp_ptime build_left.Builder_db.Build.start
job_right pp_ptime build_right.Builder_db.Build.start,
Link.Compare_builds.make
Link.Compare_builds.make
~left:build_left.uuid
~right:build_right.uuid ()
);
@ -188,7 +188,7 @@ let artifact
~basename
~job_name
~build
~file:{ Builder_db.filepath; localpath = _; sha256; size }
~file:{ Builder_db.filepath; sha256; size }
=
let artifact_link =
Link.Job_build_artifact.make
@ -202,7 +202,7 @@ let artifact
else txtf "%a" Fpath.pp filepath
];
H.txt " ";
H.code [txtf "SHA256:%a" Hex.pp (Hex.of_cstruct sha256)];
H.code [txtf "SHA256:%s" (Ohex.encode sha256)];
txtf " (%a)" Fmt.byte_size size;
]
@ -218,7 +218,7 @@ let page_not_found ~target ~referer =
| None -> []
| Some prev_url -> [
H.p [
H.txt "Go back to ";
H.txt "Go back to ";
H.a ~a:H.[ a_href prev_url ] [ H.txt prev_url ];
];
]
@ -274,9 +274,9 @@ The filename suffix of the unikernel binary indicate the expected execution envi
A persistent link to the latest successful build is available as
`/job/*jobname*/build/latest/`. Each build can be reproduced with
[orb](https://github.com/roburio/orb/). The builds are scheduled and executed
daily by [builder](https://github.com/roburio/builder/). This web interface is
[builder-web](https://git.robur.io/robur/builder-web/). Read further information
[orb](https://github.com/robur-coop/orb/). The builds are scheduled and executed
daily by [builder](https://github.com/robur-coop/builder/). This web interface is
[builder-web](https://git.robur.coop/robur/builder-web/). Read further information
[on our project page](https://robur.coop/Projects/Reproducible_builds). This
work has been funded by the European Union under the
[NGI Pointer](https://pointer.ngi.eu) program. Contact team ATrobur.coop if you
@ -285,7 +285,7 @@ have questions or suggestions.
let make_header =
[
H.Unsafe.data (Utils.Omd.html_of_string data);
H.Unsafe.data (Utils.md_to_html data);
H.form ~a:H.[a_action "/hash"; a_method `Get] [
H.label [
H.txt "Search artifact by SHA256";
@ -319,18 +319,13 @@ have questions or suggestions.
~build:latest_build.Builder_db.Build.uuid ()]
[txtf "%a" pp_ptime latest_build.Builder_db.Build.start];
H.txt " ";
]
@ (match latest_artifact with
| Some main_binary ->
artifact
~basename:true
~job_name
~build:latest_build
~file:main_binary
| None ->
[ txtf "Build failure: %a" Builder.pp_execution_result
latest_build.Builder_db.Build.result ]
)
@ artifact
~basename:true
~job_name
~build:latest_build
~file:latest_artifact
@ [ H.br () ]
let make_jobs jobs =
@ -361,14 +356,23 @@ have questions or suggestions.
H.txt "View the latest failed builds ";
H.a ~a:H.[a_href "/failed-builds"]
[H.txt "here"];
H.txt "."
H.txt ".";
]]
let make section_job_map =
let make_all_or_active all =
[ H.p [
H.txt (if all then "View active jobs " else "View all jobs ");
H.a ~a:H.[a_href (if all then "/" else "/all-builds")]
[H.txt "here"];
H.txt ".";
]]
let make ~all section_job_map =
layout ~title:"Reproducible OPAM builds"
(make_header
@ make_body section_job_map
@ make_failed_builds)
@ make_failed_builds
@ make_all_or_active all)
end
@ -383,7 +387,7 @@ module Job = struct
[
H.h2 ~a:H.[a_id "readme"] [H.txt "README"];
H.a ~a:H.[a_href "#builds"] [H.txt "Skip to builds"];
H.Unsafe.data (Utils.Omd.html_of_string data)
H.Unsafe.data (Utils.md_to_html ~adjust_heading:2 data)
]
)
@ -393,7 +397,7 @@ module Job = struct
check_icon build.Builder_db.Build.result;
txtf " %s " build.platform;
H.a ~a:H.[
a_href @@ Link.Job_build.make
a_href @@ Link.Job_build.make
~job_name
~build:build.Builder_db.Build.uuid () ]
[
@ -431,7 +435,7 @@ module Job = struct
H.txt "." ]
else
H.p [
H.txt "Including failed builds " ;
H.txt "Including failed builds " ;
H.a ~a:H.[
a_href @@ Link.Job.make_failed ~job_name ~queries ()
]
@ -487,7 +491,7 @@ module Job_build = struct
pp_devices block_devices pp_devices net_devices]
in
let aux (file:Builder_db.file) =
let (`Hex sha256_hex) = Hex.of_cstruct file.sha256 in
let sha256_hex = Ohex.encode file.sha256 in
[
H.dt [
H.a ~a:H.[a_href @@ Link.Job_build_artifact.make
@ -582,7 +586,7 @@ module Job_build = struct
| Some b when not (Uuidm.equal build.uuid b.Builder_db.Build.uuid) ->
[ H.li [ H.txt ctx;
H.a ~a:[
H.a_href @@ Link.Compare_builds.make
H.a_href @@ Link.Compare_builds.make
~left:b.uuid
~right:build.uuid () ]
[txtf "%a" pp_ptime b.start]]
@ -679,10 +683,10 @@ module Job_build = struct
font-weight: bold;\
"
]
let make_viz_section ~job_name ~artifacts ~uuid =
let viz_deps =
let iframe =
let viz_deps =
let iframe =
let src = Link.Job_build_artifact.make ~job_name ~build:uuid
~artifact:`Viz_dependencies () in
H.iframe ~a:H.[
@ -693,11 +697,11 @@ module Job_build = struct
in
let descr_txt = "\
This is an interactive visualization of dependencies, \
focusing on how shared dependencies are.
focusing on how shared dependencies are.
In the middle you see the primary package. \
Edges shoot out to its direct \
dependencies, including build dependencies.
dependencies, including build dependencies.
From these direct dependencies, edges shoot out to sets \
of their own respective direct dependencies. \
@ -714,7 +718,7 @@ dependency.\
[ iframe; H.br (); make_description descr_txt ]
in
let viz_treemap = lazy (
let iframe =
let iframe =
let src = Link.Job_build_artifact.make ~job_name ~build:uuid
~artifact:`Viz_treemap () in
H.iframe ~a:H.[
@ -726,7 +730,7 @@ dependency.\
let descr_txt = "\
This interactive treemap shows the space-usage of modules/libraries inside the \
ELF binary. You can get more info from each block by \
hovering over them.
hovering over them.
On top of the treemap there is a scale, showing how much space the \
treemap itself constitutes of the binary, the excluded symbols/modules \
@ -860,7 +864,7 @@ let compare_builds
~(build_right : Builder_db.Build.t)
~env_diff:(added_env, removed_env, changed_env)
~pkg_diff:(added_pkgs, removed_pkgs, changed_pkgs)
~opam_diff:(opam_diff, version_diff, left, right, duniverse_content_diff, duniverse_left, duniverse_right)
~opam_diff:(opam_diff, version_diff, left, right, duniverse)
=
let items, data =
List.fold_left (fun (items, data) (id, txt, amount, code) ->
@ -871,33 +875,39 @@ let compare_builds
H.li [ H.a ~a:[H.a_href id_href] [txtf "%d %s" amount txt] ] :: items,
data @ H.h3 ~a:[H.a_id id] [H.txt txt] :: code)
([], [])
[ ("opam-packages-removed", "Opam packages removed",
OpamPackage.Set.cardinal left, [ H.code (packages left) ]) ;
("opam-packages-installede", "New opam packages installed",
OpamPackage.Set.cardinal right, [ H.code (packages right) ]) ;
("opam-packages-version-diff", "Opam packages with version changes",
List.length version_diff, [ H.code (package_diffs version_diff) ]) ;
("duniverse-dirs-removed", "Duniverse directories removed",
List.length duniverse_left, [ H.code (duniverse_dirs duniverse_left) ]) ;
("duniverse-dirs-installed", "New duniverse directories installed",
List.length duniverse_right, [ H.code (duniverse_dirs duniverse_right) ]) ;
("duniverse-dirs-content-diff", "Duniverse directories with content changes",
List.length duniverse_content_diff, [ H.code (duniverse_diffs duniverse_content_diff) ]) ;
("opam-packages-opam-diff", "Opam packages with changes in their opam file",
List.length opam_diff, opam_diffs opam_diff) ;
("env-removed", "Environment variables removed",
List.length removed_env, [ H.code (key_values removed_env) ]) ;
("env-added", "New environment variables added",
List.length added_env, [ H.code (key_values added_env) ]) ;
("env-changed", "Environment variables changed",
List.length changed_env, [ H.code (key_value_changes changed_env) ]) ;
("pkgs-removed", "System packages removed",
List.length removed_pkgs, [ H.code (key_values removed_pkgs) ]) ;
("pkgs-added", "New system packages added",
List.length added_pkgs, [ H.code (key_values added_pkgs) ]) ;
("pkgs-changed", "System packages changed",
List.length changed_pkgs, [ H.code (key_value_changes changed_pkgs) ]) ;
]
([ ("opam-packages-removed", "Opam packages removed",
OpamPackage.Set.cardinal left, [ H.code (packages left) ]) ;
("opam-packages-installede", "New opam packages installed",
OpamPackage.Set.cardinal right, [ H.code (packages right) ]) ;
("opam-packages-version-diff", "Opam packages with version changes",
List.length version_diff, [ H.code (package_diffs version_diff) ]) ;
] @ (match duniverse with
| Ok (duniverse_left, duniverse_right, duniverse_content_diff) ->
[
("duniverse-dirs-removed", "Duniverse directories removed",
List.length duniverse_left, [ H.code (duniverse_dirs duniverse_left) ]) ;
("duniverse-dirs-installed", "New duniverse directories installed",
List.length duniverse_right, [ H.code (duniverse_dirs duniverse_right) ]) ;
("duniverse-dirs-content-diff", "Duniverse directories with content changes",
List.length duniverse_content_diff, [ H.code (duniverse_diffs duniverse_content_diff) ]) ;
]
| Error `Msg msg -> [ "duniverse-dirs-error", "Duniverse parsing error", 1, [ H.txt msg ] ]
) @ [
("opam-packages-opam-diff", "Opam packages with changes in their opam file",
List.length opam_diff, opam_diffs opam_diff) ;
("env-removed", "Environment variables removed",
List.length removed_env, [ H.code (key_values removed_env) ]) ;
("env-added", "New environment variables added",
List.length added_env, [ H.code (key_values added_env) ]) ;
("env-changed", "Environment variables changed",
List.length changed_env, [ H.code (key_value_changes changed_env) ]) ;
("pkgs-removed", "System packages removed",
List.length removed_pkgs, [ H.code (key_values removed_pkgs) ]) ;
("pkgs-added", "New system packages added",
List.length added_pkgs, [ H.code (key_values added_pkgs) ]) ;
("pkgs-changed", "System packages changed",
List.length changed_pkgs, [ H.code (key_value_changes changed_pkgs) ]) ;
])
in
layout
~nav:(`Comparison ((job_left, build_left), (job_right, build_right)))

View file

@ -1,7 +1,5 @@
module Set = OpamPackage.Set
type package = OpamPackage.t
let packages (switch : OpamFile.SwitchExport.t) =
assert (Set.cardinal switch.selections.sel_pinned = 0);
assert (Set.cardinal switch.selections.sel_compiler = 0);
@ -39,7 +37,11 @@ let duniverse_dirs_data =
in
let* dir = string ~ctx:"directory" dir in
Ok (url, dir, List.rev hashes)
| _ -> Error (`Msg "expected a string or identifier")
| { pelem = List { pelem = [ url ; dir ] ; _ } ; _ } ->
let* url = string ~ctx:"url" url in
let* dir = string ~ctx:"directory" dir in
Ok (url, dir, [])
| _ -> Error (`Msg "expected a list of URL, DIR, [HASHES]")
in
function
| { pelem = List { pelem = lbody ; _ } ; _ } ->
@ -54,15 +56,15 @@ let duniverse (switch : OpamFile.SwitchExport.t) =
let root = switch.OpamFile.SwitchExport.selections.OpamTypes.sel_roots in
if OpamPackage.Set.cardinal root = 1 then
let root = OpamPackage.Set.choose root in
Option.bind
OpamPackage.(Name.Map.find_opt root.name switch.OpamFile.SwitchExport.overlays)
(fun opam ->
match OpamFile.OPAM.extended opam duniverse_dir duniverse_dirs_data with
| None -> None
| Some Error _ -> None
| Some Ok v -> Some v)
match OpamPackage.(Name.Map.find_opt root.name switch.OpamFile.SwitchExport.overlays) with
| None -> Error (`Msg "opam switch export doesn't contain the main package")
| Some opam ->
match OpamFile.OPAM.extended opam duniverse_dir duniverse_dirs_data with
| None -> Ok None
| Some Error e -> Error e
| Some Ok v -> Ok (Some v)
else
None
Error (`Msg "not a single root package found in opam switch export")
type duniverse_diff = {
name : string ;
@ -89,11 +91,19 @@ let duniverse_diff l r =
let keys_l_only = ref [] and keys_r_only = ref [] and diff = ref [] in
let equal_hashes l r =
(* l and r are lists of pairs, with the hash kind and its value *)
(* for a git remote, the hashes are empty lists *)
(match l with [] -> false | _ -> true) &&
(match r with [] -> false | _ -> true) &&
List.for_all (fun (h, v) ->
match List.assoc_opt h r with
| None -> true
| None -> false
| Some v' -> String.equal v v')
l
l &&
List.for_all (fun (h, v) ->
match List.assoc_opt h l with
| None -> false
| Some v' -> String.equal v v')
r
in
let _ =
M.merge (fun key l r ->
@ -102,6 +112,7 @@ let duniverse_diff l r =
| Some _, None -> keys_l_only := key :: !keys_l_only; None
| None, None -> None
| Some (_, l), Some (_, r) when equal_hashes l r -> None
| Some (url1, []), Some (url2, []) when String.equal url1 url2 -> None
| Some l, Some r -> diff := (key, l, r) :: !diff; None)
l r
in
@ -258,8 +269,9 @@ let compare left right =
and right_pkgs = diff packages_right packages_left
in
let opam_diff = detailed_opam_diffs left right opam_diff in
let left_duniverse, right_duniverse, duniverse_diff =
duniverse_diff (duniverse left) (duniverse right)
let duniverse_ret =
match duniverse left, duniverse right with
| Ok l, Ok r -> Ok (duniverse_diff l r)
| Error _ as e, _ | _, (Error _ as e) -> e
in
(opam_diff, version_diff, left_pkgs, right_pkgs,
duniverse_diff, left_duniverse, right_duniverse)
(opam_diff, version_diff, left_pkgs, right_pkgs, duniverse_ret)

39
opamdiff/opamdiff.mli Normal file
View file

@ -0,0 +1,39 @@
type opam_diff = {
pkg : OpamPackage.t ;
build : (OpamTypes.command list * OpamTypes.command list) option ;
install : (OpamTypes.command list * OpamTypes.command list) option ;
url : (OpamFile.URL.t option * OpamFile.URL.t option) option ;
otherwise_equal : bool ;
}
type version_diff = {
name : OpamPackage.Name.t;
version_left : OpamPackage.Version.t;
version_right : OpamPackage.Version.t;
}
type duniverse_diff = {
name : string ;
urls : string * string option ;
hash : (OpamHash.kind * string option * string option) list ;
}
val pp_opampackage : Format.formatter -> OpamPackage.t -> unit
val pp_version_diff : Format.formatter -> version_diff -> unit
val pp_duniverse_diff : Format.formatter -> duniverse_diff -> unit
val pp_duniverse_dir : Format.formatter -> string * string -> unit
val pp_opam_diff : Format.formatter -> opam_diff -> unit
val commands_to_strings : OpamTypes.command list * OpamTypes.command list -> string list * string list
val opt_url_to_string : OpamFile.URL.t option * OpamFile.URL.t option -> string * string
val compare: OpamFile.SwitchExport.t ->
OpamFile.SwitchExport.t ->
opam_diff list * version_diff list * OpamPackage.Set.t * OpamPackage.Set.t * ((string * string) list * (string * string) list * duniverse_diff list, [> `Msg of string ]) result

View file

@ -32,6 +32,8 @@ Options:
Hex encoded SHA256 digest of the main binary.
--job=STRING
Job name that was built.
--main-binary-filepath=STRING
The file path of the main binary.
EOM
exit 1
}
@ -39,6 +41,7 @@ EOM
BUILD_TIME=
SHA=
JOB=
FILEPATH=
while [ $# -gt 1 ]; do
OPT="$1"
@ -53,6 +56,9 @@ while [ $# -gt 1 ]; do
--job=*)
JOB="${OPT##*=}"
;;
--main-binary-filepath=*)
FILEPATH="${OPT##*=}"
;;
--*)
warn "Ignoring unknown option: '${OPT}'"
;;
@ -67,13 +73,14 @@ done
[ -z "${BUILD_TIME}" ] && die "The --build-time option must be specified"
[ -z "${SHA}" ] && die "The --sha256 option must be specified"
[ -z "${JOB}" ] && die "The --job option must be specified"
[ -z "${FILEPATH}" ] && die "The --main-binary-filepath option must be specified"
FILENAME="${1}"
: "${REPO:="/usr/local/www/pkg"}"
: "${REPO_KEY:="/usr/local/etc/builder-web/repo.key"}"
if [ "$(basename "${FILENAME}" .pkg)" = "$(basename "${FILENAME}")" ]; then
if [ "$(basename "${FILEPATH}" .pkg)" = "$(basename "${FILEPATH}")" ]; then
echo "Not a FreeBSD package"
exit 0
fi
@ -124,6 +131,7 @@ PKG_DIR="${REPO_DIR}/All"
# and then move it before recreating the index
pkg create -r "${PKG_ROOT}" -m "${MANIFEST}" -o "${TMP}"
mkdir -p "${PKG_DIR}"
rm -f "${PKG_DIR}"/"${NAME}"-*.pkg
mv "${TMP}/${NAME}-${FULL_VERSION}.pkg" "${PKG_DIR}"
pkg repo "${REPO_DIR}" "${REPO_KEY}"

View file

@ -2,7 +2,7 @@ name: builder-web
version: %%VERSION_NUM%%
origin: local/builder-web
comment: Builder web service
www: https://git.robur.io/robur/builder-web
www: https://git.robur.coop/robur/builder-web
maintainer: Robur <team@robur.coop>
prefix: /usr/local
licenselogic: single

View file

@ -33,7 +33,7 @@ procname="/usr/local/libexec/builder-web"
builder_web_start () {
echo "Starting ${name}."
/usr/sbin/daemon -S -p "${pidfile}" -u "${builder_web_user}" \
/usr/sbin/daemon -S -r -P "${pidfile}" -u "${builder_web_user}" \
"${procname}" ${builder_web_flags}
}

View file

@ -26,7 +26,7 @@ die()
usage()
{
cat <<EOM 1>&2
usage: ${prog_NAME} [ OPTIONS ]
usage: ${prog_NAME} [ OPTIONS ]
Generates visualizations of all things
--data-dir=STRING
Path to the data directory.
@ -77,6 +77,8 @@ done
DB="${DATA_DIR}/builder.sqlite3"
[ ! -e "$DB" ] && die "The database doesn't exist: '$DB'"
# Let's be somewhat lenient with the database version.
# In visualizations.sh we can be more strict.
DB_VERSION="$(sqlite3 "$DB" "PRAGMA user_version;")"
[ -z "$DB_VERSION" ] && die "Couldn't read database version from '$DB'"
[ "$DB_VERSION" -lt 16 ] && die "The database version should be >= 16. It is '$DB_VERSION'"
@ -85,7 +87,7 @@ APP_ID="$(sqlite3 "$DB" "PRAGMA application_id;")"
[ -z "$APP_ID" ] && die "Couldn't read application-id from '$DB'"
[ "$APP_ID" -ne 1234839235 ] && die "The application-id should be = 1234839235. It is '$APP_ID'"
echo
echo
echo "-----------------------------------------------------------------------------"
info "Starting batch creation of visualizations: $(date)"
@ -127,8 +129,22 @@ fi
ATTEMPTED_VIZS=0
FAILED_VIZS=0
for i in $(find "${DATA_DIR}" -type f -path \*output/bin\*); do
UUID=$(echo "${i}" | rev | cut -d '/' -f 4 | rev)
distinct-input () {
{
sqlite3 "${DATA_DIR}/builder.sqlite3" "SELECT b.uuid
FROM build b
JOIN build_artifact opam ON opam.build = b.id
WHERE opam.filepath = 'opam-switch' AND b.main_binary NOT NULL
GROUP BY opam.sha256;"
sqlite3 "${DATA_DIR}/builder.sqlite3" "SELECT b.uuid
FROM build b
JOIN build_artifact debug ON debug.build = b.id
WHERE debug.filepath LIKE '%.debug' AND b.main_binary NOT NULL
GROUP BY debug.sha256;"
} | sort -u
}
for UUID in $(distinct-input); do
if ! "$VISUALIZATIONS_CMD" \
--data-dir="${DATA_DIR}" \
--cache-dir="${CACHE_DIR}" \

View file

@ -15,11 +15,14 @@ freebsd_sanitize_version () {
exit 1;
fi
if [ $version_with_commit -eq 0 ]; then
v="${v}.0.g0000000"
v="${v}.0.g0000000.${post}"
else
v="${v}.${post}"
fi
echo $v
}
echo "using FreeBSD pkg to compare versions now:"
while read version_a version_b; do
version_a=$(freebsd_sanitize_version $version_a)
version_b=$(freebsd_sanitize_version $version_b)
@ -27,7 +30,28 @@ while read version_a version_b; do
printf "%s %s %s\n" "$version_a" "$result" "$version_b"
done < versions.txt
debian_sanitize_version () {
post=$(echo $1 | rev | cut -d '-' -f 1-2 | rev)
v=$(echo $1 | rev | cut -d '-' -f 3- | rev)
version_good=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$')
version_with_commit=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\-[0-9]\+\-g[0-9a-fA-f]\+$')
if [ $version_good -eq 0 -a $version_with_commit -eq 0 ]; then
echo "invalid version $v";
exit 1;
fi
if [ $version_with_commit -eq 0 ]; then
v="${v}-0-g0000000-${post}"
else
v="${v}-${post}"
fi
echo $v
}
echo ""
echo "using Debian dpkg to compare versions now:"
while read version_a version_b; do
version_a=$(debian_sanitize_version $version_a)
version_b=$(debian_sanitize_version $version_b)
if dpkg --compare-versions "$version_a" lt "$version_b"; then
echo "$version_a < $version_b"
else

View file

@ -4,9 +4,9 @@ Section: unknown
Priority: optional
Maintainer: Robur Team <team@robur.coop>
Standards-Version: 4.4.1
Homepage: https://git.robur.io/robur/builder-web
Vcs-Browser: https://git.robur.io/robur/builder-web
Vcs-Git: https://git.robur.io/robur/builder-web.git
Homepage: https://git.robur.coop/robur/builder-web
Vcs-Browser: https://git.robur.coop/robur/builder-web
Vcs-Git: https://git.robur.coop/robur/builder-web.git
Architecture: all
Depends: libgmp10, libsqlite3-0, libev4, opam-graph, modulectomy
Description: Web service for storing and presenting builds.

View file

@ -1,7 +1,7 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: builder-web
Upstream-Contact: Robur Team <team@robur.coop>
Source: https://git.robur.io/robur/builder-web
Source: https://git.robur.coop/robur/builder-web
Files: *
Copyright: "Reynir Björnsson <reynir@reynir.dk>" "Hannes Mehnert <hannes@mehnert.org>"

View file

@ -36,6 +36,8 @@ Options:
Job name that was built.
--platform=STRING
Platform name on which the build was performed.
--main-binary-filepath=STRING
The file path of the main binary.
EOM
exit 1
}
@ -44,6 +46,7 @@ BUILD_TIME=
SHA=
JOB=
PLATFORM=
FILEPATH=
while [ $# -gt 1 ]; do
OPT="$1"
@ -61,6 +64,9 @@ while [ $# -gt 1 ]; do
--platform=*)
PLATFORM="${OPT##*=}"
;;
--main-binary-filepath=*)
FILEPATH="${OPT##*=}"
;;
--*)
warn "Ignoring unknown option: '${OPT}'"
;;
@ -76,10 +82,11 @@ done
[ -z "${SHA}" ] && die "The --sha256 option must be specified"
[ -z "${JOB}" ] && die "The --job option must be specified"
[ -z "${PLATFORM}" ] && die "The --platform option must be specified"
[ -z "${FILEPATH}" ] && die "The --main-binary-filepath option must be specified"
FILENAME="${1}"
if [ $(basename "${FILENAME}" .deb) = $(basename "${FILENAME}") ]; then
if [ $(basename "${FILEPATH}" .deb) = $(basename "${FILEPATH}") ]; then
echo "Not a Debian package"
exit 0
fi
@ -104,6 +111,16 @@ mkdir "${PKG_ROOT}"
dpkg-deb -R "${FILENAME}" "${PKG_ROOT}"
VERSION=$(dpkg-deb -f "${FILENAME}" Version)
# if we've a tagged version (1.5.0), append the number of commits and a dummy hash
VERSION_GOOD=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$') || true
VERSION_WITH_COMMIT=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\-[0-9]\+\-g[0-9a-fA-f]\+$') || true
if [ $VERSION_GOOD -eq 0 -a $VERSION_WITH_COMMIT -eq 0 ]; then
die "version does not conform to (MAJOR.MINOR.PATCH[-#NUM_COMMITS-g<HASH>])"
fi
if [ $VERSION_WITH_COMMIT -eq 0 ]; then
VERSION="${VERSION}-0-g0000000"
fi
NEW_VERSION="${VERSION}"-"${BUILD_TIME}"-"${SHA}"
sed -i "" -e "s/Version:.*/Version: ${NEW_VERSION}/g" "${PKG_ROOT}/DEBIAN/control"
@ -114,6 +131,8 @@ if ! aptly repo show "${PLATFORM}" > /dev/null 2>&1; then
aptly repo create --distribution="${PLATFORM}" "${PLATFORM}"
fi
PACKAGE=$(dpkg-deb -f "${FILENAME}" Package)
aptly repo remove "${PLATFORM}" "${PACKAGE}"
aptly repo add "${PLATFORM}" "${TMP}"
: "${REPO_KEYID:="D5E2DC92617877EDF7D4FD4345EA05FB7E26053D"}"

View file

@ -6,3 +6,4 @@
3.0.0-20230101-abcd 3.0.1-20230204-bdbd
1.5.0-20220516-a0d5a2 1.5.0-3-g26b5a59-20220527-0bc180
1.5.0-3-g26b5a59-20220527-0bc180 1.5.1-20220527-0bc180
0.1.0-20221120104301-f9e456637274844d45d9758ec661a136d0cda7966b075e4426b69fe6da00427b 0.1.0-237-g62965d4-20230527202149-6118c39221f318154e234098b5cffd4dc1d80f19cf2200cc6b1eb768dbf6decb

View file

@ -70,46 +70,59 @@ done
[ -z "${CACHE_DIR}" ] && die "The --cache-dir option must be specified"
[ -z "${DATA_DIR}" ] && die "The --data-dir option must be specified"
info "processing UUID '$UUID'"
info "processing UUID '${UUID}'"
DB="${DATA_DIR}/builder.sqlite3"
# A new visualizations.sh script may be installed during an upgrade while the
# old builder-web binary is running. In that case things can get out of sync.
DB_VERSION="$(sqlite3 "$DB" "PRAGMA user_version;")"
[ -z "$DB_VERSION" ] && die "Couldn't read database version from '$DB'"
[ "$DB_VERSION" -ne 18 ] && die "The database version should be 18. It is '$DB_VERSION'"
APP_ID="$(sqlite3 "$DB" "PRAGMA application_id;")"
[ -z "$APP_ID" ] && die "Couldn't read application-id from '$DB'"
[ "$APP_ID" -ne 1234839235 ] && die "The application-id should be 1234839235. It is '$APP_ID'"
get_main_binary () {
sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
FROM build AS b
JOIN build_artifact AS ba ON ba.build = b.id AND b.main_binary = ba.id
WHERE uuid = '$UUID';"
WHERE uuid = '${UUID}';"
}
BIN="${DATA_DIR}/$(get_main_binary)" || die "Failed to get main binary from database"
[ -z "${BIN}" ] && die "No main-binary found in db '$DB' for build '$UUID'"
[ -z "${BIN}" ] && die "No main-binary found in db '${DB}' for build '${UUID}'"
get_debug_binary () {
sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
FROM build AS b
JOIN build_artifact AS ba ON ba.build = b.id
WHERE
uuid = '$UUID'
AND ba.localpath LIKE '%.debug';"
uuid = '${UUID}'
AND ba.filepath LIKE '%.debug';"
}
DEBUG_BIN_RELATIVE="$(get_debug_binary)" || die "Failed to get debug binary from database"
get_opam_switch () {
sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
FROM build AS b
JOIN build_artifact AS ba ON ba.build = b.id
WHERE
uuid = '$UUID'
uuid = '${UUID}'
AND ba.filepath = 'opam-switch';"
}
OPAM_SWITCH="$(get_opam_switch)" || die "Failed to get opam switch from database"
[ -z "${OPAM_SWITCH}" ] && die "No 'opam-switch' found in db '$DB' for build '$UUID'"
[ -z "${OPAM_SWITCH}" ] && die "No 'opam-switch' found in db '${DB}' for build '${UUID}'"
OPAM_SWITCH="${DATA_DIR}/${OPAM_SWITCH}"
OPAM_GRAPH="opam-graph"
MODULECTOMY="modulectomy"
LATEST_TREEMAPVIZ_VERSION="$($MODULECTOMY --version)" || die "Failed to get modulectomy version"
LATEST_DEPENDENCIESVIZ_VERSION="$($OPAM_GRAPH --version)" || die "Failed to get opam-graph version"
LATEST_TREEMAPVIZ_VERSION="$(${MODULECTOMY} --version)" || die "Failed to get modulectomy version"
LATEST_DEPENDENCIESVIZ_VERSION="$(${OPAM_GRAPH} --version)" || die "Failed to get opam-graph version"
TREEMAP_CACHE_DIR="${CACHE_DIR}/treemap_${LATEST_TREEMAPVIZ_VERSION}"
DEPENDENCIES_CACHE_DIR="${CACHE_DIR}/dependencies_${LATEST_DEPENDENCIESVIZ_VERSION}"
@ -136,7 +149,7 @@ trap cleanup EXIT
# /// Dependencies viz
if [ ! -d "${DEPENDENCIES_CACHE_DIR}" ]; then
mkdir "${DEPENDENCIES_CACHE_DIR}" || die "Failed to create directory '$DEPENDENCIES_CACHE_DIR'"
mkdir "${DEPENDENCIES_CACHE_DIR}" || die "Failed to create directory '${DEPENDENCIES_CACHE_DIR}'"
fi
OPAM_SWITCH_FILEPATH='opam-switch'
@ -144,8 +157,8 @@ OPAM_SWITCH_FILEPATH='opam-switch'
get_opam_switch_hash () {
sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
JOIN build_artifact AS ba ON ba.build = b.id
WHERE uuid = '$UUID'
AND ba.filepath = '$OPAM_SWITCH_FILEPATH';"
WHERE uuid = '${UUID}'
AND ba.filepath = '${OPAM_SWITCH_FILEPATH}';"
}
DEPENDENCIES_INPUT_HASH="$(get_opam_switch_hash)" || die "Failed to get opam-switch hash from database"
@ -155,7 +168,8 @@ if [ -e "${DEPENDENCIES_VIZ_FILENAME}" ]; then
info "Dependency visualization already exists: '${DEPENDENCIES_VIZ_FILENAME}'"
else
if ${OPAM_GRAPH} --output-format=html "${OPAM_SWITCH}" > "${TMPDEPENDENCIES}"; then
mv "${TMPDEPENDENCIES}" "${DEPENDENCIES_VIZ_FILENAME}"
cp "${TMPDEPENDENCIES}" "${DEPENDENCIES_VIZ_FILENAME}"
rm "${TMPDEPENDENCIES}"
else
die "opam-graph failed to generate visualization"
fi
@ -173,16 +187,16 @@ stat_aux () {
fi
}
SIZE="$(stat_aux "$BIN")"
SIZE="$(stat_aux "${BIN}")"
if [ ! -d "${TREEMAP_CACHE_DIR}" ]; then
mkdir "${TREEMAP_CACHE_DIR}" || die "Failed to create directory '$TREEMAP_CACHE_DIR'"
mkdir "${TREEMAP_CACHE_DIR}" || die "Failed to create directory '${TREEMAP_CACHE_DIR}'"
fi
get_debug_bin_hash () {
sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
JOIN build_artifact AS ba ON ba.build = b.id
WHERE uuid = '$UUID'
WHERE uuid = '${UUID}'
AND ba.filepath LIKE '%.debug';"
}
@ -201,7 +215,8 @@ if [ -n "${DEBUG_BIN_RELATIVE}" ]; then
"${DEBUG_BIN}" \
> "${TMPTREE}"
then
mv "${TMPTREE}" "${TREEMAP_VIZ_FILENAME}"
cp "${TMPTREE}" "${TREEMAP_VIZ_FILENAME}"
rm "${TMPTREE}"
else
die "modulectomy failed to generate visualization"
fi

View file

@ -1,20 +1,16 @@
(test
(name test_builder_db)
(modules test_builder_db)
(libraries builder_db caqti.blocking alcotest mirage-crypto-rng.unix))
(libraries ptime.clock.os builder_db caqti.blocking alcotest mirage-crypto-rng.unix ohex))
(test
(name markdown_to_html)
(modules markdown_to_html)
(libraries builder_web alcotest))
(libraries builder_web cmarkit alcotest))
(test
(name router)
(modules router)
(libraries builder_web fmt dream yojson alcotest)
(preprocess
(pps
ppx_deriving.std
ppx_deriving_yojson
))
)
(pps ppx_deriving.std ppx_deriving_yojson)))

View file

@ -1,14 +1,14 @@
let markdown_to_html = Builder_web__Utils.Omd.html_of_string
let markdown_to_html = Builder_web__Utils.md_to_html
let test_simple () =
let markdown = {|# Hello world|} in
let html = markdown_to_html markdown in
Alcotest.(check string "simple html" "<h1>Hello world</h1>\n" html)
Alcotest.(check string "simple html" "<h1 id=\"hello-world\"><a class=\"anchor\" aria-hidden=\"true\" href=\"#hello-world\"></a>Hello world</h1>\n" html)
let test_html_script () =
let markdown = {|# <script>Hello world</script>|} in
let html = markdown_to_html markdown in
Alcotest.(check string "html script header" "<h1>Hello world</h1>\n" html)
Alcotest.(check string "html script header" "<h1 id=\"hello-world\"><a class=\"anchor\" aria-hidden=\"true\" href=\"#hello-world\"></a><!-- CommonMark raw HTML omitted -->Hello world<!-- CommonMark raw HTML omitted --></h1>\n" html)
let test_preserve_span_content () =
let markdown = {|* <span id="myref">My ref</span>
@ -16,10 +16,8 @@ let test_preserve_span_content () =
let html = markdown_to_html markdown in
Alcotest.(check string "html span content preserved"
{|<ul>
<li>My ref
</li>
<li>See my ref for more information
</li>
<li><!-- CommonMark raw HTML omitted -->My ref<!-- CommonMark raw HTML omitted --></li>
<li><a href="#myref">See my ref</a> for more information</li>
</ul>
|}
html)
@ -27,20 +25,21 @@ let test_preserve_span_content () =
let test_remove_script () =
let markdown = {|<script>alert(1);</script>|} in
let html = markdown_to_html markdown in
Alcotest.(check string "html script removed" "" html)
Alcotest.(check string "html script removed" "<!-- CommonMark HTML block omitted -->\n" html)
let test_list_with_html_block_and_markdown () =
let markdown = "* <div> Hello, World!</div> *this is not html*" in
let html = markdown_to_html markdown in
Alcotest.(check string "list with html block and markdown"
(*"<ul>\n<li><em>this is not html</em>\n</li>\n</ul>\n"*) ""
(*"<ul>\n<li><em>this is not html</em>\n</li>\n</ul>\n"*)
"<ul>\n<li>\n<!-- CommonMark HTML block omitted -->\n</li>\n</ul>\n"
html)
let test_list_with_inline_html_and_markdown () =
let markdown = "* <span> Hello, World!</span> *this is not html*" in
let html = markdown_to_html markdown in
Alcotest.(check string "list with html block and markdown"
"<ul>\n<li> Hello, World! <em>this is not html</em>\n</li>\n</ul>\n"
"<ul>\n<li><!-- CommonMark raw HTML omitted --> Hello, World!<!-- CommonMark raw HTML omitted --> <em>this is not html</em></li>\n</ul>\n"
html)
let test_absolute_link () =
@ -51,35 +50,131 @@ let test_absolute_link () =
let test_relative_link () =
let markdown = "[foo](../foo.jpg)" in
let html = markdown_to_html markdown in
Alcotest.(check string "relative link" "<p>foo</p>\n" html)
Alcotest.(check string "relative link" "<p><a href=\"../foo.jpg\">foo</a></p>\n" html)
let test_absolute_image () =
let markdown = "![alttext](https://foo.com/bar.jpg)" in
let html = markdown_to_html markdown in
Alcotest.(check string "absolute image"
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"alttext\" /></p>\n" html)
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"alttext\" ></p>\n" html)
let test_absolute_image_no_alt () =
let markdown = "![](https://foo.com/bar.jpg)" in
let html = markdown_to_html markdown in
Alcotest.(check string "absolute image"
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" /></p>\n" html)
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" ></p>\n" html)
let test_relative_image () =
let markdown = "![](/bar.jpg)" in
let html = markdown_to_html markdown in
Alcotest.(check string "relative image" "" html)
Alcotest.(check string "relative image" "<p><img src=\"/bar.jpg\" alt=\"\" ></p>\n" html)
let test_absolute_image_script_alt () =
let markdown = "![<script src=\"bla.js\"></script>](https://foo.com/bar.jpg)" in
let html = markdown_to_html markdown in
Alcotest.(check string "absolute image with script alt text"
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" /></p>\n" html)
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" ></p>\n" html)
let test_fragment_link () =
let markdown = "[fragment](#fragment)" in
let html = markdown_to_html markdown in
Alcotest.(check string "fragment link" "<p>fragment</p>\n" html)
Alcotest.(check string "fragment link" "<p><a href=\"#fragment\">fragment</a></p>\n" html)
let test_heading_adjustment () =
let markdown = {|# foo
## bar
# baz
## bazbar
### bazbarbar
#### bazbarbarbar
##### bazbarbarbarbar
###### bazbarbarbarbarbar
|}
in
let html = markdown_to_html ~adjust_heading:2 markdown in
(* NB: the maximum heading is 6 in cmarkit, thus we reduce the structure *)
let exp = {|<h3 id="foo"><a class="anchor" aria-hidden="true" href="#foo"></a>foo</h3>
<h4 id="bar"><a class="anchor" aria-hidden="true" href="#bar"></a>bar</h4>
<h3 id="baz"><a class="anchor" aria-hidden="true" href="#baz"></a>baz</h3>
<h4 id="bazbar"><a class="anchor" aria-hidden="true" href="#bazbar"></a>bazbar</h4>
<h5 id="bazbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbar"></a>bazbarbar</h5>
<h6 id="bazbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbar"></a>bazbarbarbar</h6>
<h6 id="bazbarbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbarbar"></a>bazbarbarbarbar</h6>
<h6 id="bazbarbarbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbarbarbar"></a>bazbarbarbarbarbar</h6>
|} in
Alcotest.(check string "header adjustment works fine" exp html)
let test_table () =
let markdown = {__|| a | | b | c | d | e |
| --------------------- |-| -------------- | -------------- | --------------- | ------ |
| entry | | **bla.file** | **other.file** | | |
| _another entry_ | | **another.file** | **another.other** | | |
|__}
in
let html = markdown_to_html ~adjust_heading:2 markdown in
let exp = {|<div role="region"><table>
<tr>
<th>a</th>
<th></th>
<th>b</th>
<th>c</th>
<th>d</th>
<th>e</th>
</tr>
<tr>
<td>entry</td>
<td></td>
<td><strong>bla.file</strong></td>
<td><strong>other.file</strong></td>
<td></td>
<td></td>
</tr>
<tr>
<td><em>another entry</em></td>
<td></td>
<td><strong>another.file</strong></td>
<td><strong>another.other</strong></td>
<td></td>
<td></td>
</tr>
</table></div>|} in
Alcotest.(check string "table is rendered as html" exp html)
let test_table2 () =
let markdown = {__|| a | | b | c | d | e |
| --------------------- |-| -------------- | -------------- | --------------- | ------ |
| entry | | | | **bla.file** | **other.file** |
| _another entry_ | | | **another.file** | **another.other** | |
|__}
in
let html = markdown_to_html ~adjust_heading:2 markdown in
let exp = {|<div role="region"><table>
<tr>
<th>a</th>
<th></th>
<th>b</th>
<th>c</th>
<th>d</th>
<th>e</th>
</tr>
<tr>
<td>entry</td>
<td></td>
<td></td>
<td></td>
<td><strong>bla.file</strong></td>
<td><strong>other.file</strong></td>
</tr>
<tr>
<td><em>another entry</em></td>
<td></td>
<td></td>
<td><strong>another.file</strong></td>
<td><strong>another.other</strong></td>
<td></td>
</tr>
</table></div>|} in
Alcotest.(check string "table is rendered as html" exp html)
let markdown_tests = [
Alcotest.test_case "Simple" `Quick test_simple;
@ -95,6 +190,9 @@ let markdown_tests = [
Alcotest.test_case "relative image" `Quick test_relative_image;
Alcotest.test_case "absolute image with script alt" `Quick test_absolute_image_script_alt;
Alcotest.test_case "fragment link" `Quick test_fragment_link;
Alcotest.test_case "heading adjustment" `Quick test_heading_adjustment;
Alcotest.test_case "table" `Quick test_table;
Alcotest.test_case "table2" `Quick test_table2;
]
let () =

View file

@ -2,7 +2,7 @@
module Param_verification = struct
(*> None is 'verified'*)
type t = wrong_type option
type t = wrong_type option
[@@deriving yojson,show,eq]
and wrong_type = {
@ -14,40 +14,30 @@ module Param_verification = struct
module P = struct
let is_string : (string * string) option -> _ option = function
| Some _ -> None
| None -> None
let is_string : (string * string) -> _ option =
Fun.const None
let is_uuid (param, value) =
match Uuidm.of_string value with
| Some _ when String.length value = 36 -> None
| _ -> Some {
param;
expected = "Uuidm.t"
}
let is_uuid = function
| Some (param, value) ->
begin match Uuidm.of_string value with
| Some _ when String.length value = 36 -> None
| _ -> Some {
param;
expected = "Uuidm.t"
}
end
| None -> None
end
let param req tag =
match Dream.param req tag with
| param -> Some (tag, param)
| exception _ -> None
let ( &&& ) v v' =
match v with
| None -> v'
| Some _ as some -> some
let verify req =
let verified_params =
P.is_string (param req "job")
&&& P.is_uuid (param req "build")
&&& P.is_uuid (param req "build_left")
&&& P.is_uuid (param req "build_right")
&&& P.is_string (param req "platform")
let verify parameters req =
let verified_params =
List.fold_left (fun acc p ->
match acc with
| None ->
if String.starts_with ~prefix:"build" p then
P.is_uuid (p, Dream.param req p)
else
P.is_string (p, Dream.param req p)
| Some _ as x -> x)
None parameters
in
let response_json =
verified_params |> to_yojson |> Yojson.Safe.to_string
@ -55,15 +45,23 @@ module Param_verification = struct
Dream.respond response_json
end
let find_parameters path =
List.filter_map (fun s ->
if String.length s > 0 && String.get s 0 = ':' then
Some (String.sub s 1 (String.length s - 1))
else
None)
(String.split_on_char '/' path)
let router =
(* XXX: this relies on [Builder_web.routes] only using {data,cache,config}dir
* in the handlers which are never called here. The path /nonexistant is
* assumed to not exist. *)
let nodir = Fpath.v "/nonexistant" in
Builder_web.routes ~datadir:nodir ~cachedir:nodir ~configdir:nodir
Builder_web.routes ~datadir:nodir ~cachedir:nodir ~configdir:nodir ~expired_jobs:0
|> List.map (fun (meth, route, _handler) ->
meth, route, Param_verification.verify)
meth, route, Param_verification.verify (find_parameters route))
|> Builder_web.to_dream_routes
|> Dream.router
(* XXX: we test without remove_trailing_url_slash to ensure we don't produce
@ -85,7 +83,7 @@ let test_link method_ target () =
Alcotest.(check' (result Param_verification.alcotyp string) ~msg:"param-verification"
~actual:body ~expected:(Ok None))
let test_link_artifact artifact =
let test_link_artifact artifact =
let job_name = "test" in
let build = Uuidm.v `V4 in
test_link `GET @@
@ -149,7 +147,7 @@ let () =
end;
test_case "Link.Failed_builds.make" `Quick begin
test_link `GET @@
Builder_web.Link.Failed_builds.make ~count:2 ~start:1 ()
Builder_web.Link.Failed_builds.make ~count:2 ~start:1 ()
end;
];
(* this doesn't actually test the redirects, unfortunately *)
@ -159,5 +157,35 @@ let () =
"/f/bin/unikernel.hvt";
"/";
"";
]
];
"Albatross hardcoded links",
[
(*> Note: these links can be found in
albatross/command-line/albatross_client_update.ml
.. to find them I follewed the trails of 'Albatross_cli.http_host'
*)
begin
let sha_str =
Digestif.SHA256.(to_raw_string (digest_string "foo"))
|> Ohex.encode
in
Fmt.str "/hash?sha256=%s" sha_str
end;
begin
let jobname = "foo" in
"/job/" ^ jobname ^ "/build/latest"
end;
begin
let job = "foo" in
let build = Uuidm.(v `V4 |> to_string) in
"/job/" ^ job ^ "/build/" ^ build ^ "/main-binary"
end;
begin
let old_uuid = Uuidm.(v `V4 |> to_string) in
let new_uuid = Uuidm.(v `V4 |> to_string) in
Fmt.str "/compare/%s/%s" old_uuid new_uuid
end;
]
|> List.map Alcotest.(fun p ->
test_case ("" ^ p) `Quick (test_link `GET p))
]

View file

@ -3,7 +3,7 @@ let ( >>| ) x f = Result.map f x
module type CONN = Caqti_blocking.CONNECTION
let () = Mirage_crypto_rng_unix.initialize ()
let () = Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna)
let iter f xs = List.fold_left (fun r x -> r >>= fun () -> f x) (Ok ()) xs
let get_opt message = function
@ -25,8 +25,8 @@ module Testable = struct
x.restricted = y.restricted &&
match x.password_hash, y.password_hash with
| `Scrypt (hash, salt, params), `Scrypt (hash', salt', params') ->
Cstruct.equal hash hash' &&
Cstruct.equal salt salt' &&
String.equal hash hash' &&
String.equal salt salt' &&
params = params'
in
let pp ppf { Builder_web_auth.username; password_hash; restricted } =
@ -34,7 +34,7 @@ module Testable = struct
| `Scrypt (hash, salt, { Builder_web_auth.scrypt_n; scrypt_r; scrypt_p }) ->
Format.fprintf ppf "user:%s;(%d,%d,%d);%B;%a;%a" username
scrypt_n scrypt_r scrypt_p restricted
Cstruct.hexdump_pp hash Cstruct.hexdump_pp salt
Ohex.pp hash Ohex.pp salt
in
Alcotest.testable
pp
@ -43,18 +43,15 @@ module Testable = struct
let file =
let equal (x : Builder_db.Rep.file) (y : Builder_db.Rep.file) =
Fpath.equal x.filepath y.filepath &&
Fpath.equal x.localpath y.localpath &&
Cstruct.equal x.sha256 y.sha256 &&
String.equal x.sha256 y.sha256 &&
x.size = y.size
in
let pp ppf { Builder_db.Rep.filepath; localpath; sha256; size } =
let pp ppf { Builder_db.Rep.filepath; sha256; size } =
Format.fprintf ppf "{@[<v 1>@;<1 0>Builder_db.Rep.filepath = %a;@;<1 0>\
localpath = %a;@;<1 0>\
sha256 = %a;@;<1 0>\
size = %d;@;<1 0>\
@]@,}"
Fpath.pp filepath Fpath.pp localpath
Cstruct.hexdump_pp sha256 size
Fpath.pp filepath Ohex.pp sha256 size
in
Alcotest.testable pp equal
@ -133,14 +130,13 @@ let finish = Option.get (Ptime.of_float_s 1.)
let result = Builder.Exited 0
let main_binary =
let filepath = Result.get_ok (Fpath.of_string "bin/hello.sh") in
let localpath = Result.get_ok (Fpath.of_string "/dev/null") in
let data = "#!/bin/sh\necho Hello, World\n" in
let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
let sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
let size = String.length data in
{ Builder_db.Rep.filepath; localpath; sha256; size }
{ Builder_db.Rep.filepath; sha256; size }
let main_binary2 =
let data = "#!/bin/sh\necho Hello, World 2\n" in
let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
let sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
let size = String.length data in
{ main_binary with sha256 ; size }
let platform = "exotic-os"
@ -149,21 +145,17 @@ let fail_if_none a =
Option.to_result ~none:(`Msg "Failed to retrieve") a
let add_test_build user_id (module Db : CONN) =
let r =
let open Builder_db in
Db.start () >>= fun () ->
Db.exec Job.try_add job_name >>= fun () ->
Db.find_opt Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
Db.exec Build.add { Build.uuid; start; finish; result; console; script; platform;
main_binary = None; input_id = None; user_id; job_id } >>= fun () ->
Db.find last_insert_rowid () >>= fun id ->
Db.exec Build_artifact.add (main_binary, id) >>= fun () ->
Db.find last_insert_rowid () >>= fun main_binary_id ->
Db.exec Build.set_main_binary (id, main_binary_id) >>= fun () ->
Db.commit ()
in
Result.fold ~ok:Result.ok ~error:(fun _ -> Db.rollback ())
r
let open Builder_db in
Db.start () >>= fun () ->
Db.exec Job.try_add job_name >>= fun () ->
Db.find_opt Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
Db.exec Build.add { Build.uuid; start; finish; result; console; script; platform;
main_binary = None; input_id = None; user_id; job_id } >>= fun () ->
Db.find last_insert_rowid () >>= fun id ->
Db.exec Build_artifact.add (main_binary, id) >>= fun () ->
Db.find last_insert_rowid () >>= fun main_binary_id ->
Db.exec Build.set_main_binary (id, main_binary_id) >>= fun () ->
Db.commit ()
let with_build_db f () =
or_fail
@ -227,7 +219,7 @@ let test_build_get_latest (module Db : CONN) =
Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
Db.find_opt Builder_db.Build.get_latest_successful_with_binary (job_id, platform)
>>| get_opt "no latest build" >>| fun (_id, meta, main_binary') ->
Alcotest.(check (option Testable.file)) "same main binary" main_binary' (Some main_binary2);
Alcotest.(check Testable.file) "same main binary" main_binary2 main_binary';
Alcotest.(check Testable.uuid) "same uuid" meta.uuid uuid'
let test_build_get_previous (module Db : CONN) =
@ -269,6 +261,14 @@ let test_artifact_get_by_build_uuid (module Db : CONN) =
get_opt "no build" >>| fun (_id, file) ->
Alcotest.(check Testable.file) "same file" file main_binary
let test_artifact_exists_true (module Db : CONN) =
Db.find Builder_db.Build_artifact.exists main_binary.sha256 >>| fun exists ->
Alcotest.(check bool) "main binary exists" true exists
let test_artifact_exists_false (module Db : CONN) =
Db.find Builder_db.Build_artifact.exists main_binary2.sha256 >>| fun exists ->
Alcotest.(check bool) "main binary2 doesn't exists" false exists
(* XXX: This test should fail because main_binary on the corresponding build
* references its main_binary. This is not the case now due to foreign key. *)
let test_artifact_remove_by_build (module Db : CONN) =
@ -276,6 +276,39 @@ let test_artifact_remove_by_build (module Db : CONN) =
get_opt "no build" >>= fun (id, _build) ->
Db.exec Builder_db.Build_artifact.remove_by_build id
let test_get_builds_older_than (module Db : CONN) =
add_second_build (module Db) >>= fun () ->
let date = Option.get (Ptime.of_float_s (3600. /. 2.)) in
Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
Db.collect_list Builder_db.Build.get_builds_older_than (job_id, None, date) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "last build" builds [ uuid ];
Db.collect_list Builder_db.Build.get_builds_older_than (job_id, None, Ptime_clock.now ()) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
(* NOTE(dinosaure): from the most recent to the older. *)
Alcotest.(check (list Testable.uuid)) "last builds" builds [ uuid'; uuid ];
Ok ()
let test_builds_excluding_latest_n (module Db : CONN) =
add_second_build (module Db) >>= fun () ->
Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 1) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "keep recent build" builds [ uuid ];
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 2) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "keep 2 builds" builds [];
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 3) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "last more builds than we have" builds [];
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 0) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "delete all builds" builds [ uuid'; uuid ];
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, -1) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "test an incomprehensible argument (-1)" builds [ uuid'; uuid ];
Ok ()
let () =
let open Alcotest in
Alcotest.run "Builder_db" [
@ -306,6 +339,12 @@ let () =
"build-artifact", [
test_case "Get all by build" `Quick (with_build_db test_artifact_get_all_by_build);
test_case "Get by build uuid" `Quick (with_build_db test_artifact_get_by_build_uuid);
test_case "Artifact exists" `Quick (with_build_db test_artifact_exists_true);
test_case "Other artifact doesn't exists" `Quick (with_build_db test_artifact_exists_false);
test_case "Remove by build" `Quick (with_build_db test_artifact_remove_by_build);
];
"vacuum", [
test_case "Get builds older than now" `Quick (with_build_db test_get_builds_older_than);
test_case "Get older builds and keep a fixed number of then" `Quick (with_build_db test_builds_excluding_latest_n);
]
]