Compare commits

..

1 commit

64 changed files with 1089 additions and 2678 deletions

2
.gitignore vendored
View file

@ -1,4 +1,4 @@
_build _build
*~ *~
*# *#
_opam

View file

@ -1,27 +1,3 @@
## v0.2.0 (2024-09-05) # v0.1.0 (2021-11-12)
A whole slew of changes. Internally, we made a lot of incremental changes and improvements without doing a release. Thus this release is rather big. There are a lot of database migrations to apply, and unfortunately they have to be applied one at a time.
* Add a /failed-builds/ endpoint that lists the most recent failed builds.
* By default don't display failed builds on the front page.
* Times are printed with the 'Z' time zone offset indicator.
* Links to comparisons of builds take into account whether the "input", among others the list of dependencies, is different.
* New subcommand `builder-db extract-build` takes a build UUID and extracts the builder "full" file.
* Add /job/<job>/build/<build>/all.tar.gz endpoint with a gzip compressed tar archive of all build artifacts.
* Visual overhaul.
* Add (optional) visualizations displaying package dependencies ("opam-graph") and for unikernels a "modulectomy" view of how much each OCaml module is contributing to the final binary size. The visualizations are read from a cache on disk and can be generated from a script.
* A script hook is added on file upload. It may be used to generate visualizations or publish system packages to a repository.
* The 404 file not found page tries to be more informative.
* The build page for a unikernel build displays the solo5 device manifest, e.g. `with block devices "storage", and net devices "service"`.
* URLs with trailing slash redirect to without the trailing slash.
* Builder-web will try to be more helpful if its database doesn't exist or the database version is wrong.
* The opam diff works for mirage 4 unikernels taking into account the opam-monorepo/duniverse packages.
* Markdown rendering is now done using cmarkit instead of omd.
* Builder-web doesn't display jobs older than 30 days (customizable with `--expired-jobs` command line argument) on the front page.
* Build artifacts are stored by their content, and artifacts are automatically deduplicated. This makes builder-web much more space efficient on deployments that don't use deduplication on the filesystem level.
* New subcommands `builder-db vacuum *` to remove older builds. Can be called from a cron job to keep disk usage bounded.
* Lots of other improvements and bug fixes.
## v0.1.0 (2021-11-12)
* Initial public release * Initial public release

View file

@ -1,13 +1,13 @@
# Builder-web - a web frontend for reproducible builds # Builder-web - a web frontend for reproducible builds
Builder-web takes in submissions of builds, typically from [builder](https://github.com/robur-coop/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status. Builder-web takes in submissions of builds, typically from [builder](https://github.com/roburio/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
Produced binaries can be downloaded and executed. Produced binaries can be downloaded and executed.
[builds.robur.coop](https://builds.robur.coop/) itself runs builder-web. [builds.robur.coop](https://builds.robur.coop/) itself runs builder-web.
## Overview ## Overview
Builder-web is a single binary web server using a sqlite3 database with versioned schemas. Builder-web is a single binary web server using a sqlite3 database with versioned schemas.
Finished builds from [builder](https://github.com/robur-coop/builder/) are uploaded to builder-web, stored and indexed in the database and presented in the web interface to the user. Finished builds from [builder](https://github.com/roburio/builder/) are uploaded to builder-web, stored and indexed in the database and presented in the web interface to the user.
Users can: Users can:
* Get an overview of *jobs* - a job is typically script or opam package that is run and builds an artifact, * Get an overview of *jobs* - a job is typically script or opam package that is run and builds an artifact,
@ -58,9 +58,3 @@ see `builder-db user-add --help`):
```ocaml ```ocaml
curl --data-binary @<build-hash>.full http://<user>:<passwd>@localhost:<builder-web-port>/upload curl --data-binary @<build-hash>.full http://<user>:<passwd>@localhost:<builder-web-port>/upload
``` ```
## JSON Responses
Some endpoints return JSON when the headers contain `Accept: application/json`.
- `/compare/:build_left/:build_right`
- `/job/:job/build/latest/**`
- `/job/:job/build/latest`

View file

@ -12,9 +12,9 @@ let scrypt_params ?(scrypt_n = 16384) ?(scrypt_r = 8) ?(scrypt_p = 1) () =
{ scrypt_n; scrypt_r; scrypt_p } { scrypt_n; scrypt_r; scrypt_p }
type pbkdf2_sha256 = type pbkdf2_sha256 =
[ `Pbkdf2_sha256 of string * string * pbkdf2_sha256_params ] [ `Pbkdf2_sha256 of Cstruct.t * Cstruct.t * pbkdf2_sha256_params ]
type scrypt = [ `Scrypt of string * string * scrypt_params ] type scrypt = [ `Scrypt of Cstruct.t * Cstruct.t * scrypt_params ]
type password_hash = [ pbkdf2_sha256 | scrypt ] type password_hash = [ pbkdf2_sha256 | scrypt ]
@ -25,10 +25,10 @@ type 'a user_info = {
} }
let pbkdf2_sha256 ~params:{ pbkdf2_sha256_iter = count } ~salt ~password = let pbkdf2_sha256 ~params:{ pbkdf2_sha256_iter = count } ~salt ~password =
Pbkdf.pbkdf2 ~prf:`SHA256 ~count ~dk_len:32l ~salt ~password Pbkdf.pbkdf2 ~prf:`SHA256 ~count ~dk_len:32l ~salt ~password:(Cstruct.of_string password)
let scrypt ~params:{ scrypt_n = n; scrypt_r = r; scrypt_p = p } ~salt ~password = let scrypt ~params:{ scrypt_n = n; scrypt_r = r; scrypt_p = p } ~salt ~password =
Scrypt.scrypt ~n ~r ~p ~dk_len:32l ~salt ~password Scrypt_kdf.scrypt_kdf ~n ~r ~p ~dk_len:32l ~salt ~password:(Cstruct.of_string password)
let hash ?(scrypt_params=scrypt_params ()) let hash ?(scrypt_params=scrypt_params ())
~username ~password ~restricted () = ~username ~password ~restricted () =
@ -43,10 +43,10 @@ let hash ?(scrypt_params=scrypt_params ())
let verify_password password user_info = let verify_password password user_info =
match user_info.password_hash with match user_info.password_hash with
| `Pbkdf2_sha256 (password_hash, salt, params) -> | `Pbkdf2_sha256 (password_hash, salt, params) ->
String.equal Cstruct.equal
(pbkdf2_sha256 ~params ~salt ~password) (pbkdf2_sha256 ~params ~salt ~password)
password_hash password_hash
| `Scrypt (password_hash, salt, params) -> | `Scrypt (password_hash, salt, params) ->
String.equal Cstruct.equal
(scrypt ~params ~salt ~password) (scrypt ~params ~salt ~password)
password_hash password_hash

View file

@ -1,3 +1,3 @@
(library (library
(name builder_web_auth) (name builder_web_auth)
(libraries kdf.pbkdf kdf.scrypt mirage-crypto-rng)) (libraries pbkdf scrypt-kdf mirage-crypto-rng))

View file

@ -16,13 +16,6 @@ let defer_foreign_keys =
Caqti_type.unit ->. Caqti_type.unit @@ Caqti_type.unit ->. Caqti_type.unit @@
"PRAGMA defer_foreign_keys = ON" "PRAGMA defer_foreign_keys = ON"
let build_artifacts_to_orphan =
Builder_db.Rep.id `build ->* Caqti_type.octets @@
{| SELECT a.sha256 FROM build_artifact a
WHERE a.build = ? AND
(SELECT COUNT(*) FROM build_artifact a2
WHERE a2.sha256 = a.sha256 AND a2.build <> a.build) = 0 |}
let connect uri = let connect uri =
let* (module Db : Caqti_blocking.CONNECTION) = Caqti_blocking.connect ~tweaks_version:(1,8) uri in let* (module Db : Caqti_blocking.CONNECTION) = Caqti_blocking.connect ~tweaks_version:(1,8) uri in
let* () = Db.exec defer_foreign_keys () in let* () = Db.exec defer_foreign_keys () in
@ -43,16 +36,6 @@ let do_migrate dbpath =
let migrate () dbpath = let migrate () dbpath =
or_die 1 (do_migrate dbpath) or_die 1 (do_migrate dbpath)
let artifacts_dir datadir = Fpath.(datadir / "_artifacts")
let artifact_path sha256 =
let sha256 = Ohex.encode sha256 in
(* NOTE: [sha256] is 64 characters when it's a hex sha256 checksum *)
(* NOTE: We add the prefix to reduce the number of files in a directory - a
workaround for inferior filesystems. We can easily revert this by changing
this function and adding a migration. *)
let prefix = String.sub sha256 0 2 in
Fpath.(v "_artifacts" / prefix / sha256)
let user_mod action dbpath scrypt_n scrypt_r scrypt_p username unrestricted = let user_mod action dbpath scrypt_n scrypt_r scrypt_p username unrestricted =
let scrypt_params = Builder_web_auth.scrypt_params ?scrypt_n ?scrypt_r ?scrypt_p () in let scrypt_params = Builder_web_auth.scrypt_params ?scrypt_n ?scrypt_r ?scrypt_p () in
let r = let r =
@ -108,7 +91,7 @@ let user_disable () dbpath username =
match user with match user with
| None -> Error (`Msg "user not found") | None -> Error (`Msg "user not found")
| Some (_, user_info) -> | Some (_, user_info) ->
let password_hash = `Scrypt ("", "", Builder_web_auth.scrypt_params ()) in let password_hash = `Scrypt (Cstruct.empty, Cstruct.empty, Builder_web_auth.scrypt_params ()) in
let user_info = { user_info with password_hash ; restricted = true } in let user_info = { user_info with password_hash ; restricted = true } in
Db.exec Builder_db.User.update_user user_info Db.exec Builder_db.User.update_user user_info
in in
@ -148,26 +131,6 @@ let access_remove () dbpath username jobname =
in in
or_die 1 r or_die 1 r
let delete_build datadir (module Db : Caqti_blocking.CONNECTION) jobname id uuid =
let dir = Fpath.(v datadir / jobname / Uuidm.to_string uuid) in
(match Bos.OS.Dir.delete ~recurse:true dir with
| Ok _ -> ()
| Error `Msg e -> Logs.warn (fun m -> m "failed to remove build directory %a: %s" Fpath.pp dir e));
let* () =
Db.iter_s build_artifacts_to_orphan
(fun sha256 ->
let p = Fpath.(v datadir // artifact_path sha256) in
match Bos.OS.Path.delete p with
| Ok () -> Ok ()
| Error `Msg e ->
Logs.warn (fun m -> m "failed to remove orphan artifact %a: %s"
Fpath.pp p e);
Ok ())
id
in
let* () = Db.exec Builder_db.Build_artifact.remove_by_build id in
Db.exec Builder_db.Build.remove id
let job_remove () datadir jobname = let job_remove () datadir jobname =
let dbpath = datadir ^ "/builder.sqlite3" in let dbpath = datadir ^ "/builder.sqlite3" in
let r = let r =
@ -187,7 +150,12 @@ let job_remove () datadir jobname =
let* () = let* () =
List.fold_left (fun r (build_id, build) -> List.fold_left (fun r (build_id, build) ->
let* () = r in let* () = r in
delete_build datadir (module Db) jobname build_id build.Builder_db.Build.uuid) let dir = Fpath.(v datadir / jobname / Uuidm.to_string build.Builder_db.Build.uuid) in
(match Bos.OS.Dir.delete ~recurse:true dir with
| Ok _ -> ()
| Error `Msg e -> Logs.warn (fun m -> m "failed to remove build directory %a: %s" Fpath.pp dir e));
let* () = Db.exec Builder_db.Build_artifact.remove_by_build build_id in
Db.exec Builder_db.Build.remove build_id)
(Ok ()) (Ok ())
builds builds
in in
@ -205,103 +173,13 @@ let job_remove () datadir jobname =
in in
or_die 1 r or_die 1 r
let vacuum datadir (module Db : Caqti_blocking.CONNECTION) platform_opt job_id predicate =
let* jobname = Db.find Builder_db.Job.get job_id in
let* builds =
match predicate with
| `Date older_than ->
Db.collect_list Builder_db.Build.get_builds_older_than (job_id, platform_opt, older_than)
| `Latest latest_n ->
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, platform_opt, latest_n)
| `Latest_successful latest_n ->
let* latest_n =
Db.find_opt Builder_db.Build.get_nth_latest_successful
(job_id, platform_opt, latest_n)
in
match latest_n with
| None ->
Ok []
| Some (id, latest_n) ->
let+ builds =
Db.collect_list Builder_db.Build.get_builds_older_than
(job_id, platform_opt, latest_n.finish)
in
(* Unfortunately, get_builds_older_than is non-strict comparison;
so we need to filter out [latest_n]. *)
List.filter (fun (id', _) -> id <> id') builds
in
let pp_reason ppf = function
| `Date older_than ->
Format.fprintf ppf "has no builds older than %a" (Ptime.pp_rfc3339 ()) older_than
| `Latest n ->
Format.fprintf ppf "has fewer than %d builds" n
| `Latest_successful n ->
Format.fprintf ppf "has fewer than %d successful builds" n
in
if builds = [] then
(* NOTE: this function may be called on *all* jobs, and in that case maybe
this is too verbose? *)
Logs.info (fun m -> m "Job %s %a; not removing any builds"
jobname pp_reason predicate);
List.fold_left (fun r (build_id, build) ->
let* () = r in
let* () = Db.start () in
let* () = Db.exec defer_foreign_keys () in
match
delete_build datadir (module Db) jobname build_id
build.Builder_db.Build.uuid
with
| Ok () -> Db.commit ()
| Error _ as e ->
let* () = Db.rollback () in
e)
(Ok ())
builds
let vacuum () datadir platform_opt jobnames predicate =
let dbpath = datadir ^ "/builder.sqlite3" in
let r =
let* (module Db : Caqti_blocking.CONNECTION) =
connect (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
in
let* jobs =
match jobnames with
| [] ->
(* We default to all jobs if no jobnames were specified *)
let* jobs = Db.collect_list Builder_db.Job.get_all_with_section_synopsis () in
Ok (List.map (fun (job_id, _, _, _) -> job_id) jobs)
| _ :: _ ->
let* (jobs, unknown_jobnames) =
List.fold_left
(fun r jobname ->
let* (jobs, unknown_jobnames) = r in
let* job_id_opt = Db.find_opt Builder_db.Job.get_id_by_name jobname in
match job_id_opt with
| Some job_id -> Ok (job_id :: jobs, unknown_jobnames)
| None -> Ok (jobs, jobname :: unknown_jobnames))
(Ok ([], []))
jobnames
in
match unknown_jobnames with
| [] -> Ok jobs
| _ :: _ ->
Error (`Msg ("Unknown job(s): " ^ String.concat ", " unknown_jobnames))
in
List.fold_left (fun r jobid ->
let* () = r in
vacuum datadir (module Db) platform_opt jobid predicate)
(Ok ())
jobs
in
or_die 1 r
let input_ids = let input_ids =
Caqti_type.unit ->* Caqti_type.octets @@ Caqti_type.unit ->* Builder_db.Rep.cstruct @@
"SELECT DISTINCT input_id FROM build WHERE input_id IS NOT NULL" "SELECT DISTINCT input_id FROM build WHERE input_id IS NOT NULL"
let main_artifact_hash = let main_artifact_hash =
Caqti_type.octets ->* Builder_db.Rep.cstruct ->*
Caqti_type.t3 Caqti_type.octets Builder_db.Rep.uuid Caqti_type.string @@ Caqti_type.tup3 Builder_db.Rep.cstruct Builder_db.Rep.uuid Caqti_type.string @@
{| {|
SELECT a.sha256, b.uuid, j.name FROM build_artifact a, build b, job j SELECT a.sha256, b.uuid, j.name FROM build_artifact a, build b, job j
WHERE b.input_id = ? AND a.id = b.main_binary AND b.job = j.id WHERE b.input_id = ? AND a.id = b.main_binary AND b.job = j.id
@ -319,12 +197,12 @@ let verify_input_id () dbpath =
match hashes with match hashes with
| (h, uuid, jobname) :: tl -> | (h, uuid, jobname) :: tl ->
List.iter (fun (h', uuid', _) -> List.iter (fun (h', uuid', _) ->
if String.equal h h' then if Cstruct.equal h h' then
() ()
else else
Logs.warn (fun m -> m "job %s input id %a with two different hashes (%a, %a), build %a and %a" Logs.warn (fun m -> m "job %s input id %a with two different hashes (%a, %a), build %a and %a"
jobname Ohex.pp input_id jobname Cstruct.hexdump_pp input_id
Ohex.pp h Ohex.pp h' Cstruct.hexdump_pp h Cstruct.hexdump_pp h'
Uuidm.pp uuid Uuidm.pp uuid')) Uuidm.pp uuid Uuidm.pp uuid'))
tl tl
| [] -> ()) | [] -> ())
@ -336,17 +214,18 @@ let num_build_artifacts =
Caqti_type.unit ->! Caqti_type.int @@ Caqti_type.unit ->! Caqti_type.int @@
"SELECT count(*) FROM build_artifact" "SELECT count(*) FROM build_artifact"
let build_artifacts : (unit, string * Uuidm.t * Fpath.t * string * int64, [ `One | `Zero | `Many ]) Caqti_request.t = let build_artifacts : (unit, string * Uuidm.t * (Fpath.t * Fpath.t * Cstruct.t * int64), [ `One | `Zero | `Many ]) Caqti_request.t =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.(t5 string Builder_db.Rep.uuid Builder_db.Rep.fpath octets int64) Caqti_type.(tup3 string Builder_db.Rep.uuid
(tup4 Builder_db.Rep.fpath Builder_db.Rep.fpath Builder_db.Rep.cstruct int64))
@@ @@
{| SELECT job.name, b.uuid, a.filepath, a.sha256, a.size {| SELECT job.name, b.uuid, a.filepath, a.localpath, a.sha256, a.size
FROM build_artifact a, build b, job FROM build_artifact a, build b, job
WHERE a.build = b.id AND b.job = job.id |} WHERE a.build = b.id AND b.job = job.id |}
let script_and_console : (unit, _, [`One | `Zero | `Many ]) Caqti_request.t = let script_and_console : (unit, _, [`One | `Zero | `Many ]) Caqti_request.t =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.(t4 string Builder_db.Rep.uuid Builder_db.Rep.fpath Builder_db.Rep.fpath) Caqti_type.(tup4 string Builder_db.Rep.uuid Builder_db.Rep.fpath Builder_db.Rep.fpath)
@@ @@
{| SELECT job.name, b.uuid, b.console, b.script {| SELECT job.name, b.uuid, b.console, b.script
FROM build b, job FROM build b, job
@ -378,33 +257,36 @@ let verify_data_dir () datadir =
let idx = ref 0 in let idx = ref 0 in
fun () -> incr idx; if !idx mod 100 = 0 then Logs.info (fun m -> m "%d" !idx); fun () -> incr idx; if !idx mod 100 = 0 then Logs.info (fun m -> m "%d" !idx);
in in
let verify_job_and_uuid job uuid path = let verify_job_and_uuid ?fpath job uuid path =
match Fpath.segs path with match Fpath.segs path with
| job' :: uuid' :: _tl -> | job' :: uuid' :: tl ->
if String.equal job job' then () else Logs.warn (fun m -> m "job names do not match: %s vs %s" job job'); if String.equal job job' then () else Logs.warn (fun m -> m "job names do not match: %s vs %s" job job');
if String.equal (Uuidm.to_string uuid) uuid' then () else Logs.warn (fun m -> m "uuid does not match: %s vs %s" (Uuidm.to_string uuid) uuid'); if String.equal (Uuidm.to_string uuid) uuid' then () else Logs.warn (fun m -> m "uuid does not match: %s vs %s" (Uuidm.to_string uuid) uuid');
(match fpath, tl with
| None, _ -> ()
| Some f, "output" :: tl ->
if Fpath.equal (Fpath.v (String.concat "/" tl)) f then
()
else
Logs.err (fun m -> m "path (%a) and fpath (%a) do not match" Fpath.pp path Fpath.pp f)
| Some _, _ ->
Logs.err (fun m -> m "path is not of form <job>/<uuid>/output/<filename>: %a" Fpath.pp path))
| _ -> Logs.err (fun m -> m "path is not of form <job>/<uuid>/...: %a" Fpath.pp path) | _ -> Logs.err (fun m -> m "path is not of form <job>/<uuid>/...: %a" Fpath.pp path)
in in
let* () = let* () =
Db.iter_s build_artifacts (fun (_job, _uuid, _fpath, sha256, size) -> Db.iter_s build_artifacts (fun (job, uuid, (fpath, lpath, sha, size)) ->
progress (); progress ();
if not (FpathSet.mem (artifact_path sha256) !files_tracked) then verify_job_and_uuid ~fpath job uuid lpath;
let abs_path = Fpath.(v datadir // artifact_path sha256) in let abs_path = Fpath.(v datadir // lpath) in
(match Bos.OS.File.read abs_path with (match Bos.OS.File.read abs_path with
| Error (`Msg msg) -> Logs.err (fun m -> m "file %a not present: %s" Fpath.pp abs_path msg) | Error (`Msg msg) -> Logs.err (fun m -> m "file %a not present: %s" Fpath.pp abs_path msg)
| Ok data -> | Ok data ->
files_tracked := FpathSet.add (artifact_path sha256) !files_tracked; files_tracked := FpathSet.add lpath !files_tracked;
let s = Int64.of_int (String.length data) in let s = Int64.of_int (String.length data) in
if s <> size then Logs.err (fun m -> m "File %a has different size (in DB %Lu on disk %Lu)" Fpath.pp abs_path size s); if s <> size then Logs.err (fun m -> m "File %a has different size (in DB %Lu on disk %Lu)" Fpath.pp abs_path size s);
let sha256' = Digestif.SHA256.(to_raw_string (digest_string data)) in let sh = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
if not (String.equal sha256 sha256') then if not (Cstruct.equal sha sh) then Logs.err (fun m -> m "File %a has different hash (in DB %a on disk %a" Fpath.pp abs_path Cstruct.hexdump_pp sha Cstruct.hexdump_pp sh)) ;
Logs.err (fun m -> m "File %a has different hash (in DB %a on disk %a)" Ok ()
Fpath.pp abs_path
Ohex.pp sha256
Ohex.pp sha256')) ;
Ok ()
else
Ok ()
) () ) ()
in in
Db.iter_s script_and_console (fun (job, uuid, console, script) -> Db.iter_s script_and_console (fun (job, uuid, console, script) ->
@ -424,7 +306,7 @@ let verify_data_dir () datadir =
files_untracked; files_untracked;
or_die 1 r or_die 1 r
module Verify_cache_dir = struct module Verify_cache_dir = struct
let verify_dir_exists d = let verify_dir_exists d =
let* dir_exists = Bos.OS.Dir.exists d in let* dir_exists = Bos.OS.Dir.exists d in
@ -440,7 +322,7 @@ module Verify_cache_dir = struct
let string_is_int s = match int_of_string_opt s with let string_is_int s = match int_of_string_opt s with
| None -> false | None -> false
| Some _ -> true | Some _ -> true
let verify_cache_subdir ~cachedir d = let verify_cache_subdir ~cachedir d =
match Bos.OS.Dir.exists Fpath.(cachedir // d) with match Bos.OS.Dir.exists Fpath.(cachedir // d) with
| Ok false -> () | Ok false -> ()
@ -455,7 +337,7 @@ module Verify_cache_dir = struct
let prefix = viz_prefix ^ "_" in let prefix = viz_prefix ^ "_" in
let has_prefix = String.starts_with ~prefix dir_str in let has_prefix = String.starts_with ~prefix dir_str in
let has_valid_ending = let has_valid_ending =
if not has_prefix then false else if not has_prefix then false else
let ending = let ending =
String.(sub dir_str String.(sub dir_str
(length prefix) (length prefix)
@ -471,7 +353,7 @@ module Verify_cache_dir = struct
m "Invalid cache subdirectory name: '%s'" dir_str) m "Invalid cache subdirectory name: '%s'" dir_str)
let get_latest_viz_version viz_typ = let get_latest_viz_version viz_typ =
let* v_str, run_status = begin match viz_typ with let* v_str, run_status = begin match viz_typ with
| `Treemap -> | `Treemap ->
let cmd = Bos.Cmd.(v "modulectomy" % "--version") in let cmd = Bos.Cmd.(v "modulectomy" % "--version") in
Bos.OS.Cmd.(cmd |> run_out |> out_string) Bos.OS.Cmd.(cmd |> run_out |> out_string)
@ -480,7 +362,7 @@ module Verify_cache_dir = struct
Bos.OS.Cmd.(cmd |> run_out |> out_string) Bos.OS.Cmd.(cmd |> run_out |> out_string)
end in end in
match run_status with match run_status with
| (cmd_info, `Exited 0) -> | (cmd_info, `Exited 0) ->
begin try Ok (int_of_string v_str) with Failure _ -> begin try Ok (int_of_string v_str) with Failure _ ->
let msg = let msg =
Fmt.str "Couldn't parse latest version from %a: '%s'" Fmt.str "Couldn't parse latest version from %a: '%s'"
@ -490,7 +372,7 @@ module Verify_cache_dir = struct
Error (`Msg msg) Error (`Msg msg)
end end
| (cmd_info, _) -> | (cmd_info, _) ->
let msg = let msg =
Fmt.str "Error running visualization cmd: '%a'" Fmt.str "Error running visualization cmd: '%a'"
Bos.Cmd.pp (Bos.OS.Cmd.run_info_cmd cmd_info) Bos.Cmd.pp (Bos.OS.Cmd.run_info_cmd cmd_info)
in in
@ -544,8 +426,8 @@ module Verify_cache_dir = struct
type t = { type t = {
uuid : Uuidm.t; uuid : Uuidm.t;
job_name : string; job_name : string;
hash_opam_switch : string option; hash_opam_switch : Cstruct.t option;
hash_debug_bin : string option; hash_debug_bin : Cstruct.t option;
} }
let repr = let repr =
@ -556,11 +438,11 @@ module Verify_cache_dir = struct
in in
Caqti_type.custom ~encode ~decode Caqti_type.custom ~encode ~decode
Caqti_type.( Caqti_type.(
t4 tup4
Builder_db.Rep.uuid Builder_db.Rep.uuid
string string
(option octets) (option Builder_db.Rep.cstruct)
(option octets)) (option Builder_db.Rep.cstruct))
end end
@ -572,13 +454,12 @@ module Verify_cache_dir = struct
ba_opam_switch.sha256 hash_opam_switch, ba_opam_switch.sha256 hash_opam_switch,
ba_debug_bin.sha256 hash_debug_bin ba_debug_bin.sha256 hash_debug_bin
FROM build AS b FROM build AS b
WHERE b.main_binary IS NOT NULL
LEFT JOIN build_artifact AS ba_opam_switch ON LEFT JOIN build_artifact AS ba_opam_switch ON
ba_opam_switch.build = b.id ba_opam_switch.build = b.id
AND ba_opam_switch.filepath = 'opam-switch' AND ba_opam_switch.filepath = 'opam-switch'
LEFT JOIN build_artifact AS ba_debug_bin ON LEFT JOIN build_artifact AS ba_debug_bin ON
ba_debug_bin.build = b.id ba_debug_bin.build = b.id
AND ba_debug_bin.filepath LIKE '%.debug' AND ba_debug_bin.localpath LIKE '%.debug'
|} |}
let check_viz_nonempty ~cachedir ~viz_typ ~hash = let check_viz_nonempty ~cachedir ~viz_typ ~hash =
@ -586,7 +467,7 @@ module Verify_cache_dir = struct
let* latest_version = let* latest_version =
Viz_aux.get_viz_version_from_dirs ~cachedir ~viz_typ Viz_aux.get_viz_version_from_dirs ~cachedir ~viz_typ
in in
let viz_input_hash = Ohex.encode hash in let `Hex viz_input_hash = Hex.of_cstruct hash in
let* viz_path = let* viz_path =
Viz_aux.choose_versioned_viz_path Viz_aux.choose_versioned_viz_path
~cachedir ~cachedir
@ -601,7 +482,7 @@ module Verify_cache_dir = struct
let verify_viz_file_vizdeps ~cachedir build = let verify_viz_file_vizdeps ~cachedir build =
match build.Build.hash_opam_switch with match build.Build.hash_opam_switch with
| None -> | None ->
Logs.warn (fun m -> Logs.warn (fun m ->
m "%s: uuid '%a': Doesn't support dependencies viz because of \ m "%s: uuid '%a': Doesn't support dependencies viz because of \
missing 'opam-switch'" missing 'opam-switch'"
@ -610,7 +491,7 @@ module Verify_cache_dir = struct
| Some hash_opam_switch -> | Some hash_opam_switch ->
match match
check_viz_nonempty check_viz_nonempty
~cachedir ~cachedir
~viz_typ:`Dependencies ~viz_typ:`Dependencies
~hash:hash_opam_switch ~hash:hash_opam_switch
with with
@ -631,7 +512,7 @@ module Verify_cache_dir = struct
~cachedir ~cachedir
~viz_typ:`Treemap ~viz_typ:`Treemap
~hash:hash_debug_bin ~hash:hash_debug_bin
with with
| Ok () -> () | Ok () -> ()
| Error (`Msg err) -> | Error (`Msg err) ->
Logs.warn (fun m -> Logs.warn (fun m ->
@ -666,7 +547,7 @@ module Verify_cache_dir = struct
match extract_hash ~viz_typ build with match extract_hash ~viz_typ build with
| None -> () | None -> ()
| Some input_hash -> | Some input_hash ->
let input_hash = Ohex.encode input_hash in let `Hex input_hash = Hex.of_cstruct input_hash in
let viz_path = Viz_aux.viz_path let viz_path = Viz_aux.viz_path
~cachedir ~cachedir
~viz_typ ~viz_typ
@ -686,18 +567,18 @@ module Verify_cache_dir = struct
Fpath.pp viz_path) Fpath.pp viz_path)
type msg = [ `Msg of string ] type msg = [ `Msg of string ]
let open_error_msg : ('a, msg) result -> ('a, [> msg]) result = let open_error_msg : ('a, msg) result -> ('a, [> msg]) result =
function function
| Ok _ as v -> v | Ok _ as v -> v
| Error e -> Error (e : msg :> [> msg]) | Error e -> Error (e : msg :> [> msg])
let verify () datadir cachedir = let verify () datadir cachedir =
let module Viz_aux = Builder_web.Viz_aux in let module Viz_aux = Builder_web.Viz_aux in
begin begin
let* datadir = Fpath.of_string datadir |> open_error_msg in let* datadir = Fpath.of_string datadir |> open_error_msg in
let* cachedir = match cachedir with let* cachedir = match cachedir with
| Some d -> Fpath.of_string d |> open_error_msg | Some d -> Fpath.of_string d |> open_error_msg
| None -> Ok Fpath.(datadir / "_cache") | None -> Ok Fpath.(datadir / "_cache")
in in
let* () = verify_dir_exists cachedir in let* () = verify_dir_exists cachedir in
@ -740,8 +621,8 @@ end
module Asn = struct module Asn = struct
let decode_strict codec cs = let decode_strict codec cs =
match Asn.decode codec cs with match Asn.decode codec cs with
| Ok (a, rest) -> | Ok (a, cs) ->
if String.length rest = 0 if Cstruct.length cs = 0
then Ok a then Ok a
else Error "trailing bytes" else Error "trailing bytes"
| Error (`Parse msg) -> Error ("parse error: " ^ msg) | Error (`Parse msg) -> Error ("parse error: " ^ msg)
@ -759,22 +640,20 @@ module Asn = struct
let console_of_cs, console_to_cs = projections_of console let console_of_cs, console_to_cs = projections_of console
end end
(* NOTE: this function is duplicated in lib/model.ml *)
let console_of_string data = let console_of_string data =
let lines = String.split_on_char '\n' data in let lines = String.split_on_char '\n' data in
List.filter_map (fun line -> (* remove last empty line *)
match String.index line ':' with let lines =
| 0 -> Logs.warn (fun m -> m "console line starting with colon %S" line); None match List.rev lines with
| i -> | "" :: lines -> List.rev lines
(* the timestamp is of the form "%fs", e.g. 0.867s; so chop off the 's' *) | _ -> lines
let delta = float_of_string (String.sub line 0 (i - 1)) in in
let delta = Int64.to_int (Duration.of_f delta) in List.map (fun line ->
let line = String.sub line i (String.length line - i) in match String.split_on_char ':' line with
Some (delta, line) | ts :: tail ->
| exception Not_found -> let delta = float_of_string (String.sub ts 0 (String.length ts - 1)) in
if line <> "" then Int64.to_int (Duration.of_f delta), String.concat ":" tail
Logs.warn (fun m -> m "Unexpected console line %S" line); | _ -> assert false)
None)
lines lines
let extract_full () datadir dest uuid = let extract_full () datadir dest uuid =
@ -801,16 +680,16 @@ let extract_full () datadir dest uuid =
let out = console_of_string console in let out = console_of_string console in
let* artifacts = Db.collect_list Builder_db.Build_artifact.get_all_by_build build_id in let* artifacts = Db.collect_list Builder_db.Build_artifact.get_all_by_build build_id in
let* data = let* data =
List.fold_left (fun acc (_, { Builder_db.filepath; sha256; _ }) -> List.fold_left (fun acc (_, { Builder_db.filepath; localpath; _ }) ->
let* acc = acc in let* acc = acc in
let* data = Bos.OS.File.read Fpath.(v datadir // artifact_path sha256) in let* data = Bos.OS.File.read Fpath.(v datadir // localpath) in
Ok ((filepath, data) :: acc)) Ok ((filepath, data) :: acc))
(Ok []) (Ok [])
artifacts artifacts
in in
let exec = (job, uuid, out, start, finish, result, data) in let exec = (job, uuid, out, start, finish, result, data) in
let data = Builder.Asn.exec_to_str exec in let cs = Builder.Asn.exec_to_cs exec in
Bos.OS.File.write (Fpath.v dest) data Bos.OS.File.write (Fpath.v dest) (Cstruct.to_string cs)
in in
or_die 1 r or_die 1 r
@ -822,89 +701,81 @@ let help man_format cmds = function
else `Error (true, "Unknown command: " ^ cmd) else `Error (true, "Unknown command: " ^ cmd)
let dbpath = let dbpath =
let doc = "sqlite3 database path." in let doc = "sqlite3 database path" in
Cmdliner.Arg.(value & Cmdliner.Arg.(value &
opt non_dir_file (Builder_system.default_datadir ^ "/builder.sqlite3") & opt non_dir_file (Builder_system.default_datadir ^ "/builder.sqlite3") &
info ~doc ["dbpath"]) info ~doc ["dbpath"])
let dbpath_new = let dbpath_new =
let doc = "sqlite3 database path." in let doc = "sqlite3 database path" in
Cmdliner.Arg.(value & Cmdliner.Arg.(value &
opt string (Builder_system.default_datadir ^ "/builder.sqlite3") & opt string (Builder_system.default_datadir ^ "/builder.sqlite3") &
info ~doc ["dbpath"]) info ~doc ["dbpath"])
let datadir = let datadir =
let doc = "Data directory." in let doc = "data directory" in
let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
Cmdliner.Arg.(value & Cmdliner.Arg.(value &
opt dir Builder_system.default_datadir & opt dir Builder_system.default_datadir &
info ~doc ~env ["datadir"; "d"]) info ~doc ["datadir"; "d"])
let cachedir = let cachedir =
let doc = "Cache directory." in let doc = "cache directory" in
let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_CACHEDIR" in
Cmdliner.Arg.(value & Cmdliner.Arg.(value &
opt (some dir) None & opt (some dir) None &
info ~doc ~env ["cachedir"]) info ~doc ["cachedir"])
let jobname = let jobname =
let doc = "Jobname." in let doc = "jobname" in
Cmdliner.Arg.(required & Cmdliner.Arg.(required &
pos 0 (some string) None & pos 0 (some string) None &
info ~doc ~docv:"JOBNAME" []) info ~doc ~docv:"JOBNAME" [])
let username = let username =
let doc = "Username." in let doc = "username" in
Cmdliner.Arg.(required & Cmdliner.Arg.(required &
pos 0 (some string) None & pos 0 (some string) None &
info ~doc ~docv:"USERNAME" []) info ~doc ~docv:"USERNAME" [])
let password_iter = let password_iter =
let doc = "Password hash count." in let doc = "password hash count" in
Cmdliner.Arg.(value & Cmdliner.Arg.(value &
opt (some int) None & opt (some int) None &
info ~doc ["hash-count"]) info ~doc ["hash-count"])
let scrypt_n = let scrypt_n =
let doc = "scrypt n parameter." in let doc = "scrypt n parameter" in
Cmdliner.Arg.(value & Cmdliner.Arg.(value &
opt (some int) None & opt (some int) None &
info ~doc ["scrypt-n"]) info ~doc ["scrypt-n"])
let scrypt_r = let scrypt_r =
let doc = "scrypt r parameter." in let doc = "scrypt r parameter" in
Cmdliner.Arg.(value & Cmdliner.Arg.(value &
opt (some int) None & opt (some int) None &
info ~doc ["scrypt-r"]) info ~doc ["scrypt-r"])
let scrypt_p = let scrypt_p =
let doc = "scrypt p parameter." in let doc = "scrypt p parameter" in
Cmdliner.Arg.(value & Cmdliner.Arg.(value &
opt (some int) None & opt (some int) None &
info ~doc ["scrypt-p"]) info ~doc ["scrypt-p"])
let unrestricted = let unrestricted =
let doc = "Unrestricted user." in let doc = "unrestricted user" in
Cmdliner.Arg.(value & flag & info ~doc [ "unrestricted" ]) Cmdliner.Arg.(value & flag & info ~doc [ "unrestricted" ])
let job = let job =
let doc = "Job." in let doc = "job" in
Cmdliner.Arg.(required & Cmdliner.Arg.(required &
pos 1 (some string) None & pos 1 (some string) None &
info ~doc ~docv:"JOB" []) info ~doc ~docv:"JOB" [])
let build = let build =
let doc = "Build uuid." in let doc = "build uuid" in
Cmdliner.Arg.(required & Cmdliner.Arg.(required &
pos 0 (some string) None & pos 0 (some string) None &
info ~doc ~docv:"BUILD" []) info ~doc ~docv:"BUILD" [])
let platform =
let doc = "Platform." in
Cmdliner.Arg.(value &
opt (some string) None &
info ~doc ~docv:"PLATFORM" ["platform"])
let full_dest = let full_dest =
let doc = "path to write build file" in let doc = "path to write build file" in
Cmdliner.Arg.(value & opt string "full" & Cmdliner.Arg.(value & opt string "full" &
@ -978,102 +849,6 @@ let job_remove_cmd =
let info = Cmd.info ~doc "job-remove" in let info = Cmd.info ~doc "job-remove" in
Cmd.v info term Cmd.v info term
let vacuum_cmd =
let jobs =
Arg.(value & opt_all string [] & info ~doc:"Job(s). Can be passed multiple times." ~docv:"JOB" ["job"])
in
let ptime_conv =
let parse s =
match Ptime.of_rfc3339 s with
| Ok (ptime, (None | Some 0), _) ->
Ok (`Date ptime)
| Ok _ -> Error (`Msg "only UTC timezone is allowed")
| Error `RFC3339 (_range, e) ->
Error (`Msg (Format.asprintf "bad RFC3339 date-time: %a" Ptime.pp_rfc3339_error e))
and pp ppf (`Date ptime) =
Ptime.pp_rfc3339 () ppf ptime
in
Arg.conv (parse, pp)
in
let older_than =
let doc = "cut-off date-time" in
Arg.(required & pos 0 (some ptime_conv) None & info ~doc ~docv:"OLDER-THAN" [])
in
(* TODO(reynir): for now we disallow 0 so as to avoid ending up with jobs
without builds. I'm unsure how well builder-web works with empty jobs.
Then again we don't do this check for older-than... *)
let latest_n =
let doc = "latest N" in
let latest_n =
let parse s =
match Arg.(conv_parser int) s with
| Ok n when n > 0 -> Ok (`Latest n)
| Ok _ -> Error (`Msg "must be positive integer")
| Error _ as e -> e
and pp ppf (`Latest n) =
Arg.(conv_printer int) ppf n
in
Arg.conv (parse, pp)
in
Arg.(required & pos 0 (some latest_n) None & info ~doc ~docv:"LATEST-N" [])
in
let latest_n_succesful =
let doc = "latest N successful" in
let latest_n =
let parse s =
match Arg.(conv_parser int) s with
| Ok n when n > 0 -> Ok (`Latest_successful n)
| Ok _ -> Error (`Msg "must be positive integer")
| Error _ as e -> e
and pp ppf (`Latest_successful n) =
Arg.(conv_printer int) ppf n
in
Arg.conv (parse, pp)
in
Arg.(required & pos 0 (some latest_n) None & info ~doc ~docv:"LATEST-N" [])
in
let job_default_txt =
"By default all jobs are vacuumed, unless any jobs are specified using --job."
in
let vacuum_older_than =
let doc =
Printf.sprintf "Remove builds older than a date. %s" job_default_txt
in
let info = Cmd.info ~doc "older-than" in
let term =
Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ older_than)
in
Cmd.v info term
in
let vacuum_except_latest_n =
let doc =
Printf.sprintf "Remove all builds except for the latest N builds (successful or not). %s"
job_default_txt
in
let info = Cmd.info ~doc "except-latest" in
let term =
Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ latest_n)
in
Cmd.v info term
in
let vacuum_except_latest_n_successful =
let doc =
Printf.sprintf "Remove all builds except for builds newer than the Nth latest successful build. %s"
job_default_txt
in
let info = Cmd.info ~doc "except-latest-successful" in
let term =
Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ latest_n_succesful)
in
Cmd.v info term
in
let doc = "Remove old builds" in
Cmd.group (Cmd.info ~doc "vacuum") [
vacuum_older_than;
vacuum_except_latest_n;
vacuum_except_latest_n_successful;
]
let extract_full_cmd = let extract_full_cmd =
let doc = "extract a build from the database" in let doc = "extract a build from the database" in
let term = Term.( let term = Term.(
@ -1117,7 +892,7 @@ let default_cmd, default_info =
Cmd.info ~doc "builder-db" Cmd.info ~doc "builder-db"
let () = let () =
Mirage_crypto_rng_unix.use_default (); Mirage_crypto_rng_unix.initialize ();
Cmdliner.Cmd.group Cmdliner.Cmd.group
~default:default_cmd default_info ~default:default_cmd default_info
[ help_cmd; migrate_cmd; [ help_cmd; migrate_cmd;
@ -1127,7 +902,6 @@ let () =
verify_input_id_cmd; verify_input_id_cmd;
verify_data_dir_cmd; verify_data_dir_cmd;
verify_cache_dir_cmd; verify_cache_dir_cmd;
extract_full_cmd; extract_full_cmd ]
vacuum_cmd ]
|> Cmdliner.Cmd.eval |> Cmdliner.Cmd.eval
|> exit |> exit

View file

@ -30,7 +30,7 @@ let write_raw s buf =
safe_close s >|= fun () -> safe_close s >|= fun () ->
Error `Exception) Error `Exception)
in in
(* Logs.debug (fun m -> m "writing %a" (Ohex.pp_hexdump ()) (Bytes.unsafe_to_string buf)) ; *) (* Logs.debug (fun m -> m "writing %a" Cstruct.hexdump_pp (Cstruct.of_bytes buf)) ; *)
w 0 (Bytes.length buf) w 0 (Bytes.length buf)
let process = let process =
@ -81,28 +81,28 @@ let init_influx name data =
let run_batch_viz ~cachedir ~datadir ~configdir = let run_batch_viz ~cachedir ~datadir ~configdir =
let open Rresult.R.Infix in let open Rresult.R.Infix in
begin begin
let script = Fpath.(configdir / "batch-viz.sh") let script = Fpath.(configdir / "batch-viz.sh")
and script_log = Fpath.(cachedir / "batch-viz.log") and script_log = Fpath.(cachedir / "batch-viz.log")
and viz_script = Fpath.(configdir / "upload-hooks" / "visualizations.sh") and viz_script = Fpath.(configdir / "upload-hooks" / "visualizations.sh")
in in
Bos.OS.File.exists script >>= fun script_exists -> Bos.OS.File.exists script >>= fun script_exists ->
if not script_exists then begin if not script_exists then begin
Logs.warn (fun m -> m "Didn't find %s" (Fpath.to_string script)); Logs.warn (fun m -> m "Didn't find %s" (Fpath.to_string script));
Ok () Ok ()
end else end else
let args = let args =
[ "--cache-dir=" ^ Fpath.to_string cachedir; [ "--cache-dir=" ^ Fpath.to_string cachedir;
"--data-dir=" ^ Fpath.to_string datadir; "--data-dir=" ^ Fpath.to_string datadir;
"--viz-script=" ^ Fpath.to_string viz_script ] "--viz-script=" ^ Fpath.to_string viz_script ]
|> List.map (fun s -> "\"" ^ String.escaped s ^ "\"") |> List.map (fun s -> "\"" ^ String.escaped s ^ "\"")
|> String.concat " " |> String.concat " "
in in
(*> Note: The reason for appending, is that else a new startup could (*> Note: The reason for appending, is that else a new startup could
overwrite an existing running batch's log*) overwrite an existing running batch's log*)
(Fpath.to_string script ^ " " ^ args (Fpath.to_string script ^ " " ^ args
^ " 2>&1 >> " ^ Fpath.to_string script_log ^ " 2>&1 >> " ^ Fpath.to_string script_log
^ " &") ^ " &")
|> Sys.command |> Sys.command
|> ignore |> ignore
|> Result.ok |> Result.ok
end end
@ -113,37 +113,27 @@ let run_batch_viz ~cachedir ~datadir ~configdir =
m "Error while starting batch-viz.sh: %a" m "Error while starting batch-viz.sh: %a"
Rresult.R.pp_msg err) Rresult.R.pp_msg err)
let setup_app level influx port host datadir cachedir configdir run_batch_viz_flag expired_jobs = let setup_app level influx port host datadir cachedir configdir run_batch_viz_flag =
let dbpath = Printf.sprintf "%s/builder.sqlite3" datadir in let dbpath = Printf.sprintf "%s/builder.sqlite3" datadir in
let datadir = Fpath.v datadir in let datadir = Fpath.v datadir in
let cachedir = let cachedir =
cachedir |> Option.fold ~none:Fpath.(datadir / "_cache") ~some:Fpath.v cachedir |> Option.fold ~none:Fpath.(datadir / "_cache") ~some:Fpath.v
in in
let configdir = Fpath.v configdir in let configdir = Fpath.v configdir in
let () = Mirage_crypto_rng_unix.use_default () in
let () = init_influx "builder-web" influx in let () = init_influx "builder-web" influx in
let () = let () =
if run_batch_viz_flag then if run_batch_viz_flag then
run_batch_viz ~cachedir ~datadir ~configdir run_batch_viz ~cachedir ~datadir ~configdir
in in
match Builder_web.init dbpath datadir with match Builder_web.init dbpath datadir with
| exception Sqlite3.Error e ->
Format.eprintf "Error: @[@,%s.\
@,Does the database file exist? Create with `builder-db migrate`.@]\n%!"
e;
exit 2
| Error (#Caqti_error.load as e) -> | Error (#Caqti_error.load as e) ->
Format.eprintf "Error: %a\n%!" Caqti_error.pp e; Format.eprintf "Error: %a\n%!" Caqti_error.pp e;
exit 2 exit 2
| Error (`Wrong_version _ as e) ->
Format.eprintf "Error: @[@,%a.\
@,Migrate database version with `builder-migrations`,\
@,or start with a fresh database with `builder-db migrate`.@]\n%!"
Builder_web.pp_error e;
| Error ( | Error (
#Caqti_error.connect #Caqti_error.connect
| #Caqti_error.call_or_retrieve | #Caqti_error.call_or_retrieve
| `Msg _ as e | `Msg _
| `Wrong_version _ as e
) -> ) ->
Format.eprintf "Error: %a\n%!" Builder_web.pp_error e; Format.eprintf "Error: %a\n%!" Builder_web.pp_error e;
exit 1 exit 1
@ -156,20 +146,13 @@ let setup_app level influx port host datadir cachedir configdir run_batch_viz_fl
| Some Error -> Some `Error | Some Error -> Some `Error
| Some App -> None | Some App -> None
in in
let error_handler = Dream.error_template Builder_web.error_template in
Dream.initialize_log ?level (); Dream.initialize_log ?level ();
let dream_routes = Builder_web.( Dream.run ~port ~interface:host ~tls:false
routes ~datadir ~cachedir ~configdir ~expired_jobs
|> to_dream_routes
)
in
Dream.run ~port ~interface:host ~tls:false ~error_handler
@@ Dream.logger @@ Dream.logger
@@ Dream_encoding.compress
@@ Dream.sql_pool ("sqlite3:" ^ dbpath) @@ Dream.sql_pool ("sqlite3:" ^ dbpath)
@@ Http_status_metrics.handle @@ Http_status_metrics.handle
@@ Builder_web.Middleware.remove_trailing_url_slash @@ Builder_web.Middleware.remove_trailing_url_slash
@@ Dream.router dream_routes @@ Dream.router (Builder_web.routes ~datadir ~cachedir ~configdir)
open Cmdliner open Cmdliner
@ -197,11 +180,10 @@ let ip_port : (Ipaddr.V4.t * int) Arg.conv =
let datadir = let datadir =
let doc = "data directory" in let doc = "data directory" in
let docv = "DATA_DIR" in let docv = "DATA_DIR" in
let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
Arg.( Arg.(
value & value &
opt dir Builder_system.default_datadir & opt dir Builder_system.default_datadir &
info ~env [ "d"; "datadir" ] ~doc ~docv info [ "d"; "datadir" ] ~doc ~docv
) )
let cachedir = let cachedir =
@ -243,15 +225,11 @@ let run_batch_viz =
log is written to CACHE_DIR/batch-viz.log" in log is written to CACHE_DIR/batch-viz.log" in
Arg.(value & flag & info [ "run-batch-viz" ] ~doc) Arg.(value & flag & info [ "run-batch-viz" ] ~doc)
let expired_jobs =
let doc = "Amount of days after which a job is considered to be inactive if \
no successful build has been achieved (use 0 for infinite)" in
Arg.(value & opt int 30 & info [ "expired-jobs" ] ~doc)
let () = let () =
let term = let term =
Term.(const setup_app $ Logs_cli.level () $ influx $ port $ host $ datadir $ Term.(const setup_app $ Logs_cli.level () $ influx $ port $ host $ datadir $
cachedir $ configdir $ run_batch_viz $ expired_jobs) cachedir $ configdir $ run_batch_viz)
in in
let info = Cmd.info "Builder web" ~doc:"Builder web" ~man:[] in let info = Cmd.info "Builder web" ~doc:"Builder web" ~man:[] in
Cmd.v info term Cmd.v info term

View file

@ -7,13 +7,10 @@
(public_name builder-web) (public_name builder-web)
(name builder_web_app) (name builder_web_app)
(modules builder_web_app) (modules builder_web_app)
(libraries builder_web builder_system dream dream-encoding (libraries builder_web builder_system mirage-crypto-rng.unix cmdliner logs.cli metrics metrics-lwt metrics-influx metrics-rusage ipaddr ipaddr.unix http_status_metrics))
mirage-crypto-rng.unix cmdliner logs.cli metrics metrics-lwt metrics-influx
metrics-rusage ipaddr ipaddr.unix http_status_metrics))
(executable (executable
(public_name builder-db) (public_name builder-db)
(name builder_db_app) (name builder_db_app)
(modules builder_db_app) (modules builder_db_app)
(libraries builder_web builder_db builder_system caqti.blocking uri bos fmt (libraries builder_web builder_db builder_system caqti.blocking uri bos fmt logs logs.cli logs.fmt cmdliner mirage-crypto-rng.unix builder))
logs logs.cli logs.fmt cmdliner mirage-crypto-rng.unix builder))

View file

@ -61,10 +61,9 @@ let help man_format migrations = function
let datadir = let datadir =
let doc = "data directory containing builder.sqlite3 and data files" in let doc = "data directory containing builder.sqlite3 and data files" in
let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
Cmdliner.Arg.(value & Cmdliner.Arg.(value &
opt dir Builder_system.default_datadir & opt dir Builder_system.default_datadir &
info ~env ~doc ["datadir"; "d"]) info ~doc ["datadir"; "d"])
let setup_log = let setup_log =
let setup_log level = let setup_log level =
@ -180,8 +179,6 @@ let () =
[ f20210910 ]; [ f20210910 ];
actions (module M20211105); actions (module M20211105);
actions (module M20220509); actions (module M20220509);
actions (module M20230911);
actions (module M20230914);
]) ])
|> Cmd.eval |> Cmd.eval
|> exit |> exit

View file

@ -1,5 +1,4 @@
(executable (executable
(public_name builder-migrations) (public_name builder-migrations)
(name builder_migrations) (name builder_migrations)
(libraries builder_system builder_db caqti caqti-driver-sqlite3 (libraries builder_system builder_db caqti caqti-driver-sqlite3 caqti.blocking cmdliner logs logs.cli logs.fmt opam-format bos duration))
caqti.blocking cmdliner logs logs.cli logs.fmt opam-format bos duration))

View file

@ -18,11 +18,11 @@ let all_builds =
"SELECT id FROM build" "SELECT id FROM build"
let bin_artifact = let bin_artifact =
Caqti_type.int64 ->* Caqti_type.(t2 int64 string) @@ Caqti_type.int64 ->* Caqti_type.(tup2 int64 string) @@
"SELECT id, filepath FROM build_artifact WHERE build = ? AND filepath LIKE 'bin/%'" "SELECT id, filepath FROM build_artifact WHERE build = ? AND filepath LIKE 'bin/%'"
let set_main_binary = let set_main_binary =
Caqti_type.(t2 int64 (option string)) ->. Caqti_type.unit @@ Caqti_type.(tup2 int64 (option string)) ->. Caqti_type.unit @@
"UPDATE build SET main_binary = $2 WHERE id = $1" "UPDATE build SET main_binary = $2 WHERE id = $1"
let migrate _datadir (module Db : Caqti_blocking.CONNECTION) = let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =

View file

@ -37,21 +37,21 @@ let new_build_file =
|} |}
let collect_build_artifact = let collect_build_artifact =
Caqti_type.unit ->* Caqti_type.(t3 int64 (t3 string string octets) int64) @@ Caqti_type.unit ->* Caqti_type.(tup3 int64 (tup3 string string octets) int64) @@
"SELECT id, filepath, localpath, sha256, build FROM build_artifact" "SELECT id, filepath, localpath, sha256, build FROM build_artifact"
let collect_build_file = let collect_build_file =
Caqti_type.unit ->* Caqti_type.(t3 int64 (t3 string string octets) int64) @@ Caqti_type.unit ->* Caqti_type.(tup3 int64 (tup3 string string octets) int64) @@
"SELECT id, filepath, localpath, sha256, build FROM build_file" "SELECT id, filepath, localpath, sha256, build FROM build_file"
let insert_new_build_artifact = let insert_new_build_artifact =
Caqti_type.(t3 int64 (t4 string string octets int64) int64) ->. Caqti_type.unit @@ Caqti_type.(tup3 int64 (tup4 string string octets int64) int64) ->. Caqti_type.unit @@
{| INSERT INTO new_build_artifact (id, filepath, localpath, sha256, size, build) {| INSERT INTO new_build_artifact (id, filepath, localpath, sha256, size, build)
VALUES (?, ?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?)
|} |}
let insert_new_build_file = let insert_new_build_file =
Caqti_type.(t3 int64 (t4 string string octets int64) int64) ->. Caqti_type.unit @@ Caqti_type.(tup3 int64 (tup4 string string octets int64) int64) ->. Caqti_type.unit @@
{| INSERT INTO new_build_file (id, filepath, localpath, sha256, size, build) {| INSERT INTO new_build_file (id, filepath, localpath, sha256, size, build)
VALUES (?, ?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?)
|} |}

View file

@ -3,7 +3,7 @@ module Rep = Builder_db.Rep
open Grej.Infix open Grej.Infix
let broken_builds = let broken_builds =
Caqti_type.unit ->* Caqti_type.t3 (Rep.id `build) Rep.uuid Caqti_type.string @@ Caqti_type.unit ->* Caqti_type.tup3 (Rep.id `build) Rep.uuid Caqti_type.string @@
{| SELECT b.id, b.uuid, job.name FROM build b, job {| SELECT b.id, b.uuid, job.name FROM build b, job
WHERE result_kind = 0 AND result_code = 0 AND main_binary IS NOT NULL AND job.id = b.job AND WHERE result_kind = 0 AND result_code = 0 AND main_binary IS NOT NULL AND job.id = b.job AND
(SELECT COUNT( * ) FROM build_artifact a (SELECT COUNT( * ) FROM build_artifact a

View file

@ -7,11 +7,11 @@ let rollback_doc = "add datadir prefix to build_artifact.localpath"
open Grej.Infix open Grej.Infix
let build_artifacts = let build_artifacts =
Caqti_type.unit ->* Caqti_type.t2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath @@ Caqti_type.unit ->* Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath @@
"SELECT id, localpath FROM build_artifact" "SELECT id, localpath FROM build_artifact"
let build_artifact_update_localpath = let build_artifact_update_localpath =
Caqti_type.t2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath ->. Caqti_type.unit @@ Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath ->. Caqti_type.unit @@
"UPDATE build_artifact SET localpath = $2 WHERE id = $1" "UPDATE build_artifact SET localpath = $2 WHERE id = $1"
(* We are not migrating build_file because it is unused *) (* We are not migrating build_file because it is unused *)

View file

@ -54,20 +54,20 @@ let old_build =
let collect_old_build = let collect_old_build =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.(t3 Builder_db.Rep.untyped_id Caqti_type.(tup3 Builder_db.Rep.untyped_id
(t3 (t4 string int64 int64 int64) (tup3 (tup4 string int64 int64 int64)
(t4 int64 int (option int) (option string)) (tup4 int64 int (option int) (option string))
(t3 octets string (option string))) (tup3 octets string (option string)))
Builder_db.Rep.untyped_id) @@ Builder_db.Rep.untyped_id) @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg, {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg,
console, script, main_binary, job console, script, main_binary, job
FROM build |} FROM build |}
let insert_new_build = let insert_new_build =
Caqti_type.(t3 Builder_db.Rep.untyped_id Caqti_type.(tup3 Builder_db.Rep.untyped_id
(t3 (t4 string int64 int64 int64) (tup3 (tup4 string int64 int64 int64)
(t4 int64 int (option int) (option string)) (tup4 int64 int (option int) (option string))
(t3 octets string (option Builder_db.Rep.untyped_id))) (tup3 octets string (option Builder_db.Rep.untyped_id)))
Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@ Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
{| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, {| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind,
result_code, result_msg, console, script, main_binary, job) result_code, result_msg, console, script, main_binary, job)
@ -82,7 +82,7 @@ let rename_build =
"ALTER TABLE new_build RENAME TO build" "ALTER TABLE new_build RENAME TO build"
let find_main_artifact_id = let find_main_artifact_id =
Caqti_type.(t2 Builder_db.Rep.untyped_id string) ->! Builder_db.Rep.untyped_id @@ Caqti_type.(tup2 Builder_db.Rep.untyped_id string) ->! Builder_db.Rep.untyped_id @@
"SELECT id FROM build_artifact WHERE build = $1 AND filepath = $2" "SELECT id FROM build_artifact WHERE build = $1 AND filepath = $2"
let find_main_artifact_filepath = let find_main_artifact_filepath =
@ -91,20 +91,20 @@ let find_main_artifact_filepath =
let collect_new_build = let collect_new_build =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.(t3 Builder_db.Rep.untyped_id Caqti_type.(tup3 Builder_db.Rep.untyped_id
(t3 (t4 string int64 int64 int64) (tup3 (tup4 string int64 int64 int64)
(t4 int64 int (option int) (option string)) (tup4 int64 int (option int) (option string))
(t3 octets string (option Builder_db.Rep.untyped_id))) (tup3 octets string (option Builder_db.Rep.untyped_id)))
Builder_db.Rep.untyped_id) @@ Builder_db.Rep.untyped_id) @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg, {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg,
console, script, main_binary, job console, script, main_binary, job
FROM build |} FROM build |}
let insert_old_build = let insert_old_build =
Caqti_type.(t3 Builder_db.Rep.untyped_id Caqti_type.(tup3 Builder_db.Rep.untyped_id
(t3 (t4 string int64 int64 int64) (tup3 (tup4 string int64 int64 int64)
(t4 int64 int (option int) (option string)) (tup4 int64 int (option int) (option string))
(t3 octets string (option string))) (tup3 octets string (option string)))
Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@ Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
{| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, {| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind,
result_code, result_msg, console, script, main_binary, job) result_code, result_msg, console, script, main_binary, job)

View file

@ -34,21 +34,21 @@ let old_user =
let collect_old_user = let collect_old_user =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.(t4 int64 string (t2 octets octets) (t3 int64 int64 int64)) @@ Caqti_type.(tup4 int64 string (tup2 octets octets) (tup3 int64 int64 int64)) @@
"SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p FROM user" "SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p FROM user"
let collect_new_user = let collect_new_user =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.(t4 int64 string (t2 octets octets) (t4 int64 int64 int64 bool)) @@ Caqti_type.(tup4 int64 string (tup2 octets octets) (tup4 int64 int64 int64 bool)) @@
"SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted FROM user" "SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted FROM user"
let insert_new_user = let insert_new_user =
Caqti_type.(t4 int64 string (t2 octets octets) (t4 int64 int64 int64 bool)) ->. Caqti_type.(tup4 int64 string (tup2 octets octets) (tup4 int64 int64 int64 bool)) ->.
Caqti_type.unit @@ Caqti_type.unit @@
"INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) VALUES (?, ?, ?, ?, ?, ?, ?, ?)" "INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"
let insert_old_user = let insert_old_user =
Caqti_type.(t4 int64 string (t2 octets octets) (t3 int64 int64 int64)) ->. Caqti_type.(tup4 int64 string (tup2 octets octets) (tup3 int64 int64 int64)) ->.
Caqti_type.unit @@ Caqti_type.unit @@
"INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p) VALUES (?, ?, ?, ?, ?, ?, ?)" "INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p) VALUES (?, ?, ?, ?, ?, ?, ?)"

View file

@ -42,7 +42,7 @@ let latest_successful_build =
let build_artifacts = let build_artifacts =
Builder_db.Rep.untyped_id ->* Builder_db.Rep.untyped_id ->*
Caqti_type.t2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@ Caqti_type.tup2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
{| SELECT a.filepath, a.localpath {| SELECT a.filepath, a.localpath
FROM build_artifact a FROM build_artifact a
WHERE a.build = ? WHERE a.build = ?
@ -106,7 +106,7 @@ let insert_tag =
"INSERT INTO tag (tag) VALUES (?)" "INSERT INTO tag (tag) VALUES (?)"
let insert_job_tag = let insert_job_tag =
Caqti_type.(t3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->. Caqti_type.(tup3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
Caqti_type.unit @@ Caqti_type.unit @@
"INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)" "INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)"

View file

@ -20,7 +20,7 @@ let latest_successful_build =
let build_artifacts = let build_artifacts =
Builder_db.Rep.untyped_id ->* Builder_db.Rep.untyped_id ->*
Caqti_type.t2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@ Caqti_type.tup2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
{| SELECT a.filepath, a.localpath {| SELECT a.filepath, a.localpath
FROM build_artifact a FROM build_artifact a
WHERE a.build = ? WHERE a.build = ?
@ -31,7 +31,7 @@ let insert_tag =
"INSERT INTO tag (tag) VALUES (?)" "INSERT INTO tag (tag) VALUES (?)"
let insert_job_tag = let insert_job_tag =
Caqti_type.(t3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->. Caqti_type.(tup3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
Caqti_type.unit @@ Caqti_type.unit @@
"INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)" "INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)"

View file

@ -55,11 +55,11 @@ let drop_input_id_from_build =
let builds = let builds =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.t4 Caqti_type.tup4
Builder_db.Rep.untyped_id Builder_db.Rep.untyped_id
Caqti_type.octets Builder_db.Rep.cstruct
Caqti_type.octets Builder_db.Rep.cstruct
Caqti_type.octets @@ Builder_db.Rep.cstruct @@
{| SELECT b.id, opam.sha256, env.sha256, system.sha256 {| SELECT b.id, opam.sha256, env.sha256, system.sha256
FROM build b, build_artifact opam, build_artifact env, build_artifact system FROM build b, build_artifact opam, build_artifact env, build_artifact system
WHERE opam.filepath = 'opam-switch' AND env.filepath = 'build-environment' WHERE opam.filepath = 'opam-switch' AND env.filepath = 'build-environment'
@ -68,7 +68,7 @@ let builds =
|} |}
let set_input_id = let set_input_id =
Caqti_type.t2 Builder_db.Rep.untyped_id Caqti_type.octets ->. Caqti_type.unit @@ Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.cstruct ->. Caqti_type.unit @@
"UPDATE build SET input_id = $2 WHERE id = $1" "UPDATE build SET input_id = $2 WHERE id = $1"
let migrate _datadir (module Db : Caqti_blocking.CONNECTION) = let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
@ -76,7 +76,7 @@ let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
Db.exec add_input_id_to_build () >>= fun () -> Db.exec add_input_id_to_build () >>= fun () ->
Db.collect_list builds () >>= fun builds -> Db.collect_list builds () >>= fun builds ->
Grej.list_iter_result (fun (id, opam_sha, env_sha, pkg_sha) -> Grej.list_iter_result (fun (id, opam_sha, env_sha, pkg_sha) ->
let input_id = Digestif.SHA256.(to_raw_string (digestv_string [ opam_sha ; env_sha ; pkg_sha ])) in let input_id = Mirage_crypto.Hash.SHA256.digest (Cstruct.concat [ opam_sha ; env_sha ; pkg_sha ]) in
Db.exec set_input_id (id, input_id)) Db.exec set_input_id (id, input_id))
builds >>= fun () -> builds >>= fun () ->
Db.exec (Grej.set_version new_version) () Db.exec (Grej.set_version new_version) ()

View file

@ -2,7 +2,7 @@ open Grej.Infix
let orb_left_in_builds = let orb_left_in_builds =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@ Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
{| SELECT id, localpath FROM build_artifact {| SELECT id, localpath FROM build_artifact
WHERE filepath = 'orb.deb' OR filepath = 'orb.txz' WHERE filepath = 'orb.deb' OR filepath = 'orb.txz'
|} |}

View file

@ -2,7 +2,7 @@ open Grej.Infix
let deb_debug_left_in_builds = let deb_debug_left_in_builds =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.t4 (Builder_db.Rep.id `build_artifact) (Builder_db.Rep.id `build) Caqti_type.tup4 (Builder_db.Rep.id `build_artifact) (Builder_db.Rep.id `build)
Builder_db.Rep.fpath Builder_db.Rep.fpath @@ Builder_db.Rep.fpath Builder_db.Rep.fpath @@
{| SELECT id, build, localpath, filepath FROM build_artifact {| SELECT id, build, localpath, filepath FROM build_artifact
WHERE filepath LIKE '%.deb.debug' WHERE filepath LIKE '%.deb.debug'
@ -17,7 +17,7 @@ let get_localpath =
"SELECT localpath FROM build_artifact WHERE id = ?" "SELECT localpath FROM build_artifact WHERE id = ?"
let update_paths = let update_paths =
Caqti_type.t3 (Builder_db.Rep.id `build_artifact) Caqti_type.tup3 (Builder_db.Rep.id `build_artifact)
Builder_db.Rep.fpath Builder_db.Rep.fpath ->. Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
Caqti_type.unit @@ Caqti_type.unit @@
"UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1" "UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1"

View file

@ -2,7 +2,7 @@ open Grej.Infix
let all_builds_with_binary : (unit, [`build] Builder_db.Rep.id * [`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t = let all_builds_with_binary : (unit, [`build] Builder_db.Rep.id * [`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.t4 (Builder_db.Rep.id `build) (Builder_db.Rep.id `build_artifact) Caqti_type.tup4 (Builder_db.Rep.id `build) (Builder_db.Rep.id `build_artifact)
Builder_db.Rep.fpath Builder_db.Rep.fpath @@ Builder_db.Rep.fpath Builder_db.Rep.fpath @@
"SELECT b.id, b.main_binary, a.filepath, a.localpath FROM build b, build_artifact a WHERE b.main_binary = a.id AND b.main_binary IS NOT NULL" "SELECT b.id, b.main_binary, a.filepath, a.localpath FROM build b, build_artifact a WHERE b.main_binary = a.id AND b.main_binary IS NOT NULL"
@ -11,14 +11,14 @@ let build_not_stripped : ([`build] Builder_db.Rep.id, [`build_artifact] Builder_
"SELECT id FROM build_artifact WHERE build = ? AND filepath LIKE '%.debug'" "SELECT id FROM build_artifact WHERE build = ? AND filepath LIKE '%.debug'"
let update_paths : ([`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, unit, [ `Zero ]) Caqti_request.t = let update_paths : ([`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, unit, [ `Zero ]) Caqti_request.t =
Caqti_type.t3 (Builder_db.Rep.id `build_artifact) Caqti_type.tup3 (Builder_db.Rep.id `build_artifact)
Builder_db.Rep.fpath Builder_db.Rep.fpath ->. Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
Caqti_type.unit @@ Caqti_type.unit @@
"UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1" "UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1"
let add_artifact : ((Fpath.t * Fpath.t * string) * (int64 * [`build] Builder_db.Rep.id), unit, [ `Zero]) Caqti_request.t = let add_artifact : ((Fpath.t * Fpath.t * Cstruct.t) * (int64 * [`build] Builder_db.Rep.id), unit, [ `Zero]) Caqti_request.t =
Caqti_type.(t2 (t3 Builder_db.Rep.fpath Builder_db.Rep.fpath Caqti_type.octets) Caqti_type.(tup2 (tup3 Builder_db.Rep.fpath Builder_db.Rep.fpath Builder_db.Rep.cstruct)
(t2 Caqti_type.int64 (Builder_db.Rep.id `build))) ->. (tup2 Caqti_type.int64 (Builder_db.Rep.id `build))) ->.
Caqti_type.unit @@ Caqti_type.unit @@
"INSERT INTO build_artifact (filepath, localpath, sha256, size, build) VALUES (?, ?, ?, ?, ?)" "INSERT INTO build_artifact (filepath, localpath, sha256, size, build) VALUES (?, ?, ?, ?, ?)"
@ -48,8 +48,7 @@ let fixup datadir (module Db : Caqti_blocking.CONNECTION) =
in in
assert (r = 0); assert (r = 0);
Bos.OS.File.read (Fpath.append datadir artifact_lpath) >>= fun data -> Bos.OS.File.read (Fpath.append datadir artifact_lpath) >>= fun data ->
let size = Int64.of_int (String.length data) let size = Int64.of_int (String.length data) and sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
and sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
Db.exec update_paths (artifact_id, new_artifact_lpath, path artifact_fpath) >>= fun () -> Db.exec update_paths (artifact_id, new_artifact_lpath, path artifact_fpath) >>= fun () ->
Db.exec add_artifact ((artifact_fpath, artifact_lpath, sha256), (size, build_id)) >>= fun () -> Db.exec add_artifact ((artifact_fpath, artifact_lpath, sha256), (size, build_id)) >>= fun () ->
Db.find Builder_db.last_insert_rowid () >>= fun new_build_artifact_id -> Db.find Builder_db.last_insert_rowid () >>= fun new_build_artifact_id ->

View file

@ -2,11 +2,11 @@ open Grej.Infix
let all_build_artifacts_with_dot_slash : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t = let all_build_artifacts_with_dot_slash : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@ Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
"SELECT id, filepath FROM build_artifact WHERE filepath LIKE './%'" "SELECT id, filepath FROM build_artifact WHERE filepath LIKE './%'"
let update_path : ([`build_artifact] Builder_db.Rep.id * Fpath.t, unit, [< `Zero | `One | `Many > `Zero ]) Caqti_request.t = let update_path : ([`build_artifact] Builder_db.Rep.id * Fpath.t, unit, [< `Zero | `One | `Many > `Zero ]) Caqti_request.t =
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath ->. Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath ->.
Caqti_type.unit @@ Caqti_type.unit @@
"UPDATE build_artifact SET filepath = $2 WHERE id = $1" "UPDATE build_artifact SET filepath = $2 WHERE id = $1"

View file

@ -40,11 +40,11 @@ let copy_old_build =
let old_build_execution_result = let old_build_execution_result =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) @@ Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) @@
"SELECT id, result_kind, result_code FROM build" "SELECT id, result_kind, result_code FROM build"
let update_new_build_execution_result = let update_new_build_execution_result =
Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int) ->. Caqti_type.unit @@ Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int) ->. Caqti_type.unit @@
"UPDATE new_build SET result_code = $2 WHERE id = $1" "UPDATE new_build SET result_code = $2 WHERE id = $1"
let old_build = let old_build =
@ -83,11 +83,11 @@ let copy_new_build =
let new_build_execution_result = let new_build_execution_result =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int) @@ Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int) @@
"SELECT id, result_code FROM build" "SELECT id, result_code FROM build"
let update_old_build_execution_result = let update_old_build_execution_result =
Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) ->. Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) ->.
Caqti_type.unit @@ Caqti_type.unit @@
"UPDATE new_build SET result_kind = $2, result_code = $3 WHERE id = $1" "UPDATE new_build SET result_kind = $2, result_code = $3 WHERE id = $1"

View file

@ -2,12 +2,12 @@ open Grej.Infix
let all_build_artifacts_like_hashes : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t = let all_build_artifacts_like_hashes : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@ Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
"SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%.build-hashes'" "SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%.build-hashes'"
let all_build_artifacts_like_readme : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t = let all_build_artifacts_like_readme : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@ Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
"SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%README.md'" "SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%README.md'"
let fixup datadir (module Db : Caqti_blocking.CONNECTION) = let fixup datadir (module Db : Caqti_blocking.CONNECTION) =

View file

@ -8,8 +8,8 @@ open Grej.Infix
module Asn = struct module Asn = struct
let decode_strict codec cs = let decode_strict codec cs =
match Asn.decode codec cs with match Asn.decode codec cs with
| Ok (a, rest) -> | Ok (a, cs) ->
if String.length rest = 0 if Cstruct.length cs = 0
then Ok a then Ok a
else Error "trailing bytes" else Error "trailing bytes"
| Error (`Parse msg) -> Error ("parse error: " ^ msg) | Error (`Parse msg) -> Error ("parse error: " ^ msg)
@ -95,24 +95,24 @@ let copy_from_new_build =
let old_build_console_script = let old_build_console_script =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.(t4 (Builder_db.Rep.id (`build : [ `build ])) Caqti_type.(tup4 (Builder_db.Rep.id (`build : [ `build ]))
(t2 string Builder_db.Rep.uuid) octets string) @@ (tup2 string Builder_db.Rep.uuid) Builder_db.Rep.cstruct string) @@
"SELECT b.id, job.name, b.uuid, b.console, b.script FROM build b, job WHERE b.job = job.id" "SELECT b.id, job.name, b.uuid, b.console, b.script FROM build b, job WHERE b.job = job.id"
let update_new_build_console_script = let update_new_build_console_script =
Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ]))
Builder_db.Rep.fpath Builder_db.Rep.fpath) ->. Builder_db.Rep.fpath Builder_db.Rep.fpath) ->.
Caqti_type.unit @@ Caqti_type.unit @@
"UPDATE new_build SET console = $2, script = $3 WHERE id = $1" "UPDATE new_build SET console = $2, script = $3 WHERE id = $1"
let new_build_console_script = let new_build_console_script =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.t3 (Builder_db.Rep.id (`build : [ `build ])) Caqti_type.tup3 (Builder_db.Rep.id (`build : [ `build ]))
Builder_db.Rep.fpath Builder_db.Rep.fpath @@ Builder_db.Rep.fpath Builder_db.Rep.fpath @@
"SELECT id, console, script FROM build" "SELECT id, console, script FROM build"
let update_old_build_console_script = let update_old_build_console_script =
Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) octets string) ->. Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) Builder_db.Rep.cstruct string) ->.
Caqti_type.unit @@ Caqti_type.unit @@
"UPDATE new_build SET console = $2, script = $3 WHERE id = $1" "UPDATE new_build SET console = $2, script = $3 WHERE id = $1"

View file

@ -2,13 +2,13 @@ open Grej.Infix
let mixups = let mixups =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.t3 (Builder_db.Rep.id (`build : [`build])) Caqti_type.tup3 (Builder_db.Rep.id (`build : [`build]))
Builder_db.Rep.fpath Builder_db.Rep.fpath @@ Builder_db.Rep.fpath Builder_db.Rep.fpath @@
"SELECT id, console, script FROM build \ "SELECT id, console, script FROM build \
WHERE console LIKE '%/script.txt' AND script LIKE '%/console.txt'" WHERE console LIKE '%/script.txt' AND script LIKE '%/console.txt'"
let fixup = let fixup =
Caqti_type.t3 (Builder_db.Rep.id (`build : [`build])) Caqti_type.tup3 (Builder_db.Rep.id (`build : [`build]))
Builder_db.Rep.fpath Builder_db.Rep.fpath ->. Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
Caqti_type.unit @@ Caqti_type.unit @@
"UPDATE build SET console = $2, script = $3 WHERE id = $1" "UPDATE build SET console = $2, script = $3 WHERE id = $1"

View file

@ -73,11 +73,11 @@ let copy_from_new_build =
|} |}
let build_id_and_user = let build_id_and_user =
Caqti_type.unit ->* Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int64) @@ Caqti_type.unit ->* Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int64) @@
"SELECT id, user FROM build" "SELECT id, user FROM build"
let update_new_build_platform = let update_new_build_platform =
Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) string) ->. Caqti_type.unit @@ Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) string) ->. Caqti_type.unit @@
"UPDATE new_build SET platform = $2 WHERE id = $1" "UPDATE new_build SET platform = $2 WHERE id = $1"
let drop_build = let drop_build =

View file

@ -6,9 +6,9 @@ and rollback_doc = "switch uuid encoding back to binary"
open Grej.Infix open Grej.Infix
let old_uuid_rep = let old_uuid_rep =
let encode uuid = Ok (Uuidm.to_binary_string uuid) in let encode uuid = Ok (Uuidm.to_bytes uuid) in
let decode s = let decode s =
Uuidm.of_binary_string s Uuidm.of_bytes s
|> Option.to_result ~none:"failed to decode uuid" |> Option.to_result ~none:"failed to decode uuid"
in in
Caqti_type.custom ~encode ~decode Caqti_type.string Caqti_type.custom ~encode ~decode Caqti_type.string
@ -23,21 +23,21 @@ let new_uuid_rep =
let uuids_byte_encoded_q = let uuids_byte_encoded_q =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep @@ Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep @@
"SELECT id, uuid FROM build" "SELECT id, uuid FROM build"
let uuids_hex_encoded_q = let uuids_hex_encoded_q =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep @@ Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep @@
"SELECT id, uuid FROM build" "SELECT id, uuid FROM build"
let migrate_q = let migrate_q =
Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep ->. Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep ->.
Caqti_type.unit @@ Caqti_type.unit @@
"UPDATE build SET uuid = $2 WHERE id = $1" "UPDATE build SET uuid = $2 WHERE id = $1"
let rollback_q = let rollback_q =
Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep ->. Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep ->.
Caqti_type.unit @@ Caqti_type.unit @@
"UPDATE build SET uuid = $2 WHERE id = $1" "UPDATE build SET uuid = $2 WHERE id = $1"

View file

@ -1,32 +0,0 @@
(* Schema versions bridged by this migration, plus the identifier and
   human-readable descriptions consumed by the migration driver. *)
let new_version = 17L
let old_version = 16L
let identifier = "2023-09-11"
let migrate_doc = "index failed builds on main binary is null"
let rollback_doc = "index failed builds on exit code"
open Grej.Syntax
(* Caqti request dropping the [idx_build_failed] partial index; both
   [migrate] and [rollback] below recreate it with their own notion of
   what a "failed" build is. *)
let drop_idx_build_failed =
  Caqti_type.(unit ->. unit) @@
  "DROP INDEX idx_build_failed"
(* Forward migration: redefine the [idx_build_failed] partial index so a
   build counts as failed when [main_binary IS NULL] (previously: a
   non-zero [result_code]), then bump the schema's user_version.
   Each step short-circuits on error via [let*]. *)
let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
  (* Guard: refuse to run against any schema version but the expected one. *)
  let* () = Grej.check_version ~user_version:old_version (module Db) in
  let* () = Db.exec drop_idx_build_failed () in
  let* () =
    Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
             "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
              WHERE main_binary IS NULL")
      ()
  in
  Db.exec (Grej.set_version new_version) ()
(* Rollback: restore the previous definition of the partial index
   (failed means [result_code <> 0]) and reset the schema version. *)
let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
  (* Guard: only roll back from the version this migration produced. *)
  let* () = Grej.check_version ~user_version:new_version (module Db) in
  let* () = Db.exec drop_idx_build_failed () in
  let* () =
    Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
             "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
              WHERE result_code <> 0")
      ()
  in
  Db.exec (Grej.set_version old_version) ()

View file

@ -1,162 +0,0 @@
(* Schema versions bridged by this migration, plus the identifier and
   human-readable descriptions consumed by the migration driver. *)
let new_version = 18L
let old_version = 17L
let identifier = "2023-09-14"
let migrate_doc = "Artifacts are stored content-addressed in the filesystem"
let rollback_doc = "Artifacts are stored under their build's job name and uuid"
open Grej.Syntax
(* DDL for the content-addressed artifact table: compared to the old
   schema there is no [localpath] column -- the file's on-disk location
   is derived from [sha256] instead (see [new_build_artifact_paths]). *)
let new_build_artifact =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build_artifact (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       filepath TEXT NOT NULL,
       sha256 BLOB NOT NULL,
       size INTEGER NOT NULL,
       build INTEGER NOT NULL,
       FOREIGN KEY(build) REFERENCES build(id),
       UNIQUE(build, filepath)
     )
  |}
(* DDL for the pre-migration artifact table, which still carries
   [localpath].  Note it is deliberately created under the name
   "new_build_artifact": [rollback] populates it and then renames it
   over [build_artifact] (see [rename_build_artifact]). *)
let old_build_artifact =
  Caqti_type.unit ->. Caqti_type.unit @@
  {| CREATE TABLE new_build_artifact (
       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
       filepath TEXT NOT NULL, -- the path as in the build
       localpath TEXT NOT NULL, -- local path to the file on disk
       sha256 BLOB NOT NULL,
       size INTEGER NOT NULL,
       build INTEGER NOT NULL,
       FOREIGN KEY(build) REFERENCES build(id),
       UNIQUE(build, filepath)
     )
  |}
(* Recreate the sha256 lookup index on the (renamed) artifact table. *)
let idx_build_artifact_sha256 =
  Caqti_type.(unit ->. unit) @@
  "CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)"
(* Recreate the per-build lookup index on the (renamed) artifact table. *)
let idx_build_artifact_build =
  Caqti_type.(unit ->. unit) @@
  "CREATE INDEX idx_build_artifact_build ON build_artifact(build)"
(* Copy every artifact row into the new table, dropping [localpath]. *)
let copy_new_build_artifact =
  Caqti_type.(unit ->. unit) @@
  {| INSERT INTO new_build_artifact(id, filepath, sha256, size, build)
     SELECT id, filepath, sha256, size, build
     FROM build_artifact
  |}
(* Copy artifact rows back into the old-style table, reconstructing
   [localpath] as "<job name>/<build uuid>/output/<filepath>". *)
let copy_old_build_artifact =
  Caqti_type.(unit ->. unit) @@
  {| INSERT INTO new_build_artifact(id, filepath, localpath, sha256, size, build)
     SELECT a.id, a.filepath,
            j.name || '/' || b.uuid || '/output/' || a.filepath,
            a.sha256, a.size, a.build
     FROM build_artifact a, job j, build b
     WHERE b.id = a.build AND j.id = b.job
  |}
(* For each artifact, yield (current localpath, content-addressed
   destination "_artifacts/<first 2 sha256 hex chars>/<sha256 hex>"). *)
let new_build_artifact_paths =
  Caqti_type.unit ->* Caqti_type.(t2 string string) @@
  {| SELECT localpath, '_artifacts/' || substr(lower(hex(sha256)), 1, 2) || '/' || lower(hex(sha256))
     FROM build_artifact
  |}
(* Inverse of [new_build_artifact_paths]: for each artifact, yield
   (content-addressed path, per-build "<job>/<uuid>/output/<filepath>"
   destination), used by [rollback] and the fixup helpers. *)
let old_build_artifact_paths =
  Caqti_type.unit ->* Caqti_type.(t2 string string) @@
  {| SELECT '_artifacts/' || substr(lower(hex(a.sha256)), 1, 2) || '/' || lower(hex(a.sha256)),
            j.name || '/' || b.uuid || '/output/' || a.filepath
     FROM build_artifact a, job j, build b
     WHERE b.id = a.build AND j.id = b.job
  |}
(* Drop the old [build_artifact] table once its rows have been copied. *)
let drop_build_artifact =
  Caqti_type.unit ->. Caqti_type.unit @@
  "DROP TABLE build_artifact"
(* Rename the freshly populated table into its final name. *)
let rename_build_artifact =
  Caqti_type.unit ->. Caqti_type.unit @@
  "ALTER TABLE new_build_artifact RENAME TO build_artifact"
(* Move a file from [old_path] to [new_path] (both interpreted relative
   to [datadir]), creating the destination directory first.  [?force]
   is forwarded to [Bos.OS.Path.move]. *)
let move_paths ?force datadir (old_path, new_path) =
  let src = Fpath.(datadir // v old_path) in
  let dst = Fpath.(datadir // v new_path) in
  let* _created = Bos.OS.Dir.create (Fpath.parent dst) in
  Bos.OS.Path.move ?force src dst
(* Copy [old_path] to [new_path] (both relative to [datadir]) via
   cp(1), writing to a "<new_path>tmp" temporary first and renaming it
   into place so readers never observe a partially written artifact.
   On any cp failure the temporary file is deleted before returning the
   error.

   Fix: the original pattern [Ok `Exited 0] is a syntax error -- a
   polymorphic-variant constructor with an argument must be
   parenthesized inside the [Ok _] pattern: [Ok (`Exited 0)]. *)
let copy_paths datadir (old_path, new_path) =
  let old_path = Fpath.(datadir // v old_path)
  and new_path = Fpath.(datadir // v new_path) in
  let new_path_tmp = Fpath.(new_path + "tmp") in
  let* _created = Bos.OS.Dir.create (Fpath.parent new_path) in
  let cmd = Bos.Cmd.(v "cp" % p old_path % p new_path_tmp) in
  let* () =
    match Bos.OS.Cmd.run_status cmd with
    | Ok (`Exited 0) ->
      Ok ()
    | Ok status ->
      (* cp ran but did not exit 0; remove the partial temp file. *)
      let _ = Bos.OS.Path.delete new_path_tmp in
      Error (`Msg (Fmt.str "cp failed: %a" Bos.OS.Cmd.pp_status status))
    | Error _ as e ->
      let _ = Bos.OS.Path.delete new_path_tmp in
      e
  in
  Bos.OS.Path.move ~force:true new_path_tmp new_path
(* Forward migration: create the localpath-less table, copy the rows
   over, move every artifact file on disk to its content-addressed
   location, swap the tables, recreate both indexes and bump the schema
   version.  Each step short-circuits on error via [let*]. *)
let migrate datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:old_version (module Db) in
  let* () = Db.exec new_build_artifact () in
  let* () = Db.exec copy_new_build_artifact () in
  (* Move files while the old table's localpath column is still queryable. *)
  let* () = Db.iter_s new_build_artifact_paths (move_paths ~force:true datadir) () in
  let* () = Db.exec drop_build_artifact () in
  let* () = Db.exec rename_build_artifact () in
  let* () = Db.exec idx_build_artifact_sha256 () in
  let* () = Db.exec idx_build_artifact_build () in
  Db.exec (Grej.set_version new_version) ()
(* Rollback: recreate the old-style table, copy rows back (recomputing
   localpath in SQL), copy each content-addressed file to its per-build
   location, delete the content-addressed copies, then swap the tables.
   Note: only [idx_build_artifact_sha256] is recreated here -- the
   build index was introduced by this migration. *)
let rollback datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:new_version (module Db) in
  let* () = Db.exec old_build_artifact () in
  let* () = Db.exec copy_old_build_artifact () in
  (* Copy first (not move) so a mid-way failure leaves the originals intact. *)
  let* () = Db.iter_s old_build_artifact_paths (copy_paths datadir) () in
  let* () =
    Db.iter_s old_build_artifact_paths
      (fun (old_path, _new_path) ->
        Bos.OS.Path.delete Fpath.(datadir // v old_path))
      ()
  in
  let* () = Db.exec drop_build_artifact () in
  let* () = Db.exec rename_build_artifact () in
  let* () = Db.exec idx_build_artifact_sha256 () in
  Db.exec (Grej.set_version old_version) ()
(* migration failed but managed to move *some* files: restore every
   file that already reached its content-addressed location back to its
   old localpath, then delete the content-addressed copies, so that
   [migrate] can be retried from a clean pre-migration state. *)
let fixup_migrate datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:old_version (module Db) in
  let* () =
    Db.iter_s new_build_artifact_paths
      (fun (old_path, new_path) ->
        let* old_exists = Bos.OS.Path.exists Fpath.(datadir // v old_path) in
        let* new_exists = Bos.OS.Path.exists Fpath.(datadir // v new_path) in
        (* Only restore files that were moved and not already restored. *)
        if new_exists && not old_exists then
          copy_paths datadir (new_path, old_path)
        else Ok ())
      ()
  in
  Db.iter_s new_build_artifact_paths
    (fun (_old_path, new_path) ->
      Bos.OS.Path.delete Fpath.(datadir // v new_path))
    ()
(* rollback failed but some or all artifacts were copied: for each
   artifact, if the content-addressed file still exists, drop the
   per-build copy (rollback will redo it); otherwise move the per-build
   copy back to the content-addressed location. *)
let fixup_rollback datadir (module Db : Caqti_blocking.CONNECTION) =
  let* () = Grej.check_version ~user_version:new_version (module Db) in
  Db.iter_s old_build_artifact_paths
    (fun (old_path, new_path) ->
      let* old_exists = Bos.OS.Path.exists Fpath.(datadir // v old_path) in
      if old_exists then
        Bos.OS.Path.delete Fpath.(datadir // v new_path)
      else
        move_paths datadir (new_path, old_path))
    ()

View file

@ -1,9 +1,9 @@
opam-version: "2.0" opam-version: "2.0"
maintainer: "Reynir Björnsson <reynir@reynir.dk>" maintainer: "Reynir Björnsson <reynir@reynir.dk>"
authors: ["Reynir Björnsson <reynir@reynir.dk>"] authors: ["Reynir Björnsson <reynir@reynir.dk>"]
homepage: "https://github.com/robur-coop/builder-web" homepage: "https://github.com/roburio/builder-web"
dev-repo: "git+https://github.com/robur-coop/builder-web.git" dev-repo: "git+https://github.com/roburio/builder-web.git"
bug-reports: "https://github.com/robur-coop/builder-web/issues" bug-reports: "https://github.com/roburio/builder-web/issues"
license: "ISC" license: "ISC"
build: [ build: [
@ -17,17 +17,19 @@ build: [
depends: [ depends: [
"ocaml" {>= "4.13.0"} "ocaml" {>= "4.13.0"}
"dune" {>= "2.7.0"} "dune" {>= "2.7.0"}
"builder" {>= "0.4.0"} "builder" {>= "0.2.0"}
"dream" {>= "1.0.0~alpha7"} "dream" {= "1.0.0~alpha4"}
"dream-encoding" "cstruct" {>= "6.0.0"}
"bos" "bos"
"ohex" {>= "0.2.0"} "hex"
"lwt" {>= "5.7.0"} "lwt" {>= "5.3.0"}
"caqti" {>= "2.1.2"} "caqti" {>= "1.8.0"}
"caqti-lwt" "caqti-lwt"
"caqti-driver-sqlite3" "caqti-driver-sqlite3"
"mirage-crypto-rng" {>= "1.2.0"} "pbkdf"
"kdf" "mirage-crypto-rng"
"scrypt-kdf"
"alcotest" {with-test}
"opam-core" "opam-core"
"opam-format" {>= "2.1.0"} "opam-format" {>= "2.1.0"}
"metrics" {>= "0.3.0"} "metrics" {>= "0.3.0"}
@ -38,29 +40,27 @@ depends: [
"tyxml" {>= "4.3.0"} "tyxml" {>= "4.3.0"}
"ptime" "ptime"
"duration" "duration"
"asn1-combinators" {>= "0.3.0"} "mirage-crypto"
"asn1-combinators"
"logs" "logs"
"cmdliner" {>= "1.1.0"} "cmdliner" {>= "1.1.0"}
"uri" "uri"
"fmt" {>= "0.8.7"} "fmt" {>= "0.8.7"}
"cmarkit" {>= "0.3.0"} "omd"
"tar" {>= "3.0.0"} "tar"
"tar-unix" {>= "3.0.0"} "owee"
"cachet" {>= "0.0.2"} "solo5-elftool" {>= "0.3.0"}
"solo5-elftool" {>= "0.4.0"} "decompress"
"decompress" {>= "1.5.0"}
"digestif" {>= "1.2.0"}
"uuidm" {>= "0.9.9"}
"yojson"
"alcotest" {>= "1.2.0" & with-test}
"ppx_deriving" {with-test}
"ppx_deriving_yojson" {with-test}
] ]
synopsis: "Web interface for builder" synopsis: "Web interface for builder"
description: """ description: """
Builder-web takes in submissions of builds, typically from [builder](https://github.com/robur-coop/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status. Builder-web takes in submissions of builds, typically from [builder](https://github.com/roburio/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
Produced binaries can be downloaded and executed. Produced binaries can be downloaded and executed.
[builds.robur.coop](https://builds.robur.coop/) itself runs builder-web. [builds.robur.coop](https://builds.robur.coop/) itself runs builder-web.
""" """
x-maintenance-intent: [ "(latest)" ]
pin-depends: [
["modulectomy.dev" "git+https://github.com/roburio/modulectomy.git"]
["opam-graph.dev" "git+https://git.robur.io/robur/opam-graph.git"]
]

View file

@ -4,15 +4,15 @@ open Caqti_request.Infix
let application_id = 1234839235l let application_id = 1234839235l
(* Please update this when making changes! And also update (* Please update this when making changes! *)
packaging/batch-viz.sh and packaging/visualizations.sh. *) let current_version = 16L
let current_version = 18L
type 'a id = 'a Rep.id type 'a id = 'a Rep.id
type file = Rep.file = { type file = Rep.file = {
filepath : Fpath.t; filepath : Fpath.t;
sha256 : string; localpath : Fpath.t;
sha256 : Cstruct.t;
size : int; size : int;
} }
@ -57,7 +57,7 @@ module Job = struct
let get_all_with_section_synopsis = let get_all_with_section_synopsis =
Caqti_type.unit ->* Caqti_type.unit ->*
Caqti_type.(t4 (id `job) string (option string) (option string)) @@ Caqti_type.(tup4 (id `job) string (option string) (option string)) @@
{| SELECT j.id, j.name, section.value, synopsis.value {| SELECT j.id, j.name, section.value, synopsis.value
FROM job j, tag section_tag, tag synopsis_tag FROM job j, tag section_tag, tag synopsis_tag
LEFT JOIN job_tag section ON section.job = j.id AND section.tag = section_tag.id LEFT JOIN job_tag section ON section.job = j.id AND section.tag = section_tag.id
@ -117,15 +117,15 @@ module Job_tag = struct
"DROP TABLE IF EXISTS job_tag" "DROP TABLE IF EXISTS job_tag"
let add = let add =
Caqti_type.(t3 (id `tag) string (id `job)) ->. Caqti_type.unit @@ Caqti_type.(tup3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
"INSERT INTO job_tag (tag, value, job) VALUES ($1, $2, $3)" "INSERT INTO job_tag (tag, value, job) VALUES ($1, $2, $3)"
let update = let update =
Caqti_type.(t3 (id `tag) string (id `job)) ->. Caqti_type.unit @@ Caqti_type.(tup3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
"UPDATE job_tag SET value = $2 WHERE tag = $1 AND job = $3" "UPDATE job_tag SET value = $2 WHERE tag = $1 AND job = $3"
let get_value = let get_value =
Caqti_type.(t2 (id `tag) (id `job)) ->? Caqti_type.string @@ Caqti_type.(tup2 (id `tag) (id `job)) ->? Caqti_type.string @@
"SELECT value FROM job_tag WHERE tag = ? AND job = ?" "SELECT value FROM job_tag WHERE tag = ? AND job = ?"
let remove_by_job = let remove_by_job =
@ -140,6 +140,7 @@ module Build_artifact = struct
{| CREATE TABLE build_artifact ( {| CREATE TABLE build_artifact (
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
filepath TEXT NOT NULL, -- the path as in the build filepath TEXT NOT NULL, -- the path as in the build
localpath TEXT NOT NULL, -- local path to the file on disk
sha256 BLOB NOT NULL, sha256 BLOB NOT NULL,
size INTEGER NOT NULL, size INTEGER NOT NULL,
build INTEGER NOT NULL, build INTEGER NOT NULL,
@ -155,30 +156,26 @@ module Build_artifact = struct
let get = let get =
id `build_artifact ->! file @@ id `build_artifact ->! file @@
{| SELECT filepath, sha256, size {| SELECT filepath, localpath, sha256, size
FROM build_artifact WHERE id = ? |} FROM build_artifact WHERE id = ? |}
let get_by_build_uuid = let get_by_build_uuid =
Caqti_type.t2 uuid fpath ->? Caqti_type.t2 (id `build_artifact) file @@ Caqti_type.tup2 uuid fpath ->? Caqti_type.tup2 (id `build_artifact) file @@
{| SELECT build_artifact.id, build_artifact.filepath, {| SELECT build_artifact.id, build_artifact.filepath,
build_artifact.sha256, build_artifact.size build_artifact.localpath, build_artifact.sha256, build_artifact.size
FROM build_artifact FROM build_artifact
INNER JOIN build ON build.id = build_artifact.build INNER JOIN build ON build.id = build_artifact.build
WHERE build.uuid = ? AND build_artifact.filepath = ? WHERE build.uuid = ? AND build_artifact.filepath = ?
|} |}
let get_all_by_build = let get_all_by_build =
id `build ->* Caqti_type.(t2 (id `build_artifact) file) @@ id `build ->* Caqti_type.(tup2 (id `build_artifact) file) @@
"SELECT id, filepath, sha256, size FROM build_artifact WHERE build = ?" "SELECT id, filepath, localpath, sha256, size FROM build_artifact WHERE build = ?"
let exists =
Caqti_type.octets ->! Caqti_type.bool @@
"SELECT EXISTS(SELECT 1 FROM build_artifact WHERE sha256 = ?)"
let add = let add =
Caqti_type.(t2 file (id `build)) ->. Caqti_type.unit @@ Caqti_type.(tup2 file (id `build)) ->. Caqti_type.unit @@
"INSERT INTO build_artifact (filepath, sha256, size, build) \ "INSERT INTO build_artifact (filepath, localpath, sha256, size, build) \
VALUES (?, ?, ?, ?)" VALUES (?, ?, ?, ?, ?)"
let remove_by_build = let remove_by_build =
id `build ->. Caqti_type.unit @@ id `build ->. Caqti_type.unit @@
@ -199,55 +196,34 @@ module Build = struct
script : Fpath.t; script : Fpath.t;
platform : string; platform : string;
main_binary : [`build_artifact] id option; main_binary : [`build_artifact] id option;
input_id : string option; input_id : Cstruct.t option;
user_id : [`user] id; user_id : [`user] id;
job_id : [`job] id; job_id : [`job] id;
} }
let pp ppf t =
Fmt.pf ppf "@[<hov>{ uuid=@ %a;@ \
start=@ %a;@ \
finish=@ %a;@ \
result=@ @[<hov>%a@];@ \
console=@ %a;@ \
script=@ %a;@ \
platform=@ %S;@ \
main_binary=@ @[<hov>%a@];@ \
input_id=@ @[<hov>%a@];@ \
user_id=@ %Lx;@ \
job_id=@ %Lx;@ }@]"
Uuidm.pp t.uuid
Ptime.pp t.start
Ptime.pp t.finish
Builder.pp_execution_result t.result
Fpath.pp t.console
Fpath.pp t.script
t.platform
Fmt.(Dump.option int64) t.main_binary
Fmt.(Dump.option string) t.input_id
t.user_id
t.job_id
let t = let t =
let rep = let rep =
Caqti_type.(t11 Caqti_type.(tup3
uuid (tup4
Rep.ptime uuid
Rep.ptime (tup2
execution_result Rep.ptime
fpath Rep.ptime)
fpath (tup2
string execution_result
(option (Rep.id `build_artifact)) fpath)
(option octets) (tup4
fpath
string
(option (Rep.id `build_artifact))
(option Rep.cstruct)))
(id `user) (id `user)
(id `job)) (id `job))
in in
let encode { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id } = let encode { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id } =
Ok (uuid, start, finish, result, console, script, platform, main_binary, input_id, user_id, job_id) Ok ((uuid, (start, finish), (result, console), (script, platform, main_binary, input_id)), user_id, job_id)
in in
let decode (uuid, start, finish, result, console, script, platform, main_binary, input_id, user_id, job_id) = let decode ((uuid, (start, finish), (result, console), (script, platform, main_binary, input_id)), user_id, job_id) =
Ok { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id } Ok { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id }
in in
Caqti_type.custom ~encode ~decode rep Caqti_type.custom ~encode ~decode rep
@ -282,7 +258,7 @@ module Build = struct
"DROP TABLE IF EXISTS build" "DROP TABLE IF EXISTS build"
let get_by_uuid = let get_by_uuid =
Rep.uuid ->? Caqti_type.t2 (id `build) t @@ Rep.uuid ->? Caqti_type.tup2 (id `build) t @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, result_code, result_msg,
console, script, platform, main_binary, input_id, user, job console, script, platform, main_binary, input_id, user, job
@ -291,7 +267,7 @@ module Build = struct
|} |}
let get_all = let get_all =
id `job ->* Caqti_type.t2 (id `build) t @@ id `job ->* Caqti_type.tup2 (id `build) t @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console, result_code, result_msg, console,
script, platform, main_binary, input_id, user, job script, platform, main_binary, input_id, user, job
@ -301,20 +277,20 @@ module Build = struct
|} |}
let get_all_failed = let get_all_failed =
Caqti_type.(t3 int int (option string)) ->* Caqti_type.t2 Caqti_type.string t @@ Caqti_type.(tup3 int int (option string)) ->* Caqti_type.tup2 Caqti_type.string t @@
{| SELECT job.name, b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps, {| SELECT job.name, b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg, b.console, b.script, b.platform, b.result_code, b.result_msg, b.console, b.script, b.platform,
b.main_binary, b.input_id, b.user, b.job b.main_binary, b.input_id, b.user, b.job
FROM build b FROM build b
INNER JOIN job ON job.id = b.job INNER JOIN job ON job.id = b.job
WHERE b.main_binary IS NULL AND ($3 IS NULL OR b.platform = $3) WHERE b.result_code <> 0 AND ($3 IS NULL OR b.platform = $3)
ORDER BY start_d DESC, start_ps DESC ORDER BY start_d DESC, start_ps DESC
LIMIT $2 LIMIT $2
OFFSET $1 OFFSET $1
|} |}
let get_all_artifact_sha = let get_all_artifact_sha =
Caqti_type.(t2 (id `job) (option string)) ->* Caqti_type.octets @@ Caqti_type.(tup2 (id `job) (option string)) ->* Rep.cstruct @@
{| SELECT DISTINCT a.sha256 {| SELECT DISTINCT a.sha256
FROM build_artifact a, build b FROM build_artifact a, build b
WHERE b.job = $1 AND b.main_binary = a.id WHERE b.job = $1 AND b.main_binary = a.id
@ -323,79 +299,40 @@ module Build = struct
|} |}
let get_failed_builds = let get_failed_builds =
Caqti_type.(t2 (id `job) (option string)) ->* t @@ Caqti_type.(tup2 (id `job) (option string)) ->* t @@
{| SELECT uuid, start_d, start_ps, finish_d, finish_ps, {| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console, script, result_code, result_msg, console, script,
platform, main_binary, input_id, user, job platform, main_binary, input_id, user, job
FROM build FROM build
WHERE job = $1 WHERE job = $1 AND result_code <> 0
AND main_binary IS NULL
AND ($2 IS NULL OR platform = $2) AND ($2 IS NULL OR platform = $2)
ORDER BY start_d DESC, start_ps DESC ORDER BY start_d DESC, start_ps DESC
|} |}
let get_latest_successful_with_binary = let get_latest_successful_with_binary =
Caqti_type.(t2 (id `job) string) ->? Caqti_type.t3 (id `build) t file @@ Caqti_type.(tup2 (id `job) string) ->? Caqti_type.tup3 (id `build) t file_opt @@
{| SELECT b.id, {| SELECT b.id,
b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps, b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg, b.console, b.script, b.result_code, b.result_msg, b.console, b.script,
b.platform, b.main_binary, b.input_id, b.user, b.job, b.platform, b.main_binary, b.input_id, b.user, b.job,
a.filepath, a.sha256, a.size a.filepath, a.localpath, a.sha256, a.size
FROM build b, build_artifact a FROM build b
WHERE b.main_binary = a.id AND b.job = $1 AND b.platform = $2 LEFT JOIN build_artifact a ON
AND b.main_binary IS NOT NULL b.main_binary = a.id
WHERE b.job = $1 AND b.platform = $2 AND b.result_code = 0
ORDER BY b.start_d DESC, b.start_ps DESC ORDER BY b.start_d DESC, b.start_ps DESC
LIMIT 1 LIMIT 1
|} |}
let get_builds_older_than =
Caqti_type.(t3 (id `job) (option string) Rep.ptime) ->* Caqti_type.t2 (id `build) t @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console, script,
platform, main_binary, input_id, user, job
FROM build
WHERE job = $1
AND ($2 IS NULL OR platform = $2)
AND (finish_d < $3 OR (finish_d = $3 AND finish_ps <= $4))
ORDER BY start_d DESC, start_ps DESC
|}
let get_builds_excluding_latest_n =
Caqti_type.(t3 (id `job) (option string) int) ->* Caqti_type.t2 (id `build) t @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console, script,
platform, main_binary, input_id, user, job
FROM build
WHERE job = $1
AND ($2 IS NULL OR platform = $2)
ORDER BY start_d DESC, start_ps DESC
LIMIT -1 OFFSET $3
|}
(* "LIMIT -1 OFFSET n" is all rows except the first n *)
let get_nth_latest_successful =
Caqti_type.(t3 (id `job) (option string) int) ->? Caqti_type.t2 (id `build) t @@
{| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console, script,
platform, main_binary, input_id, user, job
FROM build
WHERE job = $1
AND ($2 IS NULL OR platform = $2)
AND main_binary IS NOT NULL
ORDER BY start_d DESC, start_ps DESC
LIMIT 1 OFFSET $3
|}
let get_latest_successful = let get_latest_successful =
Caqti_type.(t2 (id `job) (option string)) ->? t @@ Caqti_type.(tup2 (id `job) (option string)) ->? t @@
{| SELECT {| SELECT
b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps, b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg, b.console, b.script, b.result_code, b.result_msg, b.console, b.script,
b.platform, b.main_binary, b.input_id, b.user, b.job b.platform, b.main_binary, b.input_id, b.user, b.job
FROM build b FROM build b
WHERE b.job = $1 WHERE b.job = $1 AND b.result_code = 0
AND ($2 IS NULL OR b.platform = $2) AND ($2 IS NULL OR b.platform = $2)
AND b.main_binary IS NOT NULL
ORDER BY b.start_d DESC, b.start_ps DESC ORDER BY b.start_d DESC, b.start_ps DESC
LIMIT 1 LIMIT 1
|} |}
@ -409,7 +346,7 @@ module Build = struct
FROM build b, build b0, build_artifact a, build_artifact a0 FROM build b, build b0, build_artifact a, build_artifact a0
WHERE b0.id = ? AND b0.job = b.job AND WHERE b0.id = ? AND b0.job = b.job AND
b.platform = b0.platform AND b.platform = b0.platform AND
b.main_binary IS NOT NULL AND b.result_code = 0 AND
a.id = b.main_binary AND a0.id = b0.main_binary AND a.id = b.main_binary AND a0.id = b0.main_binary AND
a.sha256 <> a0.sha256 AND a.sha256 <> a0.sha256 AND
(b0.start_d > b.start_d OR b0.start_d = b.start_d AND b0.start_ps > b.start_ps) (b0.start_d > b.start_d OR b0.start_d = b.start_d AND b0.start_ps > b.start_ps)
@ -426,7 +363,7 @@ module Build = struct
FROM build b, build b0, build_artifact a, build_artifact a0 FROM build b, build b0, build_artifact a, build_artifact a0
WHERE b0.id = ? AND b0.job = b.job AND WHERE b0.id = ? AND b0.job = b.job AND
b.platform = b0.platform AND b.platform = b0.platform AND
b.main_binary IS NOT NULL AND b.result_code = 0 AND
a.id = b.main_binary AND a0.id = b0.main_binary AND a.id = b.main_binary AND a0.id = b0.main_binary AND
a.sha256 <> a0.sha256 AND a.sha256 <> a0.sha256 AND
(b0.start_d < b.start_d OR b0.start_d = b.start_d AND b0.start_ps < b.start_ps) (b0.start_d < b.start_d OR b0.start_d = b.start_d AND b0.start_ps < b.start_ps)
@ -446,7 +383,7 @@ module Build = struct
|} |}
let get_same_input_different_output_hashes = let get_same_input_different_output_hashes =
id `build ->* Caqti_type.octets @@ id `build ->* Rep.cstruct @@
{| SELECT DISTINCT a.sha256 {| SELECT DISTINCT a.sha256
FROM build b0, build_artifact a0, build b, build_artifact a FROM build b0, build_artifact a0, build b, build_artifact a
WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 <> a.sha256 WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 <> a.sha256
@ -455,7 +392,7 @@ module Build = struct
|} |}
let get_different_input_same_output_input_ids = let get_different_input_same_output_input_ids =
id `build ->* Caqti_type.octets @@ id `build ->* Rep.cstruct @@
{| SELECT DISTINCT b.input_id {| SELECT DISTINCT b.input_id
FROM build b0, build_artifact a0, build b, build_artifact a FROM build b0, build_artifact a0, build b, build_artifact a
WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 = a.sha256 WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 = a.sha256
@ -463,7 +400,7 @@ module Build = struct
|} |}
let get_one_by_input_id = let get_one_by_input_id =
Caqti_type.octets ->! t @@ Rep.cstruct ->! t @@
{| SELECT uuid, start_d, start_ps, finish_d, finish_ps, {| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
result_code, result_msg, console, script, result_code, result_msg, console, script,
platform, main_binary, input_id, user, job platform, main_binary, input_id, user, job
@ -475,7 +412,7 @@ module Build = struct
let get_platforms_for_job = let get_platforms_for_job =
id `job ->* Caqti_type.string @@ id `job ->* Caqti_type.string @@
"SELECT DISTINCT platform FROM build WHERE job = ? ORDER BY platform" "SELECT DISTINCT platform FROM build WHERE job = ?"
let add = let add =
t ->. Caqti_type.unit @@ t ->. Caqti_type.unit @@
@ -487,7 +424,7 @@ module Build = struct
|} |}
let get_by_hash = let get_by_hash =
Caqti_type.octets ->! t @@ Rep.cstruct ->! t @@
{| SELECT {| SELECT
b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps, b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg, b.console, b.script, b.result_code, b.result_msg, b.console, b.script,
@ -500,11 +437,11 @@ module Build = struct
|} |}
let get_with_main_binary_by_hash = let get_with_main_binary_by_hash =
Caqti_type.octets ->! Caqti_type.t2 t file_opt @@ Rep.cstruct ->! Caqti_type.tup2 t file_opt @@
{| SELECT b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps, {| SELECT b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg, b.console, b.script, b.result_code, b.result_msg, b.console, b.script,
b.platform, b.main_binary, b.input_id, b.user, b.job, b.platform, b.main_binary, b.input_id, b.user, b.job,
a.filepath, a.sha256, a.size a.filepath, a.localpath, a.sha256, a.size
FROM build_artifact a FROM build_artifact a
INNER JOIN build b ON b.id = a.build INNER JOIN build b ON b.id = a.build
WHERE a.sha256 = ? WHERE a.sha256 = ?
@ -513,7 +450,7 @@ module Build = struct
|} |}
let get_with_jobname_by_hash = let get_with_jobname_by_hash =
Caqti_type.octets ->? Caqti_type.t2 Caqti_type.string t @@ Rep.cstruct ->? Caqti_type.tup2 Caqti_type.string t @@
{| SELECT job.name, {| SELECT job.name,
b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps, b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
b.result_code, b.result_msg, b.result_code, b.result_msg,
@ -527,7 +464,7 @@ module Build = struct
|} |}
let set_main_binary = let set_main_binary =
Caqti_type.t2 (id `build) (id `build_artifact) ->. Caqti_type.unit @@ Caqti_type.tup2 (id `build) (id `build_artifact) ->. Caqti_type.unit @@
"UPDATE build SET main_binary = $2 WHERE id = $1" "UPDATE build SET main_binary = $2 WHERE id = $1"
let remove = let remove =
@ -555,7 +492,7 @@ module User = struct
"DROP TABLE IF EXISTS user" "DROP TABLE IF EXISTS user"
let get_user = let get_user =
Caqti_type.string ->? Caqti_type.t2 (id `user) user_info @@ Caqti_type.string ->? Caqti_type.tup2 (id `user) user_info @@
{| SELECT id, username, password_hash, password_salt, {| SELECT id, username, password_hash, password_salt,
scrypt_n, scrypt_r, scrypt_p, restricted scrypt_n, scrypt_r, scrypt_p, restricted
FROM user FROM user
@ -609,15 +546,15 @@ module Access_list = struct
"DROP TABLE IF EXISTS access_list" "DROP TABLE IF EXISTS access_list"
let get = let get =
Caqti_type.t2 (id `user) (id `job) ->! id `access_list @@ Caqti_type.tup2 (id `user) (id `job) ->! id `access_list @@
"SELECT id FROM access_list WHERE user = ? AND job = ?" "SELECT id FROM access_list WHERE user = ? AND job = ?"
let add = let add =
Caqti_type.t2 (id `user) (id `job) ->. Caqti_type.unit @@ Caqti_type.tup2 (id `user) (id `job) ->. Caqti_type.unit @@
"INSERT INTO access_list (user, job) VALUES (?, ?)" "INSERT INTO access_list (user, job) VALUES (?, ?)"
let remove = let remove =
Caqti_type.t2 (id `user) (id `job) ->. Caqti_type.unit @@ Caqti_type.tup2 (id `user) (id `job) ->. Caqti_type.unit @@
"DELETE FROM access_list WHERE user = ? AND job = ?" "DELETE FROM access_list WHERE user = ? AND job = ?"
let remove_by_job = let remove_by_job =
@ -648,15 +585,13 @@ let migrate = [
Caqti_type.unit ->. Caqti_type.unit @@ Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)"; "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)";
Caqti_type.unit ->. Caqti_type.unit @@ Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) WHERE main_binary IS NULL"; "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) WHERE result_code <> 0";
Caqti_type.unit ->. Caqti_type.unit @@ Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_input_id ON build(input_id)"; "CREATE INDEX idx_build_input_id ON build(input_id)";
Caqti_type.unit ->. Caqti_type.unit @@ Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_main_binary ON build(main_binary)"; "CREATE INDEX idx_build_main_binary ON build(main_binary)";
Caqti_type.unit ->. Caqti_type.unit @@ Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)"; "CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)";
Caqti_type.unit ->. Caqti_type.unit @@
"CREATE INDEX idx_build_artifact_build ON build_artifact(build)";
set_current_version; set_current_version;
set_application_id; set_application_id;
] ]
@ -670,8 +605,6 @@ let rollback = [
Build.rollback; Build.rollback;
Job.rollback; Job.rollback;
Caqti_type.unit ->. Caqti_type.unit @@ Caqti_type.unit ->. Caqti_type.unit @@
"DROP INDEX IF EXISTS idx_build_artifact_build";
Caqti_type.unit ->. Caqti_type.unit @@
"DROP INDEX IF EXISTS idx_build_artifact_sha256"; "DROP INDEX IF EXISTS idx_build_artifact_sha256";
Caqti_type.unit ->. Caqti_type.unit @@ Caqti_type.unit ->. Caqti_type.unit @@
"DROP INDEX IF EXISTS idx_build_failed"; "DROP INDEX IF EXISTS idx_build_failed";

View file

@ -3,7 +3,8 @@ module Rep : sig
type 'a id type 'a id
type file = { type file = {
filepath : Fpath.t; filepath : Fpath.t;
sha256 : string; localpath : Fpath.t;
sha256 : Cstruct.t;
size : int; size : int;
} }
@ -13,6 +14,7 @@ module Rep : sig
val uuid : Uuidm.t Caqti_type.t val uuid : Uuidm.t Caqti_type.t
val ptime : Ptime.t Caqti_type.t val ptime : Ptime.t Caqti_type.t
val fpath : Fpath.t Caqti_type.t val fpath : Fpath.t Caqti_type.t
val cstruct : Cstruct.t Caqti_type.t
val file : file Caqti_type.t val file : file Caqti_type.t
val execution_result : Builder.execution_result Caqti_type.t val execution_result : Builder.execution_result Caqti_type.t
val console : (int * string) list Caqti_type.t val console : (int * string) list Caqti_type.t
@ -21,7 +23,8 @@ type 'a id = 'a Rep.id
type file = Rep.file = { type file = Rep.file = {
filepath : Fpath.t; filepath : Fpath.t;
sha256 : string; localpath : Fpath.t;
sha256 : Cstruct.t;
size : int; size : int;
} }
@ -84,7 +87,6 @@ module Build_artifact : sig
Caqti_request.t Caqti_request.t
val get_all_by_build : val get_all_by_build :
([`build] id, [`build_artifact] id * file, [ `Many | `One | `Zero ]) Caqti_request.t ([`build] id, [`build_artifact] id * file, [ `Many | `One | `Zero ]) Caqti_request.t
val exists : (string, bool, [ `One ]) Caqti_request.t
val add : val add :
(file * [`build] id, unit, [ `Zero ]) Caqti_request.t (file * [`build] id, unit, [ `Zero ]) Caqti_request.t
val remove_by_build : val remove_by_build :
@ -104,13 +106,11 @@ sig
script : Fpath.t; script : Fpath.t;
platform : string; platform : string;
main_binary : [`build_artifact] id option; main_binary : [`build_artifact] id option;
input_id : string option; input_id : Cstruct.t option;
user_id : [`user] id; user_id : [`user] id;
job_id : [`job] id; job_id : [`job] id;
} }
val pp : t Fmt.t
val get_by_uuid : val get_by_uuid :
(Uuidm.t, [`build] id * t, [ `One | `Zero ]) (Uuidm.t, [`build] id * t, [ `One | `Zero ])
Caqti_request.t Caqti_request.t
@ -119,21 +119,15 @@ sig
val get_all_failed : val get_all_failed :
(int * int * string option, string * t, [ `Many | `One | `Zero ]) Caqti_request.t (int * int * string option, string * t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_all_artifact_sha : val get_all_artifact_sha :
([`job] id * string option, string, [ `Many | `One | `Zero ]) Caqti_request.t ([`job] id * string option, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_latest_successful_with_binary : val get_latest_successful_with_binary :
([`job] id * string, [`build] id * t * file, [ `One | `Zero ]) ([`job] id * string, [`build] id * t * file option, [ `One | `Zero ])
Caqti_request.t Caqti_request.t
val get_failed_builds : val get_failed_builds :
([`job] id * string option, t, [ `Many | `One | `Zero ]) Caqti_request.t ([`job] id * string option, t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_latest_successful : val get_latest_successful :
([`job] id * string option, t, [ `One | `Zero ]) ([`job] id * string option, t, [ `One | `Zero ])
Caqti_request.t Caqti_request.t
val get_builds_older_than :
([`job] id * string option * Ptime.t, [`build] id * t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_builds_excluding_latest_n :
([`job] id * string option * int, [`build] id * t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_nth_latest_successful :
([`job] id * string option * int, [`build] id * t, [ `One | `Zero ]) Caqti_request.t
val get_previous_successful_different_output : val get_previous_successful_different_output :
([`build] id, t, [ `One | `Zero ]) ([`build] id, t, [ `One | `Zero ])
Caqti_request.t Caqti_request.t
@ -143,20 +137,20 @@ sig
val get_same_input_same_output_builds : val get_same_input_same_output_builds :
([`build] id, t, [ `Many | `One | `Zero ]) Caqti_request.t ([`build] id, t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_same_input_different_output_hashes : val get_same_input_different_output_hashes :
([`build] id, string, [ `Many | `One | `Zero ]) Caqti_request.t ([`build] id, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_different_input_same_output_input_ids : val get_different_input_same_output_input_ids :
([`build] id, string, [ `Many | `One | `Zero ]) Caqti_request.t ([`build] id, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
val get_one_by_input_id : val get_one_by_input_id :
(string, t, [ `One ]) Caqti_request.t (Cstruct.t, t, [ `One ]) Caqti_request.t
val get_platforms_for_job : val get_platforms_for_job :
([`job] id, string, [ `Many | `One | `Zero ]) Caqti_request.t ([`job] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
val add : (t, unit, [ `Zero ]) Caqti_request.t val add : (t, unit, [ `Zero ]) Caqti_request.t
val get_by_hash : val get_by_hash :
(string, t, [ `One]) Caqti_request.t (Cstruct.t, t, [ `One]) Caqti_request.t
val get_with_main_binary_by_hash : val get_with_main_binary_by_hash :
(string, t * file option, [ `One]) Caqti_request.t (Cstruct.t, t * file option, [ `One]) Caqti_request.t
val get_with_jobname_by_hash : val get_with_jobname_by_hash :
(string, string * t, [ `One | `Zero]) Caqti_request.t (Cstruct.t, string * t, [ `One | `Zero]) Caqti_request.t
val set_main_binary : ([`build] id * [`build_artifact] id, unit, [ `Zero ]) Caqti_request.t val set_main_binary : ([`build] id * [`build_artifact] id, unit, [ `Zero ]) Caqti_request.t
val remove : ([`build] id, unit, [ `Zero]) Caqti_request.t val remove : ([`build] id, unit, [ `Zero]) Caqti_request.t
end end

View file

@ -1,3 +1,3 @@
(library (library
(name builder_db) (name builder_db)
(libraries builder caqti caqti-driver-sqlite3 asn1-combinators builder_web_auth)) (libraries builder caqti caqti-driver-sqlite3 asn1-combinators mirage-crypto builder_web_auth))

View file

@ -1,8 +1,8 @@
module Asn = struct module Asn = struct
let decode_strict codec cs = let decode_strict codec cs =
match Asn.decode codec cs with match Asn.decode codec cs with
| Ok (a, rest) -> | Ok (a, cs) ->
if String.length rest = 0 if Cstruct.length cs = 0
then Ok a then Ok a
else Error "trailing bytes" else Error "trailing bytes"
| Error (`Parse msg) -> Error ("parse error: " ^ msg) | Error (`Parse msg) -> Error ("parse error: " ^ msg)
@ -17,7 +17,7 @@ module Asn = struct
(required ~label:"delta" int) (required ~label:"delta" int)
(required ~label:"data" utf8_string))) (required ~label:"data" utf8_string)))
let console_of_str, console_to_str = projections_of console let console_of_cs, console_to_cs = projections_of console
end end
type untyped_id = int64 type untyped_id = int64
@ -30,7 +30,8 @@ let id_to_int64 (id : 'a id) : int64 = id
type file = { type file = {
filepath : Fpath.t; filepath : Fpath.t;
sha256 : string; localpath : Fpath.t;
sha256 : Cstruct.t;
size : int; size : int;
} }
@ -47,7 +48,7 @@ let ptime =
let encode t = Ok (Ptime.Span.to_d_ps (Ptime.to_span t)) in let encode t = Ok (Ptime.Span.to_d_ps (Ptime.to_span t)) in
let decode (d, ps) = Ok (Ptime.v (d, ps)) let decode (d, ps) = Ok (Ptime.v (d, ps))
in in
let rep = Caqti_type.(t2 int int64) in let rep = Caqti_type.(tup2 int int64) in
Caqti_type.custom ~encode ~decode rep Caqti_type.custom ~encode ~decode rep
let fpath = let fpath =
@ -56,25 +57,30 @@ let fpath =
|> Result.map_error (fun (`Msg s) -> s) in |> Result.map_error (fun (`Msg s) -> s) in
Caqti_type.custom ~encode ~decode Caqti_type.string Caqti_type.custom ~encode ~decode Caqti_type.string
let cstruct =
let encode t = Ok (Cstruct.to_string t) in
let decode s = Ok (Cstruct.of_string s) in
Caqti_type.custom ~encode ~decode Caqti_type.octets
let file = let file =
let encode { filepath; sha256; size } = let encode { filepath; localpath; sha256; size } =
Ok (filepath, sha256, size) in Ok (filepath, localpath, sha256, size) in
let decode (filepath, sha256, size) = let decode (filepath, localpath, sha256, size) =
Ok { filepath; sha256; size } in Ok { filepath; localpath; sha256; size } in
Caqti_type.custom ~encode ~decode Caqti_type.(t3 fpath octets int) Caqti_type.custom ~encode ~decode Caqti_type.(tup4 fpath fpath cstruct int)
let file_opt = let file_opt =
let rep = Caqti_type.(t3 (option fpath) (option octets) (option int)) in let rep = Caqti_type.(tup4 (option fpath) (option fpath) (option cstruct) (option int)) in
let encode = function let encode = function
| Some { filepath; sha256; size } -> | Some { filepath; localpath; sha256; size } ->
Ok (Some filepath, Some sha256, Some size) Ok (Some filepath, Some localpath, Some sha256, Some size)
| None -> | None ->
Ok (None, None, None) Ok (None, None, None, None)
in in
let decode = function let decode = function
| (Some filepath, Some sha256, Some size) -> | (Some filepath, Some localpath, Some sha256, Some size) ->
Ok (Some { filepath; sha256; size }) Ok (Some { filepath; localpath; sha256; size })
| (None, None, None) -> | (None, None, None, None) ->
Ok None Ok None
| _ -> | _ ->
(* This should not happen if the database is well-formed *) (* This should not happen if the database is well-formed *)
@ -103,25 +109,25 @@ let execution_result =
else else
Error "bad encoding (unknown number)" Error "bad encoding (unknown number)"
in in
let rep = Caqti_type.(t2 int (option string)) in let rep = Caqti_type.(tup2 int (option string)) in
Caqti_type.custom ~encode ~decode rep Caqti_type.custom ~encode ~decode rep
let console = let console =
let encode console = Ok (Asn.console_to_str console) in let encode console = Ok (Asn.console_to_cs console) in
let decode data = Asn.console_of_str data in let decode data = Asn.console_of_cs data in
Caqti_type.(custom ~encode ~decode octets) Caqti_type.custom ~encode ~decode cstruct
let user_info = let user_info =
let rep = Caqti_type.(t7 string octets octets int int int bool) in let rep = Caqti_type.(tup4 string cstruct cstruct (tup4 int int int bool)) in
let encode { Builder_web_auth.username; let encode { Builder_web_auth.username;
password_hash = `Scrypt (password_hash, password_salt, { password_hash = `Scrypt (password_hash, password_salt, {
Builder_web_auth.scrypt_n; scrypt_r; scrypt_p Builder_web_auth.scrypt_n; scrypt_r; scrypt_p
}); });
restricted; } restricted; }
= =
Ok (username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) Ok (username, password_hash, password_salt, (scrypt_n, scrypt_r, scrypt_p, restricted))
in in
let decode (username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) = let decode (username, password_hash, password_salt, (scrypt_n, scrypt_r, scrypt_p, restricted)) =
Ok { Builder_web_auth.username; Ok { Builder_web_auth.username;
password_hash = password_hash =
`Scrypt (password_hash, password_salt, `Scrypt (password_hash, password_salt,

View file

@ -1,3 +1,2 @@
(lang dune 2.7) (lang dune 2.7)
(name builder-web) (name builder-web)
(formatting disabled)

View file

@ -26,7 +26,7 @@ let init_datadir datadir =
let init dbpath datadir = let init dbpath datadir =
Result.bind (init_datadir datadir) @@ fun () -> Result.bind (init_datadir datadir) @@ fun () ->
Lwt_main.run ( Lwt_main.run (
Caqti_lwt_unix.connect Caqti_lwt.connect
(Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ()) (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
>>= fun (module Db : Caqti_lwt.CONNECTION) -> >>= fun (module Db : Caqti_lwt.CONNECTION) ->
Db.find Builder_db.get_application_id () >>= fun application_id -> Db.find Builder_db.get_application_id () >>= fun application_id ->
@ -46,46 +46,28 @@ let safe_seg path =
else Fmt.kstr (fun s -> Error (`Msg s)) "unsafe path %S" path else Fmt.kstr (fun s -> Error (`Msg s)) "unsafe path %S" path
(* mime lookup with orb knowledge *) (* mime lookup with orb knowledge *)
let append_charset = function
(* mime types from nginx:
http://nginx.org/en/docs/http/ngx_http_charset_module.html#charset_types *)
| "text/html" | "text/xml" | "text/plain" | "text/vnd.wap.wml"
| "application/javascript" | "application/rss+xml" | "application/atom+xml"
as content_type ->
content_type ^ "; charset=utf-8" (* default to utf-8 *)
| content_type -> content_type
let mime_lookup path = let mime_lookup path =
append_charset match Fpath.to_string path with
(match Fpath.to_string path with | "build-environment" | "opam-switch" | "system-packages" ->
| "build-environment" | "opam-switch" | "system-packages" -> "text/plain"
"text/plain" | _ ->
| _ -> if Fpath.has_ext "build-hashes" path
if Fpath.has_ext "build-hashes" path then "text/plain"
then "text/plain" else if Fpath.is_prefix Fpath.(v "bin/") path
else if Fpath.is_prefix Fpath.(v "bin/") path then "application/octet-stream"
then "application/octet-stream" else Magic_mime.lookup (Fpath.to_string path)
else Magic_mime.lookup (Fpath.to_string path))
let string_of_html = let string_of_html =
Format.asprintf "%a" (Tyxml.Html.pp ()) Format.asprintf "%a" (Tyxml.Html.pp ())
let is_accept_json req = let or_error_response r =
match Dream.header req "Accept" with
| Some accept when String.starts_with ~prefix:"application/json" accept ->
true
| _ -> false
let or_error_response req r =
let* r = r in let* r = r in
match r with match r with
| Ok response -> Lwt.return response | Ok response -> Lwt.return response
| Error (text, status) -> | Error (text, `Not_Found) ->
if is_accept_json req then Views.resource_not_found ~text
let json_response = Yojson.Basic.to_string (`Assoc [ "error", `String text ]) in |> string_of_html |> Dream.html ~status:`Not_Found
Dream.json ~status json_response | Error (text, status) -> Dream.respond ~status text
else
Dream.respond ~status text
let default_log_warn ~status e = let default_log_warn ~status e =
Log.warn (fun m -> m "%s: %a" (Dream.status_to_string status) pp_error e) Log.warn (fun m -> m "%s: %a" (Dream.status_to_string status) pp_error e)
@ -103,13 +85,6 @@ let if_error
Lwt_result.fail (message, status) Lwt_result.fail (message, status)
| Ok _ as r -> Lwt.return r | Ok _ as r -> Lwt.return r
let not_found_error r =
let* r = r in
match r with
| Error `Not_found ->
Lwt_result.fail ("Resource not found", `Not_Found)
| Ok _ as r -> Lwt.return r
let get_uuid s = let get_uuid s =
Lwt.return Lwt.return
(if String.length s = 36 then (if String.length s = 36 then
@ -124,11 +99,11 @@ let main_binary_of_uuid uuid db =
|> if_error "Error getting job build" |> if_error "Error getting job build"
~log:(fun e -> Log.warn (fun m -> m "Error getting job build: %a" pp_error e)) ~log:(fun e -> Log.warn (fun m -> m "Error getting job build: %a" pp_error e))
>>= fun (_id, build) -> >>= fun (_id, build) ->
Model.not_found build.Builder_db.Build.main_binary match build.Builder_db.Build.main_binary with
|> not_found_error | None -> Lwt_result.fail ("Resource not found", `Not_Found)
>>= fun main_binary -> | Some main_binary ->
Model.build_artifact_by_id main_binary db Model.build_artifact_by_id main_binary db
|> if_error "Error getting main binary" |> if_error "Error getting main binary"
module Viz_aux = struct module Viz_aux = struct
@ -145,14 +120,14 @@ module Viz_aux = struct
viz_dir ~cachedir ~viz_typ ~version viz_dir ~cachedir ~viz_typ ~version
/ input_hash + "html" / input_hash + "html"
) )
let choose_versioned_viz_path let choose_versioned_viz_path
~cachedir ~cachedir
~viz_typ ~viz_typ
~viz_input_hash ~viz_input_hash
~current_version = ~current_version =
let ( >>= ) = Result.bind in let ( >>= ) = Result.bind in
let rec aux current_version = let rec aux current_version =
let path = let path =
viz_path ~cachedir viz_path ~cachedir
~viz_typ ~viz_typ
@ -164,7 +139,7 @@ module Viz_aux = struct
Error (`Msg (Fmt.str "viz '%s': There exist no version of the requested \ Error (`Msg (Fmt.str "viz '%s': There exist no version of the requested \
visualization" visualization"
(viz_type_to_string viz_typ))) (viz_type_to_string viz_typ)))
else else
aux @@ pred current_version aux @@ pred current_version
) )
in in
@ -173,7 +148,7 @@ module Viz_aux = struct
let get_viz_version_from_dirs ~cachedir ~viz_typ = let get_viz_version_from_dirs ~cachedir ~viz_typ =
let ( >>= ) = Result.bind in let ( >>= ) = Result.bind in
Bos.OS.Dir.contents cachedir >>= fun versioned_dirs -> Bos.OS.Dir.contents cachedir >>= fun versioned_dirs ->
let max_cached_version = let max_cached_version =
let viz_typ_str = viz_type_to_string viz_typ ^ "_" in let viz_typ_str = viz_type_to_string viz_typ ^ "_" in
versioned_dirs versioned_dirs
|> List.filter_map (fun versioned_dir -> |> List.filter_map (fun versioned_dir ->
@ -182,7 +157,7 @@ module Viz_aux = struct
Logs.warn (fun m -> m "%s" err); Logs.warn (fun m -> m "%s" err);
None None
| Ok false -> None | Ok false -> None
| Ok true -> | Ok true ->
let dir_str = Fpath.filename versioned_dir in let dir_str = Fpath.filename versioned_dir in
if not (String.starts_with ~prefix:viz_typ_str dir_str) then if not (String.starts_with ~prefix:viz_typ_str dir_str) then
None None
@ -210,6 +185,10 @@ module Viz_aux = struct
let hash_viz_input ~uuid typ db = let hash_viz_input ~uuid typ db =
let open Builder_db in let open Builder_db in
let hex cstruct =
let `Hex hex_str = Hex.of_cstruct cstruct in
hex_str
in
main_binary_of_uuid uuid db >>= fun main_binary -> main_binary_of_uuid uuid db >>= fun main_binary ->
Model.build uuid db Model.build uuid db
|> if_error "Error getting build" >>= fun (build_id, _build) -> |> if_error "Error getting build" >>= fun (build_id, _build) ->
@ -217,30 +196,32 @@ module Viz_aux = struct
|> if_error "Error getting build artifacts" >>= fun artifacts -> |> if_error "Error getting build artifacts" >>= fun artifacts ->
match typ with match typ with
| `Treemap -> | `Treemap ->
let debug_binary = let debug_binary =
let bin = Fpath.base main_binary.filepath in let bin = Fpath.base main_binary.localpath in
List.find_opt List.find_opt
(fun p -> Fpath.(equal (bin + "debug") (base p.filepath))) (fun p -> Fpath.(equal (bin + "debug") (base p.localpath)))
artifacts artifacts
in in
begin begin
Model.not_found debug_binary match debug_binary with
|> not_found_error >>= fun debug_binary -> | None -> Lwt_result.fail ("Error getting debug-binary", `Not_Found)
debug_binary.sha256 | Some debug_binary ->
|> Ohex.encode debug_binary.sha256
|> Lwt_result.return |> hex
end |> Lwt_result.return
| `Dependencies -> end
| `Dependencies ->
let opam_switch = let opam_switch =
List.find_opt List.find_opt
(fun p -> Fpath.(equal (v "opam-switch") (base p.filepath))) (fun p -> Fpath.(equal (v "opam-switch") (base p.localpath)))
artifacts artifacts
in in
Model.not_found opam_switch match opam_switch with
|> not_found_error >>= fun opam_switch -> | None -> Lwt_result.fail ("Error getting opam-switch", `Not_Found)
opam_switch.sha256 | Some opam_switch ->
|> Ohex.encode opam_switch.sha256
|> Lwt_result.return |> hex
|> Lwt_result.return
let try_load_cached_visualization ~cachedir ~uuid viz_typ db = let try_load_cached_visualization ~cachedir ~uuid viz_typ db =
Lwt.return (get_viz_version_from_dirs ~cachedir ~viz_typ) Lwt.return (get_viz_version_from_dirs ~cachedir ~viz_typ)
@ -254,28 +235,29 @@ module Viz_aux = struct
|> Lwt.return |> Lwt.return
|> if_error "Error finding a version of the requested visualization") |> if_error "Error finding a version of the requested visualization")
>>= fun viz_path -> >>= fun viz_path ->
Lwt_result.catch (fun () -> Lwt_result.catch (
Lwt_io.with_file ~mode:Lwt_io.Input Lwt_io.with_file ~mode:Lwt_io.Input
(Fpath.to_string viz_path) (Fpath.to_string viz_path)
Lwt_io.read Lwt_io.read
) )
|> Lwt_result.map_error (fun exn -> `Msg (Printexc.to_string exn)) |> Lwt_result.map_err (fun exn -> `Msg (Printexc.to_string exn))
|> if_error "Error getting cached visualization" |> if_error "Error getting cached visualization"
end end
let get_postfix_target req =
let target = Dream.target req in
let len_target = String.length target in
let len_prefix = String.length @@ Dream.prefix req in
let postfix_target = String.sub target len_prefix (len_target - len_prefix) in
if String.starts_with ~prefix:"/" postfix_target then
String.sub postfix_target 1 (String.length postfix_target - 1)
else
postfix_target
let routes ~datadir ~cachedir ~configdir ~expired_jobs =
let builds ~all ?(filter_builds_later_than = 0) req = let routes ~datadir ~cachedir ~configdir =
let than = let builds req =
if filter_builds_later_than = 0 then
Ptime.epoch
else
let n = Ptime.Span.v (filter_builds_later_than, 0L) in
let now = Ptime_clock.now () in
Ptime.Span.sub (Ptime.to_span now) n |> Ptime.of_span |>
Option.fold ~none:Ptime.epoch ~some:Fun.id
in
Dream.sql req Model.jobs_with_section_synopsis Dream.sql req Model.jobs_with_section_synopsis
|> if_error "Error getting jobs" |> if_error "Error getting jobs"
~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e)) ~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e))
@ -288,29 +270,20 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
r >>= fun acc -> r >>= fun acc ->
Dream.sql req (Model.build_with_main_binary job_id platform) >>= function Dream.sql req (Model.build_with_main_binary job_id platform) >>= function
| Some (build, artifact) -> | Some (build, artifact) ->
if Ptime.is_later ~than build.finish then Lwt_result.return ((platform, build, artifact) :: acc)
Lwt_result.return ((platform, build, artifact) :: acc)
else
Lwt_result.return acc
| None -> | None ->
Log.warn (fun m -> m "Job without builds: %s" job_name); Log.warn (fun m -> m "Job without builds: %s" job_name);
Lwt_result.return acc) Lwt_result.return acc)
ps (Lwt_result.return []) >>= fun platform_builds -> ps (Lwt_result.return []) >>= fun platform_builds ->
if platform_builds = [] then let v = (job_name, synopsis, platform_builds) in
Lwt_result.return acc let section = Option.value ~default:"Uncategorized" section in
else Lwt_result.return (Utils.String_map.add_or_create section v acc))
let v = (job_name, synopsis, platform_builds) in
let section = Option.value ~default:"Uncategorized" section in
Lwt_result.return (Utils.String_map.add_or_create section v acc))
jobs jobs
(Lwt_result.return Utils.String_map.empty) (Lwt_result.return Utils.String_map.empty)
|> if_error "Error getting jobs" |> if_error "Error getting jobs"
~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e)) ~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e))
>>= fun jobs -> >>= fun jobs ->
if is_accept_json req then Views.Builds.make jobs |> string_of_html |> Dream.html |> Lwt_result.ok
Views.Builds.make_json ~all jobs |> Yojson.Basic.to_string |> Dream.json |> Lwt_result.ok
else
Views.Builds.make ~all jobs |> string_of_html |> Dream.html |> Lwt_result.ok
in in
let job req = let job req =
@ -322,13 +295,8 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
|> if_error "Error getting job" |> if_error "Error getting job"
~log:(fun e -> Log.warn (fun m -> m "Error getting job: %a" pp_error e)) ~log:(fun e -> Log.warn (fun m -> m "Error getting job: %a" pp_error e))
>>= fun (readme, builds) -> >>= fun (readme, builds) ->
if is_accept_json req then Views.Job.make ~failed:false ~job_name ~platform ~readme builds
Views.Job.make_json ~failed:false ~job_name ~platform ~readme builds |> string_of_html |> Dream.html |> Lwt_result.ok
|> Yojson.Basic.to_string
|> Dream.json |> Lwt_result.ok
else
Views.Job.make ~failed:false ~job_name ~platform ~readme builds
|> string_of_html |> Dream.html |> Lwt_result.ok
in in
let job_with_failed req = let job_with_failed req =
@ -340,52 +308,31 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
|> if_error "Error getting job" |> if_error "Error getting job"
~log:(fun e -> Log.warn (fun m -> m "Error getting job: %a" pp_error e)) ~log:(fun e -> Log.warn (fun m -> m "Error getting job: %a" pp_error e))
>>= fun (readme, builds) -> >>= fun (readme, builds) ->
if is_accept_json req then Views.Job.make ~failed:true ~job_name ~platform ~readme builds
Views.Job.make_json ~failed:true ~job_name ~platform ~readme builds |> string_of_html |> Dream.html |> Lwt_result.ok
|> Yojson.Basic.to_string
|> Dream.json |> Lwt_result.ok
else
Views.Job.make ~failed:true ~job_name ~platform ~readme builds
|> string_of_html |> Dream.html |> Lwt_result.ok
in in
let redirect_latest req ~job_name ~platform ~artifact = let redirect_latest req =
let job_name = Dream.param req "job" in
let platform = Dream.query req "platform"
and postfix_target = get_postfix_target req in
(Dream.sql req (Model.job_id job_name) >>= Model.not_found >>= fun job_id -> (Dream.sql req (Model.job_id job_name) >>= Model.not_found >>= fun job_id ->
Dream.sql req (Model.latest_successful_build_uuid job_id platform)) Dream.sql req (Model.latest_successful_build_uuid job_id platform))
>>= Model.not_found >>= Model.not_found
|> if_error "Error getting job" >>= fun build -> |> if_error "Error getting job" >>= fun build ->
Dream.redirect req Dream.redirect req
(Link.Job_build_artifact.make_from_string ~job_name ~build ~artifact ()) (Fmt.str "/job/%s/build/%a/%s" job_name Uuidm.pp build postfix_target)
|> Lwt_result.ok |> Lwt_result.ok
in in
let redirect_latest req =
let job_name = Dream.param req "job" in
let platform = Dream.query req "platform" in
let artifact =
(* FIXME Dream.path deprecated *)
let path = begin[@alert "-deprecated"] Dream.path req end in
if path = [] then
"" (* redirect without trailing slash *)
else
"/" ^ (List.map Uri.pct_encode path |> String.concat "/")
in
redirect_latest req ~job_name ~platform ~artifact
and redirect_latest_no_slash req =
let job_name = Dream.param req "job" in
let platform = Dream.query req "platform" in
redirect_latest req ~job_name ~platform ~artifact:""
in
let redirect_main_binary req = let redirect_main_binary req =
let job_name = Dream.param req "job" let job_name = Dream.param req "job"
and build = Dream.param req "build" in and build = Dream.param req "build" in
get_uuid build >>= fun uuid -> get_uuid build >>= fun uuid ->
Dream.sql req (main_binary_of_uuid uuid) >>= fun main_binary -> Dream.sql req (main_binary_of_uuid uuid) >>= fun main_binary ->
let artifact = `File main_binary.Builder_db.filepath in Dream.redirect req
Link.Job_build_artifact.make ~job_name ~build:uuid ~artifact () (Fmt.str "/job/%s/build/%a/f/%a" job_name Uuidm.pp uuid
|> Dream.redirect req Fpath.pp main_binary.Builder_db.filepath)
|> Lwt_result.ok |> Lwt_result.ok
in in
@ -421,55 +368,41 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
~log:(fun e -> Log.warn (fun m -> m "Error getting job build: %a" pp_error e)) ~log:(fun e -> Log.warn (fun m -> m "Error getting job build: %a" pp_error e))
>>= fun (build, main_binary, artifacts, same_input_same_output, different_input_same_output, same_input_different_output, latest, next, previous) -> >>= fun (build, main_binary, artifacts, same_input_same_output, different_input_same_output, same_input_different_output, latest, next, previous) ->
let solo5_manifest = Option.bind main_binary (Model.solo5_manifest datadir) in let solo5_manifest = Option.bind main_binary (Model.solo5_manifest datadir) in
if is_accept_json req then Views.Job_build.make
let json_response = ~name:job_name
`Assoc [ ~build
"job", `String job_name; ~artifacts
"uuid", `String (Uuidm.to_string build.uuid); ~main_binary
"platform", `String build.platform; ~solo5_manifest
"start_time", `String (Ptime.to_rfc3339 build.start); ~same_input_same_output
"finish_time", `String (Ptime.to_rfc3339 build.finish); ~different_input_same_output
"main_binary", (match build.main_binary with Some _ -> `Bool true | None -> `Bool false) ~same_input_different_output
] |> Yojson.Basic.to_string ~latest ~next ~previous
in |> string_of_html |> Dream.html |> Lwt_result.ok
Dream.json ~status:`OK json_response |> Lwt_result.ok
else
Views.Job_build.make
~job_name
~build
~artifacts
~main_binary
~solo5_manifest
~same_input_same_output
~different_input_same_output
~same_input_different_output
~latest ~next ~previous
|> string_of_html |> Dream.html |> Lwt_result.ok
in in
let job_build_file req = let job_build_file req =
let _job_name = Dream.param req "job" let _job_name = Dream.param req "job"
and build = Dream.param req "build" and build = Dream.param req "build"
(* FIXME *) and postfix_target = get_postfix_target req in
and filepath = begin[@alert "-deprecated"] Dream.path req |> String.concat "/" end in
let if_none_match = Dream.header req "if-none-match" in let if_none_match = Dream.header req "if-none-match" in
(* XXX: We don't check safety of [file]. This should be fine however since (* XXX: We don't check safety of [file]. This should be fine however since
* we don't use [file] for the filesystem but is instead used as a key for * we don't use [file] for the filesystem but is instead used as a key for
* lookup in the data table of the 'full' file. *) * lookup in the data table of the 'full' file. *)
get_uuid build >>= fun build -> get_uuid build >>= fun build ->
Fpath.of_string filepath |> Lwt_result.lift Fpath.of_string postfix_target |> Lwt_result.lift
|> if_error ~status:`Not_Found "File not found" >>= fun filepath -> |> if_error ~status:`Not_Found "File not found" >>= fun filepath ->
Dream.sql req (Model.build_artifact build filepath) Dream.sql req (Model.build_artifact build filepath)
|> if_error "Error getting build artifact" >>= fun file -> |> if_error "Error getting build artifact" >>= fun file ->
let etag = Base64.encode_string file.Builder_db.sha256 in let etag = Base64.encode_string (Cstruct.to_string file.Builder_db.sha256) in
match if_none_match with match if_none_match with
| Some etag' when etag = etag' -> | Some etag' when etag = etag' ->
Dream.empty `Not_Modified |> Lwt_result.ok Dream.empty `Not_Modified |> Lwt_result.ok
| _ -> | _ ->
Model.build_artifact_data datadir file Model.build_artifact_data datadir file
|> if_error "Error getting build artifact" |> if_error "Error getting build artifact"
~log:(fun e -> Log.warn (fun m -> m "Error getting build artifact data for file %a: %a" ~log:(fun e -> Log.warn (fun m -> m "Error getting build artifact data for file %a in %a: %a"
Fpath.pp file.Builder_db.filepath Fpath.pp file.Builder_db.filepath Fpath.pp file.Builder_db.localpath
pp_error e)) >>= fun data -> pp_error e)) >>= fun data ->
let headers = [ let headers = [
"Content-Type", mime_lookup file.Builder_db.filepath; "Content-Type", mime_lookup file.Builder_db.filepath;
@ -490,7 +423,7 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
|> if_error "Error getting data" |> if_error "Error getting data"
~log:(fun e -> Log.warn (fun m -> m "Error getting script or console data for build %a: %a" ~log:(fun e -> Log.warn (fun m -> m "Error getting script or console data for build %a: %a"
Uuidm.pp build pp_error e)) >>= fun data -> Uuidm.pp build pp_error e)) >>= fun data ->
let headers = [ "Content-Type", "text/plain; charset=utf-8" ] in let headers = [ "Content-Type", "text/plain" ] in
Dream.respond ~headers data |> Lwt_result.ok Dream.respond ~headers data |> Lwt_result.ok
in in
@ -519,33 +452,16 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
|> Option.to_result ~none:(`Msg "bad finish time") |> Result.map Int64.of_int |> Option.to_result ~none:(`Msg "bad finish time") |> Result.map Int64.of_int
|> Lwt.return |> if_error "Internal server error" >>= fun finish -> |> Lwt.return |> if_error "Internal server error" >>= fun finish ->
Dream.stream ~headers:["Content-Type", "application/tar+gzip"] Dream.stream ~headers:["Content-Type", "application/tar+gzip"]
(fun stream -> (Dream_tar.targz_response datadir finish artifacts)
let+ r = Dream_tar.targz_response datadir finish artifacts stream in
match r with
| Ok () -> ()
| Error _ ->
Log.warn (fun m -> m "error assembling gzipped tar archive");
())
|> Lwt_result.ok
in
let job_build_full req =
let _job_name = Dream.param req "job"
and build = Dream.param req "build" in
get_uuid build >>= fun uuid ->
Dream.sql req (Model.exec_of_build datadir uuid)
|> if_error "Error getting build" >>= fun exec ->
Dream.respond ~headers:["Content-Type", "application/octet-stream"] exec
|> Lwt_result.ok |> Lwt_result.ok
in in
let upload req = let upload req =
let* body = Dream.body req in let* body = Dream.body req in
Builder.Asn.exec_of_str body |> Lwt.return Builder.Asn.exec_of_cs (Cstruct.of_string body) |> Lwt.return
|> if_error ~status:`Bad_Request "Bad request" |> if_error ~status:`Bad_Request "Bad request"
~log:(fun e -> ~log:(fun e ->
Log.warn (fun m -> m "Received bad builder ASN.1"); Log.warn (fun m -> m "Received bad builder ASN.1: %a" pp_error e))
Log.debug (fun m -> m "Bad builder ASN.1: %a" pp_error e))
>>= fun ((({ name ; _ } : Builder.script_job), uuid, _, _, _, _, _) as exec) -> >>= fun ((({ name ; _ } : Builder.script_job), uuid, _, _, _, _, _) as exec) ->
Log.debug (fun m -> m "Received build %a" pp_exec exec); Log.debug (fun m -> m "Received build %a" pp_exec exec);
Authorization.authorized req name Authorization.authorized req name
@ -573,28 +489,18 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
Dream.query req "sha256" |> Option.to_result ~none:(`Msg "Missing sha256 query parameter") Dream.query req "sha256" |> Option.to_result ~none:(`Msg "Missing sha256 query parameter")
|> Lwt.return |> Lwt.return
|> if_error ~status:`Bad_Request "Bad request" >>= fun hash_hex -> |> if_error ~status:`Bad_Request "Bad request" >>= fun hash_hex ->
begin try Ohex.decode hash_hex |> Lwt_result.return begin try Hex.to_cstruct (`Hex hash_hex) |> Lwt_result.return
with Invalid_argument e -> Lwt_result.fail (`Msg ("Bad hex: " ^ e)) with Invalid_argument e -> Lwt_result.fail (`Msg ("Bad hex: " ^ e))
end end
|> if_error ~status:`Bad_Request "Bad request" >>= fun hash -> |> if_error ~status:`Bad_Request "Bad request" >>= fun hash ->
Dream.sql req (Model.build_hash hash) >>= Model.not_found Dream.sql req (Model.build_hash hash) >>= Model.not_found
|> if_error "Internal server error" >>= fun (job_name, build) -> |> if_error "Internal server error" >>= fun (job_name, build) ->
Dream.redirect req Dream.redirect req
(Link.Job_build.make ~job_name ~build:build.Builder_db.Build.uuid ()) (Fmt.str "/job/%s/build/%a" job_name Uuidm.pp build.Builder_db.Build.uuid)
|> Lwt_result.ok |> Lwt_result.ok
in in
let compare_builds req =
let resolve_artifact_size id_opt conn =
match id_opt with
| None -> Lwt.return_ok None
| Some id ->
Model.build_artifact_by_id id conn >|= fun file ->
Some file.size
in
let process_comparison req =
let build_left = Dream.param req "build_left" in let build_left = Dream.param req "build_left" in
let build_right = Dream.param req "build_right" in let build_right = Dream.param req "build_right" in
get_uuid build_left >>= fun build_left -> get_uuid build_left >>= fun build_left ->
@ -614,16 +520,14 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
Model.build_artifact_data datadir >>= fun build_env_right -> Model.build_artifact_data datadir >>= fun build_env_right ->
Model.build_artifact build_right.Builder_db.Build.uuid (Fpath.v "system-packages") conn >>= Model.build_artifact build_right.Builder_db.Build.uuid (Fpath.v "system-packages") conn >>=
Model.build_artifact_data datadir >>= fun system_packages_right -> Model.build_artifact_data datadir >>= fun system_packages_right ->
resolve_artifact_size build_left.Builder_db.Build.main_binary conn >>= fun build_left_file_size ->
resolve_artifact_size build_right.Builder_db.Build.main_binary conn >>= fun build_right_file_size ->
Model.job_name build_left.job_id conn >>= fun job_left -> Model.job_name build_left.job_id conn >>= fun job_left ->
Model.job_name build_right.job_id conn >|= fun job_right -> Model.job_name build_right.job_id conn >|= fun job_right ->
(job_left, job_right, build_left, build_right, build_left_file_size, (job_left, job_right, build_left, build_right,
build_right_file_size, switch_left, build_env_left, system_packages_left, switch_left, build_env_left, system_packages_left,
switch_right, build_env_right, system_packages_right)) switch_right, build_env_right, system_packages_right))
|> if_error "Internal server error" |> if_error "Internal server error"
>>= fun (job_left, job_right, build_left, build_right, build_left_file_size, >>= fun (job_left, job_right, build_left, build_right,
build_right_file_size, switch_left, build_env_left, system_packages_left, switch_left, build_env_left, system_packages_left,
switch_right, build_env_right, system_packages_right) -> switch_right, build_env_right, system_packages_right) ->
let env_diff = Utils.compare_env build_env_left build_env_right let env_diff = Utils.compare_env build_env_left build_env_right
and pkg_diff = Utils.compare_pkgs system_packages_left system_packages_right and pkg_diff = Utils.compare_pkgs system_packages_left system_packages_right
@ -631,50 +535,13 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
let switch_left = OpamFile.SwitchExport.read_from_string switch_left let switch_left = OpamFile.SwitchExport.read_from_string switch_left
and switch_right = OpamFile.SwitchExport.read_from_string switch_right in and switch_right = OpamFile.SwitchExport.read_from_string switch_right in
let opam_diff = Opamdiff.compare switch_left switch_right in let opam_diff = Opamdiff.compare switch_left switch_right in
(job_left, job_right, build_left, build_right, build_left_file_size, build_right_file_size, env_diff, pkg_diff, opam_diff) Views.compare_builds
|> Lwt.return_ok ~job_left ~job_right
~build_left ~build_right
in ~env_diff
~pkg_diff
let compare_builds req = ~opam_diff
process_comparison req >>= fun |> string_of_html |> Dream.html |> Lwt_result.ok
(job_left, job_right, build_left, build_right, build_left_file_size, build_right_file_size, env_diff, pkg_diff, opam_diff) ->
if is_accept_json req then
let file_size_json = Option.fold ~none:`Null ~some:(fun size -> `Int size) in
let json_response =
`Assoc [
"left", `Assoc [
"job", `String job_left;
"uuid", `String (Uuidm.to_string build_left.uuid);
"platform", `String build_left.platform;
"start_time", `String (Ptime.to_rfc3339 build_left.start);
"finish_time", `String (Ptime.to_rfc3339 build_left.finish);
"main_binary", `Bool (Option.is_some build_left_file_size);
"main_binary_size", file_size_json build_left_file_size;
];
"right", `Assoc [
"job", `String job_right;
"uuid", `String (Uuidm.to_string build_right.uuid);
"platform", `String build_right.platform;
"start_time", `String (Ptime.to_rfc3339 build_right.start);
"finish_time", `String (Ptime.to_rfc3339 build_right.finish);
"main_binary", `Bool (Option.is_some build_right_file_size);
"main_binary_size", file_size_json build_right_file_size;
];
"env_diff", Utils.diff_map_to_json env_diff;
"package_diff", Utils.diff_map_to_json pkg_diff;
"opam_diff", Opamdiff.compare_to_json opam_diff
] |> Yojson.Basic.to_string
in
Dream.json ~status:`OK json_response |> Lwt_result.ok
else
Views.compare_builds
~job_left ~job_right
~build_left ~build_right
~env_diff
~pkg_diff
~opam_diff
|> string_of_html |> Dream.html |> Lwt_result.ok
in in
let upload_binary req = let upload_binary req =
@ -715,40 +582,46 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
>>= fun () -> Dream.respond "" |> Lwt_result.ok >>= fun () -> Dream.respond "" |> Lwt_result.ok
in in
let w f req = or_error_response req (f req) in let redirect_parent req =
let queries = Dream.all_queries req in
let parent_url =
let parent_path =
Dream.target req
|> Utils.Path.of_url
|> List.rev |> List.tl |> List.rev
in
Utils.Path.to_url ~path:parent_path ~queries
in
Dream.redirect ~status:`Temporary_Redirect req parent_url
|> Lwt_result.ok
in
let w f req = or_error_response (f req) in
[ [
`Get, "/", (w (builds ~all:false ~filter_builds_later_than:expired_jobs)); Dream.get "/" (w builds);
`Get, "/job/:job", (w job); Dream.get "/job" (w redirect_parent);
`Get, "/job/:job/failed", (w job_with_failed); Dream.get "/job/:job" (w job);
`Get, "/job/:job/build/latest/**", (w redirect_latest); Dream.get "/job/:job/build" (w redirect_parent);
`Get, "/job/:job/build/latest", (w redirect_latest_no_slash); Dream.get "/job/:job/failed" (w job_with_failed);
`Get, "/job/:job/build/:build", (w job_build); Dream.get "/job/:job/build/latest/**" (w redirect_latest);
`Get, "/job/:job/build/:build/f/**", (w job_build_file); Dream.get "/job/:job/build/:build" (w job_build);
`Get, "/job/:job/build/:build/main-binary", (w redirect_main_binary); Dream.get "/job/:job/build/:build/f/**" (w job_build_file);
`Get, "/job/:job/build/:build/viztreemap", (w @@ job_build_viz `Treemap); Dream.get "/job/:job/build/:build/main-binary" (w redirect_main_binary);
`Get, "/job/:job/build/:build/vizdependencies", (w @@ job_build_viz `Dependencies); Dream.get "/job/:job/build/:build/viztreemap" (w @@ job_build_viz `Treemap);
`Get, "/job/:job/build/:build/script", (w (job_build_static_file `Script)); Dream.get "/job/:job/build/:build/vizdependencies" (w @@ job_build_viz `Dependencies);
`Get, "/job/:job/build/:build/console", (w (job_build_static_file `Console)); Dream.get "/job/:job/build/:build/script" (w (job_build_static_file `Script));
`Get, "/job/:job/build/:build/all.tar.gz", (w job_build_targz); Dream.get "/job/:job/build/:build/console" (w (job_build_static_file `Console));
`Get, "/job/:job/build/:build/exec", (w job_build_full); Dream.get "/failed-builds" (w failed_builds);
`Get, "/failed-builds", (w failed_builds); Dream.get "/job/:job/build/:build/all.tar.gz" (w job_build_targz);
`Get, "/all-builds", (w (builds ~all:true)); Dream.get "/hash" (w hash);
`Get, "/hash", (w hash); Dream.get "/compare/:build_left/:build_right" (w compare_builds);
`Get, "/compare/:build_left/:build_right", (w compare_builds); Dream.post "/upload" (Authorization.authenticate (w upload));
`Post, "/upload", (Authorization.authenticate (w upload)); Dream.post "/job/:job/platform/:platform/upload" (Authorization.authenticate (w upload_binary));
`Post, "/job/:job/platform/:platform/upload", (Authorization.authenticate (w upload_binary));
] ]
let to_dream_route = function
| `Get, path, handler -> Dream.get path handler
| `Post, path, handler -> Dream.post path handler
let to_dream_routes l = List.map to_dream_route l
let routeprefix_ignorelist_when_removing_trailing_slash = [ let routeprefix_ignorelist_when_removing_trailing_slash = [
"/job/:job/build/:build/f"; "/job/:job/build/:build/f"
"/job/:job/build/latest";
] ]
module Middleware = struct module Middleware = struct
@ -769,48 +642,7 @@ module Middleware = struct
let queries = Dream.all_queries req in let queries = Dream.all_queries req in
let url = Utils.Path.to_url ~path ~queries in let url = Utils.Path.to_url ~path ~queries in
(*> Note: See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Location*) (*> Note: See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Location*)
Dream.redirect ~status:`Moved_Permanently req url Dream.redirect ~status:`Permanent_Redirect req url
| _ (* /... *) -> handler req | _ (* /... *) -> handler req
end end
let is_iframe_page ~req =
match Option.bind req (fun r -> Dream.header r "Sec-Fetch-Dest") with
| Some "iframe" -> true
| _ -> false
let error_template error _debug_info suggested_response =
let target =
match error.Dream.request with
| None -> "?"
| Some req -> Dream.target req in
let referer =
Option.bind error.Dream.request (fun req -> Dream.header req "referer")
in
match Dream.status suggested_response with
| `Not_Found ->
let html =
if is_iframe_page ~req:error.Dream.request then
Views.viz_not_found
else
Views.page_not_found ~target ~referer
in
Dream.set_header suggested_response "Content-Type" Dream.text_html;
Dream.set_body suggested_response @@ string_of_html html;
(* NOTE: this does the same job as the dream-encoding middleware;
the middleware is not triggered in error templates *)
let preferred_algorithm =
Option.bind error.request
Dream_encoding.preferred_content_encoding
in
begin match preferred_algorithm with
| Some algorithm ->
let+ body = Dream.body suggested_response in
Dream_encoding.with_encoded_body body ~algorithm suggested_response
| None ->
Lwt.return suggested_response
end
| _ ->
Lwt.return suggested_response
module Link = Link

View file

@ -1,36 +1,47 @@
module High : sig open Lwt.Infix
type t
type 'a s = 'a Lwt.t
external inj : 'a s -> ('a, t) Tar.io = "%identity" module Writer = struct
external prj : ('a, t) Tar.io -> 'a s = "%identity" type out_channel =
end = struct { mutable gz : Gz.Def.encoder
type t ; ic : Cstruct.t
type 'a s = 'a Lwt.t ; oc : Cstruct.t
; stream : Dream.stream }
external inj : 'a -> 'b = "%identity" type 'a t = 'a Lwt.t
external prj : 'a -> 'b = "%identity"
let really_write ({ oc; stream; _ } as state) cs =
let rec until_await gz =
match Gz.Def.encode gz with
| `Await gz -> state.gz <- gz ; Lwt.return_unit
| `Flush gz ->
let max = Cstruct.length oc - Gz.Def.dst_rem gz in
let str = Cstruct.copy oc 0 max in
Dream.write stream str >>= fun () ->
let { Cstruct.buffer; off= cs_off; len= cs_len; } = oc in
until_await (Gz.Def.dst gz buffer cs_off cs_len)
| `End _gz -> assert false in
if Cstruct.length cs = 0
then Lwt.return_unit
else ( let { Cstruct.buffer; off; len; } = cs in
let gz = Gz.Def.src state.gz buffer off len in
until_await gz )
end end
let value v = Tar.High (High.inj v) module HW = Tar.HeaderWriter(Lwt)(Writer)
let ok_value v = value (Lwt_result.ok v) let write_block (header : Tar.Header.t) lpath ({ Writer.ic= buf; _ } as state) =
HW.write ~level:Tar.Header.Ustar header state >>= fun () ->
let run t stream = Lwt_io.open_file ~mode:Lwt_io.Input (Fpath.to_string lpath) >>= fun ic ->
let rec run : type a. (a, 'err, High.t) Tar.t -> (a, 'err) result Lwt.t = let rec loop () =
function let { Cstruct.buffer; off; len; } = buf in
| Tar.Write str -> Lwt_io.read_into_bigstring ic buffer off len >>= function
(* Can this not fail?!? Obviously, it can, but we never know?? *) | 0 -> Lwt.return ()
Lwt_result.ok (Dream.write stream str) | len' ->
| Tar.Seek _ | Tar.Read _ | Tar.Really_read _ -> assert false Writer.really_write state (Cstruct.sub buf 0 len') >>= fun () ->
| Tar.Return value -> Lwt.return value loop ()
| Tar.High value -> High.prj value
| Tar.Bind (x, f) ->
let open Lwt_result.Syntax in
let* v = run x in
run (f v)
in in
run t loop () >>= fun () ->
Writer.really_write state (Tar.Header.zero_padding header)
let header_of_file mod_time (file : Builder_db.file) = let header_of_file mod_time (file : Builder_db.file) =
let file_mode = if Fpath.is_prefix Fpath.(v "bin/") file.filepath then let file_mode = if Fpath.is_prefix Fpath.(v "bin/") file.filepath then
@ -40,53 +51,38 @@ let header_of_file mod_time (file : Builder_db.file) =
in in
Tar.Header.make ~file_mode ~mod_time (Fpath.to_string file.filepath) (Int64.of_int file.size) Tar.Header.make ~file_mode ~mod_time (Fpath.to_string file.filepath) (Int64.of_int file.size)
let contents datadir file : unit -> (string option, _, _) Tar.t = let targz_response datadir finish (files : Builder_db.file list) (stream : Dream.stream) =
let state = ref `Initial in let state =
let dispenser () = let ic = Cstruct.create (4 * 4 * 1024) in
let ( let* ) = Tar.( let* ) in let oc = Cstruct.create 4096 in
let src = Fpath.append datadir (Model.artifact_path file) in let gz =
let* state' = let w = De.Lz77.make_window ~bits:15 in
match !state with let q = De.Queue.create 0x1000 in
| `Initial -> let mtime = Int32.of_float (Unix.gettimeofday ()) in
let* fd = ok_value (Lwt_io.open_file ~mode:Lwt_io.Input (Fpath.to_string src)) in let gz = Gz.Def.encoder `Manual `Manual ~mtime Gz.Unix ~q ~w ~level:4 in
let s = `Active fd in let { Cstruct.buffer; off; len; } = oc in
state := s; Tar.return (Ok s) Gz.Def.dst gz buffer off len
| `Active _ | `Closed as s -> Tar.return (Ok s)
in in
match state' with { Writer.gz; ic; oc; stream; }
| `Closed -> Tar.return (Ok None)
| `Active fd ->
let* data = ok_value (Lwt_io.read ~count:65536 fd) in
if String.length data = 0 then begin
state := `Closed;
let* () = ok_value (Lwt_io.close fd) in
Tar.return (Ok None)
end else
Tar.return (Ok (Some data))
in in
dispenser Lwt_list.iter_s (fun file ->
let hdr = header_of_file finish file in
let entries datadir finish files = write_block hdr Fpath.(datadir // file.localpath) state)
let files = files >>= fun () ->
List.map (fun file -> Writer.really_write state Tar.Header.zero_block >>= fun () ->
let hdr = header_of_file finish file in Writer.really_write state Tar.Header.zero_block >>= fun () ->
let level = Some Tar.Header.Posix in (* assert (Gz.Def.encode gz = `Await) *)
(level, hdr, contents datadir file) let rec until_end gz = match Gz.Def.encode gz with
) | `Await _gz -> assert false
files | `Flush gz | `End gz as flush_or_end ->
let max = Cstruct.length state.oc - Gz.Def.dst_rem gz in
let str = Cstruct.copy state.oc 0 max in
Dream.write stream str >>= fun () -> match flush_or_end with
| `Flush gz ->
let { Cstruct.buffer; off= cs_off; len= cs_len; } = state.oc in
until_end (Gz.Def.dst gz buffer cs_off cs_len)
| `End _ -> Lwt.return_unit
in in
let files = ref files in until_end (Gz.Def.src state.gz De.bigstring_empty 0 0) >>= fun () ->
fun () -> match !files with Dream.flush stream >>= fun () ->
| [] -> Tar.return (Ok None) Dream.close stream
| f :: fs -> files := fs; Tar.return (Ok (Some f))
let targz_response datadir finish files stream =
let entries : (_, _) Tar.entries = entries datadir finish files in
let global_hdr =
Tar.Header.Extended.make
~comment:"Tar file produced by builder-web.%%VERSION_NUM%%"
()
in
let finish32 = Int64.to_int32 finish in
Logs.err (fun m -> m "finished at %ld (%Ld)" finish32 finish);
run (Tar_gz.out_gzipped ~level:9 ~mtime:finish32 Gz.Unix (Tar.out ~global_hdr entries)) stream

View file

@ -1,5 +1,21 @@
(library (library
(name builder_web) (name builder_web)
(libraries builder builder_db dream tyxml bos duration ohex caqti-lwt (libraries
opamdiff ptime.clock.os cmarkit tar tar.gz tar-unix cachet solo5-elftool decompress.de builder
decompress.gz uri digestif dream-encoding)) builder_db
dream
tyxml
bos
duration
hex
caqti-lwt
opamdiff
ptime.clock.os
omd
tar
owee
solo5-elftool
decompress.de
decompress.gz
uri
))

View file

@ -1,89 +0,0 @@
let fpath_url_pp ppf f =
Fpath.segs f
|> List.map Uri.pct_encode
|> Fmt.(list ~sep:(any "/") string) ppf
module Queries_aux = struct
let flatten = Option.value ~default:[]
let add_raw url_str queries =
let uri = Uri.of_string url_str in
let uri = Uri.add_query_params uri queries in
Uri.to_string uri
let add ~encode_query queries url_str =
queries |> flatten |> List.map encode_query |> add_raw url_str
end
let pctencode fmt str = Format.fprintf fmt "%s" (Uri.pct_encode str)
module Root = struct
let make () = "/"
end
module Job = struct
let encode_query = function
| `Platform p -> "platform", [ p ]
let make ?queries ~job_name () =
Fmt.str "/job/%a" pctencode job_name
|> Queries_aux.add ~encode_query queries
let make_failed ?queries ~job_name () =
Fmt.str "/job/%a/failed" pctencode job_name
|> Queries_aux.add ~encode_query queries
end
module Job_build = struct
let make ~job_name ~build () =
Fmt.str "/job/%a/build/%a"
pctencode job_name
Uuidm.pp build
end
module Job_build_artifact = struct
let encode_artifact = function
| `Main_binary -> "/main-binary"
| `Viz_treemap -> "/viztreemap"
| `Viz_dependencies -> "/vizdependencies"
| `Script -> "/script"
| `Console -> "/console"
| `All_targz -> "/all.tar.gz"
| `File f -> "/f/" ^ Fmt.to_to_string fpath_url_pp f
let make_from_string ~job_name ~build ~artifact () =
Fmt.str "/job/%a/build/%a%s"
pctencode job_name
Uuidm.pp build
artifact
let make ~job_name ~build ~artifact () =
let artifact = encode_artifact artifact in
make_from_string ~job_name ~build ~artifact ()
end
module Compare_builds = struct
let make ~left ~right () =
Fmt.str "/compare/%a/%a"
Uuidm.pp left
Uuidm.pp right
end
module Failed_builds = struct
let make ~count ~start () =
Fmt.str "/failed-builds?count=%d&start=%d" count start
end

View file

@ -15,18 +15,10 @@ let pp_error ppf = function
Caqti_error.pp ppf e Caqti_error.pp ppf e
let not_found = function let not_found = function
| None -> Lwt_result.fail `Not_found | None -> Lwt.return (Error `Not_found :> (_, [> error ]) result)
| Some v -> Lwt_result.return v | Some v -> Lwt_result.return v
let staging datadir = Fpath.(datadir / "_staging") let staging datadir = Fpath.(datadir / "_staging")
let artifact_path artifact =
let sha256 = Ohex.encode artifact.Builder_db.sha256 in
(* NOTE: [sha256] is 64 characters when it's a hex sha256 checksum *)
(* NOTE: We add the prefix to reduce the number of files in a directory - a
workaround for inferior filesystems. We can easily revert this by changing
this function and adding a migration. *)
let prefix = String.sub sha256 0 2 in
Fpath.(v "_artifacts" / prefix / sha256)
let read_file datadir filepath = let read_file datadir filepath =
let filepath = Fpath.(datadir // filepath) in let filepath = Fpath.(datadir // filepath) in
@ -42,7 +34,7 @@ let read_file datadir filepath =
Log.warn (fun m -> m "Error reading local file %a: %s" Log.warn (fun m -> m "Error reading local file %a: %s"
Fpath.pp filepath (Unix.error_message e)); Fpath.pp filepath (Unix.error_message e));
Lwt.return_error (`File_error filepath) Lwt.return_error (`File_error filepath)
| e -> Lwt.reraise e) | e -> Lwt.fail e)
let build_artifact build filepath (module Db : CONN) = let build_artifact build filepath (module Db : CONN) =
Db.find_opt Builder_db.Build_artifact.get_by_build_uuid (build, filepath) Db.find_opt Builder_db.Build_artifact.get_by_build_uuid (build, filepath)
@ -52,29 +44,15 @@ let build_artifact_by_id id (module Db : CONN) =
Db.find Builder_db.Build_artifact.get id Db.find Builder_db.Build_artifact.get id
let build_artifact_data datadir file = let build_artifact_data datadir file =
read_file datadir (artifact_path file) read_file datadir file.Builder_db.localpath
let build_artifacts build (module Db : CONN) = let build_artifacts build (module Db : CONN) =
Db.collect_list Builder_db.Build_artifact.get_all_by_build build >|= Db.collect_list Builder_db.Build_artifact.get_all_by_build build >|=
List.map snd List.map snd
let solo5_manifest datadir file = let solo5_manifest datadir file =
let cachet = let buf = Owee_buf.map_binary Fpath.(to_string (datadir // file.Builder_db.localpath)) in
let path = Fpath.(to_string (datadir // artifact_path file)) in Solo5_elftool.query_manifest buf |> Result.to_option
let fd = Unix.openfile path [Unix.O_RDONLY] 0 in
let len = Unix.lseek fd 0 Unix.SEEK_END in
let buf =
Bigarray.array1_of_genarray
(Unix.map_file fd Bigarray.char
Bigarray.c_layout false [|len|]) in
Unix.close fd;
let map () ~pos len =
let len = min len (max 0 (Bigarray.Array1.dim buf - pos)) in
Bigarray.Array1.sub buf pos len
in
Cachet.make ~pagesize:8 ~map ()
in
Solo5_elftool.query_manifest cachet |> Result.to_option
let platforms_of_job id (module Db : CONN) = let platforms_of_job id (module Db : CONN) =
Db.collect_list Builder_db.Build.get_platforms_for_job id Db.collect_list Builder_db.Build.get_platforms_for_job id
@ -218,42 +196,46 @@ let cleanup_staging datadir (module Db : Caqti_lwt.CONNECTION) =
(cleanup_staged staged)) (cleanup_staged staged))
stageds stageds
let save path data = let save file data =
let open Lwt.Infix in let open Lwt.Infix in
Lwt.catch Lwt.catch
(fun () -> (fun () ->
Lwt_io.open_file ~mode:Lwt_io.Output (Fpath.to_string path) >>= fun oc -> Lwt_io.open_file ~mode:Lwt_io.Output (Fpath.to_string file) >>= fun oc ->
Lwt_io.write oc data >>= fun () -> Lwt_io.write oc data >>= fun () ->
Lwt_io.close oc Lwt_io.close oc
|> Lwt_result.ok) |> Lwt_result.ok)
(function (function
| Unix.Unix_error (e, _, _) -> | Unix.Unix_error (e, _, _) ->
Lwt_result.fail (`Msg (Unix.error_message e)) Lwt_result.fail (`Msg (Unix.error_message e))
| e -> Lwt.reraise e) | e -> Lwt.fail e)
let save_artifacts staging artifacts = let save_file dir staging (filepath, data) =
List.fold_left let size = String.length data in
(fun r (file, data) -> let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
r >>= fun () -> let localpath = Fpath.append dir filepath in
let sha256 = Ohex.encode file.Builder_db.sha256 in let destpath = Fpath.append staging filepath in
let destpath = Fpath.(staging / sha256) in Lwt_result.lift (Bos.OS.Dir.create (Fpath.parent destpath)) >>= fun _ ->
save destpath data) save destpath data >|= fun () ->
(Lwt_result.return ()) { Builder_db.filepath; localpath; sha256; size }
artifacts
let commit_files datadir staging_dir job_name uuid artifacts = let save_files dir staging files =
(* First we move the artifacts *)
List.fold_left List.fold_left
(fun r artifact -> (fun r file ->
r >>= fun () -> r >>= fun acc ->
let sha256 = Ohex.encode artifact.Builder_db.sha256 in save_file dir staging file >>= fun file ->
let src = Fpath.(staging_dir / sha256) in Lwt_result.return (file :: acc))
let dest = Fpath.(datadir // artifact_path artifact) in (Lwt_result.return [])
Lwt.return (Bos.OS.Dir.create (Fpath.parent dest)) >>= fun _created -> files
Lwt.return (Bos.OS.Path.move ~force:true src dest))
(Lwt_result.return ()) let save_all staging_dir (job : Builder.script_job) uuid artifacts =
artifacts >>= fun () -> let build_dir = Fpath.(v job.Builder.name / Uuidm.to_string uuid) in
(* Now the staging dir only contains script & console *) let output_dir = Fpath.(build_dir / "output")
and staging_output_dir = Fpath.(staging_dir / "output") in
Lwt.return (Bos.OS.Dir.create staging_output_dir) >>= fun _ ->
save_files output_dir staging_output_dir artifacts >>= fun artifacts ->
Lwt_result.return artifacts
let commit_files datadir staging_dir job_name uuid =
let job_dir = Fpath.(datadir / job_name) in let job_dir = Fpath.(datadir / job_name) in
let dest = Fpath.(job_dir / Uuidm.to_string uuid) in let dest = Fpath.(job_dir / Uuidm.to_string uuid) in
Lwt.return (Bos.OS.Dir.create job_dir) >>= fun _ -> Lwt.return (Bos.OS.Dir.create job_dir) >>= fun _ ->
@ -266,32 +248,12 @@ let infer_section_and_synopsis artifacts =
| Some opam -> OpamFile.OPAM.synopsis opam, OpamFile.OPAM.descr_body opam | Some opam -> OpamFile.OPAM.synopsis opam, OpamFile.OPAM.descr_body opam
in in
let infer_section switch root = let infer_section switch root =
let root_pkg = root.OpamPackage.name in let root_pkg_name = OpamPackage.Name.to_string root.OpamPackage.name in
let is_unikernel = if Astring.String.is_prefix ~affix:"mirage-unikernel-" root_pkg_name then
(* since mirage 4.2.0, the x-mirage-opam-lock-location is emitted *) let influx = OpamPackage.Name.of_string "metrics-influx" in
Option.value ~default:false if OpamPackage.Set.exists (fun p -> OpamPackage.Name.equal p.OpamPackage.name influx)
(Option.map (fun opam -> switch.OpamFile.SwitchExport.selections.OpamTypes.sel_installed
Option.is_some (OpamFile.OPAM.extended opam "x-mirage-opam-lock-location" Fun.id)) then
(OpamPackage.Name.Map.find_opt root_pkg switch.OpamFile.SwitchExport.overlays))
in
let root_pkg_name = OpamPackage.Name.to_string root_pkg in
if is_unikernel || Astring.String.is_prefix ~affix:"mirage-unikernel-" root_pkg_name then
let metrics_influx =
let influx = OpamPackage.Name.of_string "metrics-influx" in
OpamPackage.Set.exists (fun p -> OpamPackage.Name.equal p.OpamPackage.name influx)
switch.OpamFile.SwitchExport.selections.OpamTypes.sel_installed
in
let mirage_monitoring =
let monitoring = OpamPackage.Name.of_string "mirage-monitoring" in
match OpamPackage.Name.Map.find_opt root_pkg switch.OpamFile.SwitchExport.overlays with
| None -> false
| Some opam ->
let depends = OpamFile.OPAM.depends opam in
OpamFormula.fold_left (fun acc (n', _) ->
acc || OpamPackage.Name.equal n' monitoring)
false depends
in
if metrics_influx || mirage_monitoring then
"Unikernels (with metrics reported to Influx)" "Unikernels (with metrics reported to Influx)"
else else
"Unikernels" "Unikernels"
@ -320,8 +282,7 @@ let compute_input_id artifacts =
get_hash (Fpath.v "build-environment"), get_hash (Fpath.v "build-environment"),
get_hash (Fpath.v "system-packages") get_hash (Fpath.v "system-packages")
with with
| Some a, Some b, Some c -> | Some a, Some b, Some c -> Some (Mirage_crypto.Hash.SHA256.digest (Cstruct.concat [a;b;c]))
Some Digestif.SHA256.(to_raw_string (digestv_string [a;b;c]))
| _ -> None | _ -> None
let save_console_and_script staging_dir job_name uuid console script = let save_console_and_script staging_dir job_name uuid console script =
@ -343,25 +304,6 @@ let prepare_staging staging_dir =
then Lwt_result.fail (`Msg "build directory already exists") then Lwt_result.fail (`Msg "build directory already exists")
else Lwt_result.return () else Lwt_result.return ()
(* saving:
- for each artifact compute its sha256 checksum -- calling Lwt.pause in
between
- lookup artifact sha256 in the database and filter them out of the list: not_in_db
- mkdir -p _staging/uuid/
- save console & script to _staging/uuid/
- save each artifact in not_in_db as _staging/uuid/sha256
committing:
- for each artifact mv _staging/uuid/sha256 _artifacts/sha256
(or _artifacts/prefix(sha256)/sha256 where prefix(sha256) is the first two hex digits in sha256)
- now _staging/uuid only contains console & script so we mv _staging/uuid _staging/job/uuid
potential issues:
- race condition in uploading same artifact:
* if the artifact already exists in the database and thus filesystem then nothing is done
* if the artifact is added to the database and/or filesystem we atomically overwrite it
- input_id depends on a sort order?
*)
let add_build let add_build
~datadir ~datadir
~cachedir ~cachedir
@ -373,7 +315,7 @@ let add_build
let job_name = job.Builder.name in let job_name = job.Builder.name in
let staging_dir = Fpath.(staging datadir / Uuidm.to_string uuid) in let staging_dir = Fpath.(staging datadir / Uuidm.to_string uuid) in
let or_cleanup x = let or_cleanup x =
Lwt_result.map_error (fun e -> Lwt_result.map_err (fun e ->
Bos.OS.Dir.delete ~recurse:true staging_dir Bos.OS.Dir.delete ~recurse:true staging_dir
|> Result.iter_error (fun e -> |> Result.iter_error (fun e ->
Log.err (fun m -> m "Failed to remove staging dir %a: %a" Log.err (fun m -> m "Failed to remove staging dir %a: %a"
@ -382,35 +324,16 @@ let add_build
e) e)
x x
in in
let not_interesting p = let artifacts_to_preserve =
String.equal (Fpath.basename p) "README.md" || String.equal (Fpath.get_ext p) ".build-hashes" let not_interesting p =
String.equal (Fpath.basename p) "README.md" || String.equal (Fpath.get_ext p) ".build-hashes"
in
List.filter (fun (p, _) -> not (not_interesting p)) raw_artifacts
in in
begin
List.fold_left
(fun r (filepath, data) ->
r >>= fun acc ->
if not_interesting filepath then
Lwt_result.return acc
else
let sha256 = Digestif.SHA256.(to_raw_string (digest_string data))
and size = String.length data in
Lwt_result.ok (Lwt.pause ()) >|= fun () ->
({ filepath; sha256; size }, data) :: acc)
(Lwt_result.return [])
raw_artifacts
end >>= fun artifacts ->
or_cleanup (prepare_staging staging_dir) >>= fun () -> or_cleanup (prepare_staging staging_dir) >>= fun () ->
or_cleanup (save_console_and_script staging_dir job_name uuid console job.Builder.script) or_cleanup (save_console_and_script staging_dir job_name uuid console job.Builder.script)
>>= fun (console, script) -> >>= fun (console, script) ->
List.fold_left or_cleanup (save_all staging_dir job uuid artifacts_to_preserve) >>= fun artifacts ->
(fun r ((f, _) as artifact) ->
r >>= fun acc ->
Db.find Builder_db.Build_artifact.exists f.sha256 >|= fun exists ->
if exists then acc else artifact :: acc)
(Lwt_result.return [])
artifacts >>= fun artifacts_to_save ->
or_cleanup (save_artifacts staging_dir artifacts_to_save) >>= fun () ->
let artifacts = List.map fst artifacts in
let r = let r =
Db.start () >>= fun () -> Db.start () >>= fun () ->
Db.exec Job.try_add job_name >>= fun () -> Db.exec Job.try_add job_name >>= fun () ->
@ -478,11 +401,11 @@ let add_build
Db.exec Build_artifact.add (file, id)) Db.exec Build_artifact.add (file, id))
(Lwt_result.return ()) (Lwt_result.return ())
remaining_artifacts_to_add >>= fun () -> remaining_artifacts_to_add >>= fun () ->
commit_files datadir staging_dir job_name uuid (List.map fst artifacts_to_save) >>= fun () -> Db.commit () >>= fun () ->
Db.commit () >|= fun () -> commit_files datadir staging_dir job_name uuid >|= fun () ->
main_binary main_binary
in in
Lwt_result.bind_lwt_error (or_cleanup r) Lwt_result.bind_lwt_err (or_cleanup r)
(fun e -> (fun e ->
Db.rollback () Db.rollback ()
|> Lwt.map (fun r -> |> Lwt.map (fun r ->
@ -498,7 +421,7 @@ let add_build
and uuid = Uuidm.to_string uuid and uuid = Uuidm.to_string uuid
and job = job.name and job = job.name
and platform = job.platform and platform = job.platform
and sha256 = Ohex.encode main_binary.sha256 and `Hex sha256 = Hex.of_cstruct main_binary.sha256
in in
let fp_str p = Fpath.(to_string (datadir // p)) in let fp_str p = Fpath.(to_string (datadir // p)) in
let args = let args =
@ -508,8 +431,7 @@ let add_build
"--uuid=" ^ uuid ; "--platform=" ^ platform ; "--uuid=" ^ uuid ; "--platform=" ^ platform ;
"--cache-dir=" ^ Fpath.to_string cachedir ; "--cache-dir=" ^ Fpath.to_string cachedir ;
"--data-dir=" ^ Fpath.to_string datadir ; "--data-dir=" ^ Fpath.to_string datadir ;
"--main-binary-filepath=" ^ Fpath.to_string main_binary.filepath ; fp_str main_binary.localpath ])
fp_str Fpath.(datadir // artifact_path main_binary) ])
in in
Log.debug (fun m -> m "executing hooks with %s" args); Log.debug (fun m -> m "executing hooks with %s" args);
let dir = Fpath.(configdir / "upload-hooks") in let dir = Fpath.(configdir / "upload-hooks") in
@ -528,7 +450,7 @@ let add_build
let rec go () = let rec go () =
let next_file = Unix.readdir dh in let next_file = Unix.readdir dh in
let file = Fpath.(dir / next_file) in let file = Fpath.(dir / next_file) in
if is_executable file && Fpath.has_ext ".sh" file then if is_executable file then
ignore (Sys.command (Fpath.to_string file ^ " " ^ args ^ " &")); ignore (Sys.command (Fpath.to_string file ^ " " ^ args ^ " &"));
go () go ()
in in
@ -537,45 +459,3 @@ let add_build
| End_of_file -> | End_of_file ->
Unix.closedir dh; Unix.closedir dh;
Lwt.return (Ok ()) Lwt.return (Ok ())
(* NOTE: this function is duplicatedi in bin/builder_db_app.ml *)
let console_of_string data =
let lines = String.split_on_char '\n' data in
List.filter_map (fun line ->
match String.index line ':' with
| 0 -> Log.warn (fun m -> m "console line starting with colon %S" line); None
| i ->
(* the timestamp is of the form "%fs", e.g. 0.867s; so chop off the 's' *)
let delta = float_of_string (String.sub line 0 (i - 1)) in
let delta = Int64.to_int (Duration.of_f delta) in
let line = String.sub line i (String.length line - i) in
Some (delta, line)
| exception Not_found ->
if line <> "" then
Log.warn (fun m -> m "Unexpected console line %S" line);
None)
lines
let exec_of_build datadir uuid (module Db : CONN) =
let open Builder_db in
Db.find_opt Build.get_by_uuid uuid >>= not_found >>= fun (build_id, build) ->
let { Builder_db.Build.start; finish; result;
job_id; console; script; platform; _ } =
build
in
Db.find Builder_db.Job.get job_id >>= fun job_name ->
read_file datadir script >>= fun script ->
let job = { Builder.name = job_name; platform; script } in
read_file datadir console >>= fun console ->
let out = console_of_string console in
Db.collect_list Builder_db.Build_artifact.get_all_by_build build_id >>= fun artifacts ->
Lwt_list.fold_left_s (fun acc (_id, ({ filepath; _ } as file)) ->
match acc with
| Error _ as e -> Lwt.return e
| Ok acc ->
build_artifact_data datadir file >>= fun data ->
Lwt.return (Ok ((filepath, data) :: acc)))
(Ok []) artifacts >>= fun data ->
let exec = (job, uuid, out, start, finish, result, data) in
let data = Builder.Asn.exec_to_str exec in
Lwt.return (Ok data)

View file

@ -2,10 +2,9 @@ type error = [ Caqti_error.call_or_retrieve | `Not_found | `File_error of Fpath.
val pp_error : Format.formatter -> error -> unit val pp_error : Format.formatter -> error -> unit
val not_found : 'a option -> ('a, [> `Not_found ]) result Lwt.t val not_found : 'a option -> ('a, [> error ]) result Lwt.t
val staging : Fpath.t -> Fpath.t val staging : Fpath.t -> Fpath.t
val artifact_path : Builder_db.file -> Fpath.t
val cleanup_staging : Fpath.t -> Caqti_lwt.connection -> val cleanup_staging : Fpath.t -> Caqti_lwt.connection ->
(unit, [> `Msg of string ]) result Lwt.t (unit, [> `Msg of string ]) result Lwt.t
@ -31,9 +30,9 @@ val build : Uuidm.t -> Caqti_lwt.connection ->
([`build] Builder_db.id * Builder_db.Build.t, [> error ]) result Lwt.t ([`build] Builder_db.id * Builder_db.Build.t, [> error ]) result Lwt.t
val build_with_main_binary : [`job] Builder_db.id -> string -> Caqti_lwt.connection -> val build_with_main_binary : [`job] Builder_db.id -> string -> Caqti_lwt.connection ->
((Builder_db.Build.t * Builder_db.file) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t ((Builder_db.Build.t * Builder_db.file option) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
val build_hash : string -> Caqti_lwt.connection -> val build_hash : Cstruct.t -> Caqti_lwt.connection ->
((string * Builder_db.Build.t) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t ((string * Builder_db.Build.t) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
val build_exists : Uuidm.t -> Caqti_lwt.connection -> val build_exists : Uuidm.t -> Caqti_lwt.connection ->
@ -104,6 +103,3 @@ val add_build :
Builder.execution_result * (Fpath.t * string) list) -> Builder.execution_result * (Fpath.t * string) list) ->
Caqti_lwt.connection -> Caqti_lwt.connection ->
(unit, [> Caqti_error.call_or_retrieve | `Msg of string ]) result Lwt.t (unit, [> Caqti_error.call_or_retrieve | `Msg of string ]) result Lwt.t
val exec_of_build : Fpath.t -> Uuidm.t -> Caqti_lwt.connection ->
(string, [> Caqti_error.call_or_retrieve | `Not_found | `File_error of Fpath.t ]) result Lwt.t

View file

@ -1,3 +1,4 @@
module String_map = struct module String_map = struct
include Map.Make(String) include Map.Make(String)
@ -5,27 +6,6 @@ module String_map = struct
update key (function None -> Some [ v ] | Some xs -> Some (v :: xs)) t update key (function None -> Some [ v ] | Some xs -> Some (v :: xs)) t
end end
let diff_map_to_json (left, right, different_versions) =
let convert_list lst =
`List (List.map (fun (name, version) ->
`Assoc [("name", `String name); ("version", `String version)]
) lst)
in
let convert_diff_versions lst =
`List (List.map (fun (name, version1, version2) ->
`Assoc [
("name", `String name);
("version_left", `String version1);
("version_right", `String version2)
]
) lst)
in
`Assoc [
("left_packages", convert_list left);
("right_packages", convert_list right);
("different_versions", convert_diff_versions different_versions)
]
let diff_map a b = let diff_map a b =
let diff a b = let diff a b =
String_map.fold (fun k v acc -> String_map.fold (fun k v acc ->
@ -65,30 +45,85 @@ let compare_pkgs p1 p2 =
in in
diff_map (parse_pkgs p1) (parse_pkgs p2) diff_map (parse_pkgs p1) (parse_pkgs p2)
let md_to_html ?adjust_heading ?(safe = true) data = module Omd = struct
let open Cmarkit in
let doc = Doc.of_string ~strict:false ~heading_auto_ids:true data in let make_safe omd =
let doc = let rec safe_block = function
Option.fold ~none:doc | Omd.Paragraph (attr, inline) ->
~some:(fun lvl -> safe_inline inline
let block _m = function |> Option.map (fun inline -> Omd.Paragraph (attr, inline))
| Block.Heading (h, meta) -> | Omd.List (attr, typ, spacing, blocks) ->
let open Block.Heading in let blocks = List.filter_map (fun b ->
let level = level h let b = List.filter_map safe_block b in
and id = id h if b = [] then None else Some b)
and layout = layout h blocks
and inline = inline h in
in if blocks = [] then None else
let h' = make ?id ~layout ~level:(level + lvl) inline in Some (Omd.List (attr, typ, spacing, blocks))
Mapper.ret (Block.Heading (h', meta)) | Omd.Blockquote (attr, blocks) ->
| Block.Blocks _ -> Mapper.default let blocks = List.filter_map safe_block blocks in
| x -> Mapper.ret x if blocks = [] then None else
in Some (Omd.Blockquote (attr, blocks))
let mapper = Mapper.make ~block () in | Omd.Heading (attr, level, inline) ->
Mapper.map_doc mapper doc) safe_inline inline
adjust_heading |> Option.map (fun inline -> Omd.Heading (attr, level, inline))
in | Omd.Html_block _ -> None
Cmarkit_html.of_doc ~safe doc | Omd.Definition_list (attr, def_elts) ->
let def_elts = List.filter_map safe_def_elts def_elts in
if def_elts = [] then None else
Some (Omd.Definition_list (attr, def_elts))
| Omd.Code_block _
| Omd.Thematic_break _ as v -> Some v
and safe_def_elts { term ; defs } =
let defs = List.filter_map safe_inline defs in
safe_inline term
|> Option.map (fun term -> { Omd.term ; defs })
and safe_inline = function
| Concat (attr, inline) ->
Some (Concat (attr, List.filter_map safe_inline inline))
| Emph (attr, inline) ->
safe_inline inline
|> Option.map (fun inline -> Omd.Emph (attr, inline))
| Strong (attr, inline) ->
safe_inline inline
|> Option.map (fun inline -> Omd.Strong (attr, inline))
| Link (attr, link) ->
begin match safe_link link with
| `No_label | `Relative -> safe_inline link.Omd.label
| `Link l -> Some (Omd.Link (attr, l))
end
| Image (attr, link) ->
begin match safe_link link with
| `No_label | `Relative -> None
| `Link l -> Some (Omd.Image (attr, l))
end
| Html _ -> None
| Text _
| Code _
| Hard_break _
| Soft_break _ as v -> Some v
and safe_link ({ label ; destination ; _ } as l) =
let absolute_link =
String.(length destination >= 2 && equal (sub destination 0 2) "//") ||
String.(length destination >= 7 && equal (sub destination 0 7) "http://") ||
String.(length destination >= 8 && equal (sub destination 0 8) "https://")
in
if absolute_link then
match safe_inline label with
| None -> `No_label
| Some label -> `Link { l with label }
else
`Relative
in
List.filter_map safe_block omd
let html_of_string markdown =
markdown
|> Omd.of_string
|> make_safe
|> Omd.to_html
end
module Path = struct module Path = struct

View file

@ -31,6 +31,8 @@ type nav = [
let pp_platform = let pp_platform =
Fmt.(option ~none:(any "") (append (any "on ") string)) Fmt.(option ~none:(any "") (append (any "on ") string))
let pp_platform_query =
Fmt.(option ~none:(any "") (append (any "?platform=") string))
let static_css = Tyxml.Html.Unsafe.data {| let static_css = Tyxml.Html.Unsafe.data {|
body { body {
@ -91,28 +93,29 @@ let make_breadcrumbs nav =
| `Default -> | `Default ->
to_nav [H.txt "Home", "/"] to_nav [H.txt "Home", "/"]
| `Job (job_name, platform) -> | `Job (job_name, platform) ->
let queries =
platform |> Option.map (fun v -> `Platform v) |> Option.to_list in
to_nav [ to_nav [
H.txt "Home", "/"; H.txt "Home", "/";
txtf "Job %s" job_name, Link.Job.make ~job_name (); txtf "Job %s" job_name, Fmt.str "/job/%s" job_name ;
( (
txtf "%a" pp_platform platform, txtf "%a" pp_platform platform,
Link.Job.make ~job_name ~queries () Fmt.str "/job/%s/%a" job_name pp_platform_query platform
) )
] ]
| `Build (job_name, build) -> | `Build (job_name, build) ->
to_nav [ to_nav [
H.txt "Home", "/"; H.txt "Home", "/";
txtf "Job %s" job_name, Link.Job.make ~job_name (); txtf "Job %s" job_name, Fmt.str "/job/%s" job_name;
( (
txtf "%a" pp_platform (Some build.Builder_db.Build.platform), txtf "%a" pp_platform (Some build.Builder_db.Build.platform),
Link.Job.make ~job_name Fmt.str "/job/%s/%a"
~queries:[ `Platform build.Builder_db.Build.platform ] () job_name
pp_platform_query (Some build.Builder_db.Build.platform)
); );
( (
txtf "Build %a" pp_ptime build.Builder_db.Build.start, txtf "Build %a" pp_ptime build.Builder_db.Build.start,
Link.Job_build.make ~job_name ~build:build.Builder_db.Build.uuid () Fmt.str "/job/%s/build/%a"
job_name
Uuidm.pp build.Builder_db.Build.uuid
); );
] ]
| `Comparison ((job_left, build_left), (job_right, build_right)) -> | `Comparison ((job_left, build_left), (job_right, build_right)) ->
@ -122,9 +125,9 @@ let make_breadcrumbs nav =
txtf "Comparison between %s@%a and %s@%a" txtf "Comparison between %s@%a and %s@%a"
job_left pp_ptime build_left.Builder_db.Build.start job_left pp_ptime build_left.Builder_db.Build.start
job_right pp_ptime build_right.Builder_db.Build.start, job_right pp_ptime build_right.Builder_db.Build.start,
Link.Compare_builds.make Fmt.str "/compare/%a/%a"
~left:build_left.uuid Uuidm.pp build_left.uuid
~right:build_right.uuid () Uuidm.pp build_right.uuid
); );
] ]
@ -188,13 +191,13 @@ let artifact
~basename ~basename
~job_name ~job_name
~build ~build
~file:{ Builder_db.filepath; sha256; size } ~file:{ Builder_db.filepath; localpath = _; sha256; size }
= =
let artifact_link = let artifact_link =
Link.Job_build_artifact.make Fmt.str "/job/%s/build/%a/f/%a"
~job_name job_name
~build:build.Builder_db.Build.uuid Uuidm.pp build.Builder_db.Build.uuid
~artifact:(`File filepath) () Fpath.pp filepath
in in
[ [
H.a ~a:H.[a_href artifact_link] [ H.a ~a:H.[a_href artifact_link] [
@ -202,53 +205,39 @@ let artifact
else txtf "%a" Fpath.pp filepath else txtf "%a" Fpath.pp filepath
]; ];
H.txt " "; H.txt " ";
H.code [txtf "SHA256:%s" (Ohex.encode sha256)]; H.code [txtf "SHA256:%a" Hex.pp (Hex.of_cstruct sha256)];
txtf " (%a)" Fmt.byte_size size; txtf " (%a)" Fmt.byte_size size;
] ]
let page_not_found ~target ~referer = let resource_not_found ~text =
[
H.h2 ~a:[ H.a_style "padding-top: 33vh" ]
[ txtf "This resource does not exist" ];
H.p [
H.txt @@ Fmt.str "Error: '%s'" text
];
]
|> layout ~title:"Resource not found"
let page_not_found ~path ~referer =
[ [
H.h2 ~a:[ H.a_style "padding-top: 33vh" ] H.h2 ~a:[ H.a_style "padding-top: 33vh" ]
[ txtf "This page does not exist" ]; [ txtf "This page does not exist" ];
H.p [ H.p [
H.txt @@ Fmt.str "You requested the page %s" target H.txt @@ Fmt.str "You requested the page %s" path
]; ];
] @ ( ] @ (
match referer with match referer with
| None -> [] | None -> []
| Some prev_url -> [ | Some prev_url -> [
H.p [ H.p [
H.txt "Go back to "; H.txt "Go back to ";
H.a ~a:H.[ a_href prev_url ] [ H.txt prev_url ]; H.a ~a:H.[ a_href prev_url ] [ H.txt prev_url ];
]; ];
] ]
) )
|> layout ~title:"Page not found" |> layout ~title:"Page not found"
let viz_not_found =
let title = "Visualization not found" in
let content =
[
H.h2 ~a:[ H.a_style "\
padding-top: 41vh;\
text-align: center;\
"]
[ txtf "%s" title ];
]
in
let static_css = static_css :: [ Tyxml.Html.Unsafe.data "\
body { background: rgb(191,191,191); }\
"]
in
let body = [ H.div content ] in
H.html
(H.head (H.title (H.txt title))
[H.style ~a:H.[a_mime_type "text/css"] static_css])
(H.body [
H.main body
])
module Builds = struct module Builds = struct
let data = let data =
@ -274,9 +263,9 @@ The filename suffix of the unikernel binary indicate the expected execution envi
A persistent link to the latest successful build is available as A persistent link to the latest successful build is available as
`/job/*jobname*/build/latest/`. Each build can be reproduced with `/job/*jobname*/build/latest/`. Each build can be reproduced with
[orb](https://github.com/robur-coop/orb/). The builds are scheduled and executed [orb](https://github.com/roburio/orb/). The builds are scheduled and executed
daily by [builder](https://github.com/robur-coop/builder/). This web interface is daily by [builder](https://github.com/roburio/builder/). This web interface is
[builder-web](https://git.robur.coop/robur/builder-web/). Read further information [builder-web](https://git.robur.io/robur/builder-web/). Read further information
[on our project page](https://robur.coop/Projects/Reproducible_builds). This [on our project page](https://robur.coop/Projects/Reproducible_builds). This
work has been funded by the European Union under the work has been funded by the European Union under the
[NGI Pointer](https://pointer.ngi.eu) program. Contact team ATrobur.coop if you [NGI Pointer](https://pointer.ngi.eu) program. Contact team ATrobur.coop if you
@ -285,7 +274,7 @@ have questions or suggestions.
let make_header = let make_header =
[ [
H.Unsafe.data (Utils.md_to_html data); H.Unsafe.data (Utils.Omd.html_of_string data);
H.form ~a:H.[a_action "/hash"; a_method `Get] [ H.form ~a:H.[a_action "/hash"; a_method `Get] [
H.label [ H.label [
H.txt "Search artifact by SHA256"; H.txt "Search artifact by SHA256";
@ -308,24 +297,29 @@ have questions or suggestions.
check_icon latest_build.Builder_db.Build.result; check_icon latest_build.Builder_db.Build.result;
H.txt " "; H.txt " ";
H.a ~a:[ H.a ~a:[
H.a_href @@ Link.Job.make ~job_name Fmt.kstr H.a_href "/job/%s/%a"
~queries:[ `Platform platform ] () job_name
] pp_platform_query (Some platform)]
[H.txt platform]; [H.txt platform];
H.txt " "; H.txt " ";
H.a ~a:[ H.a ~a:[
H.a_href @@ Link.Job_build.make Fmt.kstr H.a_href "/job/%s/build/%a/"
~job_name job_name
~build:latest_build.Builder_db.Build.uuid ()] Uuidm.pp latest_build.Builder_db.Build.uuid]
[txtf "%a" pp_ptime latest_build.Builder_db.Build.start]; [txtf "%a" pp_ptime latest_build.Builder_db.Build.start];
H.txt " "; H.txt " ";
] ]
@ artifact @ (match latest_artifact with
~basename:true | Some main_binary ->
~job_name artifact
~build:latest_build ~basename:true
~file:latest_artifact ~job_name
~build:latest_build
~file:main_binary
| None ->
[ txtf "Build failure: %a" Builder.pp_execution_result
latest_build.Builder_db.Build.result ]
)
@ [ H.br () ] @ [ H.br () ]
let make_jobs jobs = let make_jobs jobs =
@ -356,41 +350,15 @@ have questions or suggestions.
H.txt "View the latest failed builds "; H.txt "View the latest failed builds ";
H.a ~a:H.[a_href "/failed-builds"] H.a ~a:H.[a_href "/failed-builds"]
[H.txt "here"]; [H.txt "here"];
H.txt "."; H.txt "."
]] ]]
let make_all_or_active all = let make section_job_map =
[ H.p [
H.txt (if all then "View active jobs " else "View all jobs ");
H.a ~a:H.[a_href (if all then "/" else "/all-builds")]
[H.txt "here"];
H.txt ".";
]]
let make ~all section_job_map =
layout ~title:"Reproducible OPAM builds" layout ~title:"Reproducible OPAM builds"
(make_header (make_header
@ make_body section_job_map @ make_body section_job_map
@ make_failed_builds @ make_failed_builds)
@ make_all_or_active all)
let make_json ~all:_ section_job_map =
let all_jobs =
Utils.String_map.fold
(fun _section jobs acc ->
List.map (fun (job_name, _, _) -> `String job_name) jobs @ acc)
section_job_map []
in
let by_section =
Utils.String_map.fold
(fun section jobs acc ->
(section, `List (List.map (fun (job_name, _, _) -> `String job_name) jobs)) :: acc)
section_job_map []
in
`Assoc [
"jobs", `List all_jobs;
"jobs_by_section", `Assoc by_section;
]
end end
module Job = struct module Job = struct
@ -404,7 +372,7 @@ module Job = struct
[ [
H.h2 ~a:H.[a_id "readme"] [H.txt "README"]; H.h2 ~a:H.[a_id "readme"] [H.txt "README"];
H.a ~a:H.[a_href "#builds"] [H.txt "Skip to builds"]; H.a ~a:H.[a_href "#builds"] [H.txt "Skip to builds"];
H.Unsafe.data (Utils.md_to_html ~adjust_heading:2 data) H.Unsafe.data (Utils.Omd.html_of_string data)
] ]
) )
@ -414,9 +382,9 @@ module Job = struct
check_icon build.Builder_db.Build.result; check_icon build.Builder_db.Build.result;
txtf " %s " build.platform; txtf " %s " build.platform;
H.a ~a:H.[ H.a ~a:H.[
a_href @@ Link.Job_build.make Fmt.kstr a_href "/job/%s/build/%a"
~job_name job_name
~build:build.Builder_db.Build.uuid () ] Uuidm.pp build.Builder_db.Build.uuid ]
[ [
txtf "%a" pp_ptime build.Builder_db.Build.start; txtf "%a" pp_ptime build.Builder_db.Build.start;
]; ];
@ -439,22 +407,23 @@ module Job = struct
H.h2 ~a:H.[a_id "builds"] [H.txt "Builds"]; H.h2 ~a:H.[a_id "builds"] [H.txt "Builds"];
H.a ~a:H.[a_href "#readme"] [H.txt "Back to readme"]; H.a ~a:H.[a_href "#readme"] [H.txt "Back to readme"];
H.ul (builds |> List.map (make_build ~job_name)); H.ul (builds |> List.map (make_build ~job_name));
let queries =
platform |> Option.map (fun p -> `Platform p) |> Option.to_list
in
if failed then if failed then
H.p [ H.p [
H.txt "Excluding failed builds " ; H.txt "Excluding failed builds " ;
H.a ~a:H.[ H.a ~a:H.[
a_href @@ Link.Job.make ~job_name ~queries () a_href @@ Fmt.str "/job/%s%a"
job_name
pp_platform_query platform
] ]
[H.txt "here"] ; [H.txt "here"] ;
H.txt "." ] H.txt "." ]
else else
H.p [ H.p [
H.txt "Including failed builds " ; H.txt "Including failed builds " ;
H.a ~a:H.[ H.a ~a:H.[
a_href @@ Link.Job.make_failed ~job_name ~queries () a_href @@ Fmt.str "/job/%s/failed%a"
job_name
pp_platform_query platform
] ]
[H.txt "here"] ; [H.txt "here"] ;
H.txt "." ] H.txt "." ]
@ -469,28 +438,7 @@ module Job = struct
let title = Fmt.str "Job %s %a" job_name pp_platform platform in let title = Fmt.str "Job %s %a" job_name pp_platform platform in
layout ~nav ~title @@ make_body ~failed ~job_name ~platform ~readme builds layout ~nav ~title @@ make_body ~failed ~job_name ~platform ~readme builds
let make_json ~failed:_ ~job_name:_ ~platform:_ ~readme:_ builds =
(* For now we will ignore most arguments. It's to keep the arguments the
same as [make]. This is subject to change. *)
let build (build, main_binary) =
let main_binary =
match main_binary with
| None -> `Null
| Some { Builder_db.filepath; sha256; size } ->
`Assoc [
"filename", `String (Fpath.basename filepath);
"sha256", `String (Ohex.encode sha256);
"size", `Int size;
]
in
`Assoc [
"uuid", `String (Uuidm.to_string build.Builder_db.Build.uuid);
"main_binary", main_binary
]
in
`Assoc [
"builds", `List (List.map build builds);
]
end end
module Job_build = struct module Job_build = struct
@ -529,13 +477,13 @@ module Job_build = struct
pp_devices block_devices pp_devices net_devices] pp_devices block_devices pp_devices net_devices]
in in
let aux (file:Builder_db.file) = let aux (file:Builder_db.file) =
let sha256_hex = Ohex.encode file.sha256 in let (`Hex sha256_hex) = Hex.of_cstruct file.sha256 in
[ [
H.dt [ H.dt [
H.a ~a:H.[a_href @@ Link.Job_build_artifact.make H.a ~a:H.[Fmt.kstr a_href "/job/%s/build/%a/f/%a"
~job_name job_name
~build:build_uuid Uuidm.pp build_uuid
~artifact:(`File file.filepath) () Fpath.pp file.filepath
] ]
[H.code [txtf "%a" Fpath.pp file.filepath]] ]; [H.code [txtf "%a" Fpath.pp file.filepath]] ];
H.dd ([ H.dd ([
@ -554,7 +502,7 @@ module Job_build = struct
] ]
let make_reproductions let make_reproductions
~job_name ~name
~(build:Builder_db.Build.t) ~(build:Builder_db.Build.t)
~same_input_same_output ~same_input_same_output
~different_input_same_output ~different_input_same_output
@ -563,7 +511,7 @@ module Job_build = struct
List.map (fun (build:Builder_db.Build.t) -> List.map (fun (build:Builder_db.Build.t) ->
H.li [ H.li [
txtf "on %s, same input, " build.platform; txtf "on %s, same input, " build.platform;
H.a ~a:H.[a_href @@ Link.Job_build.make ~job_name ~build:build.uuid ()] H.a ~a:H.[Fmt.kstr a_href "/job/%s/build/%a" name Uuidm.pp build.uuid]
[txtf "%a" pp_ptime build.start] [txtf "%a" pp_ptime build.start]
]) ])
same_input_same_output same_input_same_output
@ -573,9 +521,9 @@ module Job_build = struct
H.li [ H.li [
txtf "on %s, different input, " build'.platform; txtf "on %s, different input, " build'.platform;
H.a ~a:H.[ H.a ~a:H.[
a_href @@ Link.Compare_builds.make Fmt.kstr a_href "/compare/%a/%a"
~left:build'.uuid Uuidm.pp build'.uuid
~right:build.uuid ()] Uuidm.pp build.uuid]
[txtf "%a" pp_ptime build'.start] [txtf "%a" pp_ptime build'.start]
]) ])
different_input_same_output different_input_same_output
@ -603,9 +551,9 @@ module Job_build = struct
H.li [ H.li [
txtf "on %s, " build'.platform ; txtf "on %s, " build'.platform ;
H.a ~a:H.[ H.a ~a:H.[
a_href @@ Link.Compare_builds.make Fmt.kstr a_href "/compare/%a/%a"
~left:build'.uuid Uuidm.pp build'.uuid
~right:build.uuid ()] Uuidm.pp build.uuid]
[txtf "%a" pp_ptime build'.start] [txtf "%a" pp_ptime build'.start]
]) ])
same_input_different_output) same_input_different_output)
@ -624,9 +572,9 @@ module Job_build = struct
| Some b when not (Uuidm.equal build.uuid b.Builder_db.Build.uuid) -> | Some b when not (Uuidm.equal build.uuid b.Builder_db.Build.uuid) ->
[ H.li [ H.txt ctx; [ H.li [ H.txt ctx;
H.a ~a:[ H.a ~a:[
H.a_href @@ Link.Compare_builds.make Fmt.kstr H.a_href "/compare/%a/%a"
~left:b.uuid Uuidm.pp b.uuid
~right:build.uuid () ] Uuidm.pp build.uuid ]
[txtf "%a" pp_ptime b.start]] [txtf "%a" pp_ptime b.start]]
] ]
| _ -> [] | _ -> []
@ -639,7 +587,7 @@ module Job_build = struct
] ]
let make_build_info let make_build_info
~job_name ~name
~delta ~delta
~(build:Builder_db.Build.t) ~(build:Builder_db.Build.t)
~artifacts ~artifacts
@ -659,30 +607,24 @@ module Job_build = struct
H.ul [ H.ul [
H.li [ H.li [
H.a ~a:H.[ H.a ~a:H.[
a_href @@ Link.Job_build_artifact.make Fmt.kstr a_href "/job/%s/build/%a/console" name Uuidm.pp build.uuid
~job_name
~build:build.uuid
~artifact:`Console ()
] [H.txt "Console output"]; ] [H.txt "Console output"];
]; ];
H.li [ H.li [
H.a ~a:H.[ H.a ~a:H.[
a_href @@ Link.Job_build_artifact.make Fmt.kstr a_href "/job/%s/build/%a/script" name Uuidm.pp build.uuid
~job_name
~build:build.uuid
~artifact:`Script ()
] [H.txt "Build script"]; ] [H.txt "Build script"];
] ]
]; ];
] ]
@ make_artifacts @ make_artifacts
~job_name ~job_name:name
~build_uuid:build.uuid ~build_uuid:build.uuid
~artifacts ~artifacts
~main_binary ~main_binary
~solo5_manifest ~solo5_manifest
@ make_reproductions @ make_reproductions
~job_name ~name
~build ~build
~same_input_same_output ~same_input_same_output
~different_input_same_output ~different_input_same_output
@ -721,12 +663,11 @@ module Job_build = struct
font-weight: bold;\ font-weight: bold;\
" "
] ]
let make_viz_section ~job_name ~artifacts ~uuid = let make_viz_section ~name ~artifacts ~uuid =
let viz_deps = let viz_deps =
let iframe = let iframe =
let src = Link.Job_build_artifact.make ~job_name ~build:uuid let src = Fmt.str "/job/%s/build/%a/vizdependencies" name Uuidm.pp uuid in
~artifact:`Viz_dependencies () in
H.iframe ~a:H.[ H.iframe ~a:H.[
a_src src; a_src src;
a_title "Opam dependencies"; a_title "Opam dependencies";
@ -735,11 +676,11 @@ module Job_build = struct
in in
let descr_txt = "\ let descr_txt = "\
This is an interactive visualization of dependencies, \ This is an interactive visualization of dependencies, \
focusing on how shared dependencies are. focusing on how shared dependencies are.
In the middle you see the primary package. \ In the middle you see the primary package. \
Edges shoot out to its direct \ Edges shoot out to its direct \
dependencies, including build dependencies. dependencies, including build dependencies.
From these direct dependencies, edges shoot out to sets \ From these direct dependencies, edges shoot out to sets \
of their own respective direct dependencies. \ of their own respective direct dependencies. \
@ -756,9 +697,8 @@ dependency.\
[ iframe; H.br (); make_description descr_txt ] [ iframe; H.br (); make_description descr_txt ]
in in
let viz_treemap = lazy ( let viz_treemap = lazy (
let iframe = let iframe =
let src = Link.Job_build_artifact.make ~job_name ~build:uuid let src = Fmt.str "/job/%s/build/%a/viztreemap" name Uuidm.pp uuid in
~artifact:`Viz_treemap () in
H.iframe ~a:H.[ H.iframe ~a:H.[
a_src src; a_src src;
a_title "Binary dissection"; a_title "Binary dissection";
@ -768,7 +708,7 @@ dependency.\
let descr_txt = "\ let descr_txt = "\
This interactive treemap shows the space-usage of modules/libraries inside the \ This interactive treemap shows the space-usage of modules/libraries inside the \
ELF binary. You can get more info from each block by \ ELF binary. You can get more info from each block by \
hovering over them. hovering over them.
On top of the treemap there is a scale, showing how much space the \ On top of the treemap there is a scale, showing how much space the \
treemap itself constitutes of the binary, the excluded symbols/modules \ treemap itself constitutes of the binary, the excluded symbols/modules \
@ -786,7 +726,7 @@ and the rest of the unaccounted data.\
] ]
let make let make
~job_name ~name
~(build:Builder_db.Build.t) ~(build:Builder_db.Build.t)
~artifacts ~artifacts
~main_binary ~main_binary
@ -797,10 +737,10 @@ and the rest of the unaccounted data.\
~latest ~next ~previous ~latest ~next ~previous
= =
let delta = Ptime.diff build.finish build.start in let delta = Ptime.diff build.finish build.start in
let right_column = make_viz_section ~job_name ~artifacts ~uuid:build.uuid in let right_column = make_viz_section ~name ~artifacts ~uuid:build.uuid in
let left_column = let left_column =
make_build_info make_build_info
~job_name ~name
~delta ~delta
~build ~build
~artifacts ~artifacts
@ -816,7 +756,7 @@ and the rest of the unaccounted data.\
H.a_style "width: 45em; min-width: 43em;" in H.a_style "width: 45em; min-width: 43em;" in
let style_col_right = H.a_style "width: 50%" in let style_col_right = H.a_style "width: 50%" in
let body = [ let body = [
H.h1 [txtf "Job %s" job_name]; H.h1 [txtf "Job %s" name];
H.div~a:[ style_grid ] [ H.div~a:[ style_grid ] [
H.div~a:[ style_col_left ] left_column; H.div~a:[ style_col_left ] left_column;
H.div~a:[ style_col_right ] right_column H.div~a:[ style_col_right ] right_column
@ -824,8 +764,8 @@ and the rest of the unaccounted data.\
] ]
in in
layout layout
~nav:(`Build (job_name, build)) ~nav:(`Build (name, build))
~title:(Fmt.str "Job %s %a" job_name pp_ptime build.start) ~title:(Fmt.str "Job %s %a" name pp_ptime build.start)
~manual_width:true ~manual_width:true
body body
@ -851,24 +791,36 @@ let package_diffs diffs =
]) ])
diffs diffs
(* Render each duniverse directory as a line of text followed by a
   line break, for inclusion in the comparison page. *)
let duniverse_dirs dirs =
  let line dir = [ txtf "%a" Opamdiff.pp_duniverse_dir dir; H.br () ] in
  List.concat_map line dirs
(* Render each duniverse directory diff as a line of text followed by a
   line break, for inclusion in the comparison page. *)
let duniverse_diffs diffs =
  let line d = [ txtf "%a" Opamdiff.pp_duniverse_diff d; H.br () ] in
  List.concat_map line diffs
let opam_diffs diffs = let opam_diffs diffs =
List.concat_map (fun pd -> List.concat_map (fun pd ->
H.h4 [ txtf "%a" Opamdiff.pp_opam_diff pd ] :: H.h4 [ txtf "%a" Opamdiff.pp_opam_diff pd ] ::
H.h5 [ H.txt "diff" ] :: (match pd.Opamdiff.build with None -> [] | Some a ->
H.pre [ H.code [ H.txt pd.diff ] ] :: let l, r = Opamdiff.commands_to_strings a in
H.br () :: []) [
H.h5 [ H.txt "build instruction (without common prefix) \
modifications, old:" ] ;
H.code (List.concat_map (fun s -> [ H.txt s ; H.br () ]) l) ;
H.h5 [ H.txt "new" ] ;
H.code (List.concat_map (fun s -> [ H.txt s ; H.br () ]) r)
]) @
(match pd.Opamdiff.install with None -> [] | Some a ->
let l, r = Opamdiff.commands_to_strings a in
[
H.h5 [ H.txt "install instruction (without common prefix) \
modifications, old:" ] ;
H.code (List.concat_map (fun s -> [ H.txt s ; H.br () ]) l) ;
H.h5 [ H.txt "new" ] ;
H.code (List.concat_map (fun s -> [ H.txt s ; H.br () ]) r)
]) @
(match pd.Opamdiff.url with None -> [] | Some a ->
let l, r = Opamdiff.opt_url_to_string a in
[
H.h5 [ H.txt "URL" ] ;
txtf "old: %s" l;
H.br ();
txtf "new: %s" r
]) @
[ H.br () ])
diffs diffs
let compare_builds let compare_builds
@ -878,51 +830,8 @@ let compare_builds
~(build_right : Builder_db.Build.t) ~(build_right : Builder_db.Build.t)
~env_diff:(added_env, removed_env, changed_env) ~env_diff:(added_env, removed_env, changed_env)
~pkg_diff:(added_pkgs, removed_pkgs, changed_pkgs) ~pkg_diff:(added_pkgs, removed_pkgs, changed_pkgs)
~opam_diff:(opam_diff, version_diff, left, right, duniverse) ~opam_diff:(same, opam_diff, version_diff, left, right)
= =
let items, data =
List.fold_left (fun (items, data) (id, txt, amount, code) ->
let id_href = "#" ^ id in
if amount = 0 then
items, data
else
H.li [ H.a ~a:[H.a_href id_href] [txtf "%d %s" amount txt] ] :: items,
data @ H.h3 ~a:[H.a_id id] [H.txt txt] :: code)
([], [])
([ ("opam-packages-removed", "Opam packages removed",
OpamPackage.Set.cardinal left, [ H.code (packages left) ]) ;
("opam-packages-installede", "New opam packages installed",
OpamPackage.Set.cardinal right, [ H.code (packages right) ]) ;
("opam-packages-version-diff", "Opam packages with version changes",
List.length version_diff, [ H.code (package_diffs version_diff) ]) ;
] @ (match duniverse with
| Ok (duniverse_left, duniverse_right, duniverse_content_diff) ->
[
("duniverse-dirs-removed", "Duniverse directories removed",
List.length duniverse_left, [ H.code (duniverse_dirs duniverse_left) ]) ;
("duniverse-dirs-installed", "New duniverse directories installed",
List.length duniverse_right, [ H.code (duniverse_dirs duniverse_right) ]) ;
("duniverse-dirs-content-diff", "Duniverse directories with content changes",
List.length duniverse_content_diff, [ H.code (duniverse_diffs duniverse_content_diff) ]) ;
]
| Error `Msg msg -> [ "duniverse-dirs-error", "Duniverse parsing error", 1, [ H.txt msg ] ]
) @ [
("opam-packages-opam-diff", "Opam packages with changes in their opam file",
List.length opam_diff, opam_diffs opam_diff) ;
("env-removed", "Environment variables removed",
List.length removed_env, [ H.code (key_values removed_env) ]) ;
("env-added", "New environment variables added",
List.length added_env, [ H.code (key_values added_env) ]) ;
("env-changed", "Environment variables changed",
List.length changed_env, [ H.code (key_value_changes changed_env) ]) ;
("pkgs-removed", "System packages removed",
List.length removed_pkgs, [ H.code (key_values removed_pkgs) ]) ;
("pkgs-added", "New system packages added",
List.length added_pkgs, [ H.code (key_values added_pkgs) ]) ;
("pkgs-changed", "System packages changed",
List.length changed_pkgs, [ H.code (key_value_changes changed_pkgs) ]) ;
])
in
layout layout
~nav:(`Comparison ((job_left, build_left), (job_right, build_right))) ~nav:(`Comparison ((job_left, build_left), (job_right, build_right)))
~title:(Fmt.str "Comparing builds %a and %a" ~title:(Fmt.str "Comparing builds %a and %a"
@ -931,58 +840,128 @@ let compare_builds
H.h1 [H.txt "Comparing builds"]; H.h1 [H.txt "Comparing builds"];
H.h2 [ H.h2 [
H.txt "Builds "; H.txt "Builds ";
H.a ~a:H.[ a_href @@ H.a ~a:H.[a_href
Link.Job_build.make (Fmt.str "/job/%s/build/%a"
~job_name:job_left job_left
~build:build_left.uuid () ] Uuidm.pp build_left.uuid)]
[ txtf "%s@%a %a" [ txtf "%s@%a %a"
job_left job_left
pp_ptime build_left.start pp_ptime build_left.start
pp_platform (Some build_left.platform)]; pp_platform (Some build_left.platform)];
H.txt " and "; H.txt " and ";
H.a ~a:H.[ a_href @@ H.a ~a:H.[a_href
Link.Job_build.make (Fmt.str "/job/%s/build/%a"
~job_name:job_right job_right
~build:build_right.uuid () ] Uuidm.pp build_right.uuid)]
[ txtf "%s@%a %a" [ txtf "%s@%a %a"
job_right job_right
pp_ptime build_right.start pp_ptime build_right.start
pp_platform (Some build_right.platform)]; pp_platform (Some build_right.platform)];
]; ];
H.h3 [ H.a ~a:H.[ H.h3 [ H.a ~a:H.[
a_href @@ Link.Compare_builds.make Fmt.kstr a_href "/compare/%a/%a"
~left:build_right.uuid Uuidm.pp build_right.uuid
~right:build_left.uuid () ] Uuidm.pp build_left.uuid ]
[H.txt "Compare in reverse direction"]] ; [H.txt "Compare in reverse direction"]] ;
H.ul (List.rev items) ] @ data) H.ul [
H.li [
H.a ~a:H.[a_href "#opam-packages-removed"]
[txtf "%d opam packages removed"
(OpamPackage.Set.cardinal left)]
];
H.li [
H.a ~a:H.[a_href "#opam-packages-installed"]
[txtf "%d new opam packages installed"
(OpamPackage.Set.cardinal right)]
];
H.li [
H.a ~a:H.[a_href "#opam-packages-version-diff"]
[txtf "%d opam packages with version changes"
(List.length version_diff)]
];
H.li [
H.a ~a:H.[a_href "#opam-packages-opam-diff"]
[txtf "%d opam packages with changes in their opam file"
(List.length opam_diff)]
];
H.li [
H.a ~a:H.[a_href "#opam-packages-unchanged"]
[txtf "%d opam packages unchanged" (OpamPackage.Set.cardinal same)]
];
H.li [
H.a ~a:H.[a_href "#env-added"]
[ txtf "%d environment variables added" (List.length added_env)]
];
H.li [
H.a ~a:H.[a_href "#env-removed"]
[ txtf "%d environment variables removed" (List.length removed_env)]
];
H.li [
H.a ~a:H.[a_href "#env-changed"]
[ txtf "%d environment variables changed" (List.length changed_env)]
];
H.li [
H.a ~a:H.[a_href "#pkgs-added"]
[ txtf "%d system packages added" (List.length added_pkgs)]
];
H.li [
H.a ~a:H.[a_href "#pkgs-removed"]
[ txtf "%d system packages removed" (List.length removed_pkgs)]
];
H.li [
H.a ~a:H.[a_href "#pkgs-changed"]
[ txtf "%d system packages changed" (List.length changed_pkgs)]
];
];
H.h3 ~a:H.[a_id "opam-packages-removed"]
[H.txt "Opam packages removed"];
H.code (packages left);
H.h3 ~a:H.[a_id "opam-packages-installed"]
[H.txt "New opam packages installed"];
H.code (packages right);
H.h3 ~a:H.[a_id "opam-packages-version-diff"]
[H.txt "Opam packages with version changes"];
H.code (package_diffs version_diff);
H.h3 ~a:H.[a_id "opam-packages-opam-diff"]
[H.txt "Opam packages with changes in their opam file"]] @
opam_diffs opam_diff @ [
H.h3 ~a:H.[a_id "opam-packages-unchanged"]
[H.txt "Unchanged opam packages"];
H.code (packages same);
H.h3 ~a:H.[a_id "env-added"] [H.txt "Environment variables added"];
H.code (key_values added_env);
H.h3 ~a:H.[a_id "env-removed"] [H.txt "Environment variables removed"];
H.code (key_values removed_env);
H.h3 ~a:H.[a_id "env-changed"] [H.txt "Environment variables changed"];
H.code (key_value_changes changed_env);
H.h3 ~a:H.[a_id "pkgs-added"] [H.txt "System packages added"];
H.code (key_values added_pkgs);
H.h3 ~a:H.[a_id "pkgs-removed"] [H.txt "System packages removed"];
H.code (key_values removed_pkgs);
H.h3 ~a:H.[a_id "pkgs-changed"] [H.txt "System packages changed"];
H.code (key_value_changes changed_pkgs);
])
let failed_builds ~start ~count builds = let failed_builds ~start ~count builds =
let build (job_name, build) = let build (job_name, build) =
H.li [ H.li [
check_icon build.Builder_db.Build.result; check_icon build.Builder_db.Build.result;
txtf " %s %a " job_name pp_platform (Some build.platform); txtf " %s %a " job_name pp_platform (Some build.platform);
H.a ~a:H.[ a_href @@ Link.Job_build.make ~job_name ~build:build.uuid () ] H.a ~a:H.[Fmt.kstr a_href "/job/%s/build/%a" job_name Uuidm.pp build.uuid]
[txtf "%a" pp_ptime build.start]; [txtf "%a" pp_ptime build.start];
txtf " %a" Builder.pp_execution_result build.result; txtf " %a" Builder.pp_execution_result build.result;
] ]
in in
layout ~title:"Failed builds" layout ~title:"Failed builds"
(match builds with ([
| [] -> H.h1 [H.txt "Failed builds"];
[ H.ul (List.map build builds);
H.h1 [H.txt "No failed builds to list"]; H.p [ txtf "View the next %d failed builds " count;
H.p [H.txt "🥳"]; H.a ~a:H.[
] Fmt.kstr a_href "/failed-builds/?count=%d&start=%d"
| _ :: _ -> count (start + count) ]
[ [ H.txt "here"];
H.h1 [H.txt "Failed builds"]; H.txt ".";
H.ul (List.map build builds); ]
H.p [ txtf "View the next %d failed builds " count; ])
H.a ~a:H.[
a_href @@ Link.Failed_builds.make
~count ~start:(start + count) () ]
[ H.txt "here"];
H.txt ".";
]
])

View file

@ -1,3 +1,3 @@
(library (library
(name opamdiff) (name opamdiff)
(libraries opam-core opam-format yojson bos)) (libraries opam-core opam-format))

View file

@ -1,156 +1,13 @@
module Set = OpamPackage.Set module Set = OpamPackage.Set
(* Convenience alias: an opam package is a name plus a version. *)
type package = OpamPackage.t
let packages (switch : OpamFile.SwitchExport.t) = let packages (switch : OpamFile.SwitchExport.t) =
assert (Set.cardinal switch.selections.sel_pinned = 0); assert (Set.cardinal switch.selections.sel_pinned = 0);
assert (Set.cardinal switch.selections.sel_compiler = 0); assert (Set.cardinal switch.selections.sel_compiler = 0);
assert (Set.subset switch.selections.sel_roots switch.selections.sel_installed); assert (Set.subset switch.selections.sel_roots switch.selections.sel_installed);
switch.selections.sel_installed switch.selections.sel_installed
(* Name of the opam extension field in which opam-monorepo records the
   duniverse directories of a monorepo build. *)
let duniverse_dir = "x-opam-monorepo-duniverse-dirs"

(* String-keyed map used below to map a duniverse directory name to its
   (url, hashes) data. *)
module M = Map.Make(String)
(* Decoder for the value of the "x-opam-monorepo-duniverse-dirs" extension
   field.  On success it returns a map from duniverse directory name to
   (url, hash list); any malformed entry yields [Error (`Msg _)]. *)
let duniverse_dirs_data =
  (* the representation in the file is [ URL DIR [ HASH* ] ] *)
  let open OpamParserTypes.FullPos in
  let ( let* ) = Result.bind in
  (* Expect a string atom; [ctx] only improves the error message. *)
  let string ~ctx = function
    | { pelem = String s ; _ } -> Ok s
    | _ -> Error (`Msg ("couldn't find a string " ^ ctx))
  in
  (* Decode one [ URL DIR [HASHES] ] or [ URL DIR ] entry. *)
  let extract_data = function
    | { pelem = List { pelem = [ url ; dir ; hashes ] ; _ } ; _ } ->
      let* url = string ~ctx:"url" url in
      let* hashes =
        match hashes with
        | { pelem = List { pelem = hashes ; _ } ; _ } ->
          (* The fold conses, so the accumulated list is reversed; it is
             put back in file order with [List.rev] below. *)
          List.fold_left (fun acc hash ->
              let* acc = acc in
              let* hash = string ~ctx:"hash" hash in
              let* h = match OpamHash.of_string_opt hash with
                | Some h -> Ok OpamHash.(kind h, contents h)
                | None -> Error (`Msg ("couldn't decode opam hash in " ^ hash))
              in
              Ok (h :: acc))
            (Ok []) hashes
        | _ -> Error (`Msg "couldn't decode hashes")
      in
      let* dir = string ~ctx:"directory" dir in
      Ok (url, dir, List.rev hashes)
    | { pelem = List { pelem = [ url ; dir ] ; _ } ; _ } ->
      (* Entry without hashes (e.g. a git remote). *)
      let* url = string ~ctx:"url" url in
      let* dir = string ~ctx:"directory" dir in
      Ok (url, dir, [])
    | _ -> Error (`Msg "expected a list of URL, DIR, [HASHES]")
  in
  (* The field value itself is a list of such entries. *)
  function
  | { pelem = List { pelem = lbody ; _ } ; _ } ->
    List.fold_left (fun acc v ->
        let* acc = acc in
        let* (url, dir, hashes) = extract_data v in
        Ok (M.add dir (url, hashes) acc))
      (Ok M.empty) lbody
  | _ -> Error (`Msg "expected a list or a nested list")
(* Extract the duniverse directory map from an opam switch export by
   reading the extension field of the single root package's opam file.
   [Ok None] means the field is absent (not an opam-monorepo build). *)
let duniverse (switch : OpamFile.SwitchExport.t) =
  let root = switch.OpamFile.SwitchExport.selections.OpamTypes.sel_roots in
  (* A monorepo switch export is expected to have exactly one root. *)
  if OpamPackage.Set.cardinal root = 1 then
    let root = OpamPackage.Set.choose root in
    match OpamPackage.(Name.Map.find_opt root.name switch.OpamFile.SwitchExport.overlays) with
    | None -> Error (`Msg "opam switch export doesn't contain the main package")
    | Some opam ->
      match OpamFile.OPAM.extended opam duniverse_dir duniverse_dirs_data with
      | None -> Ok None
      | Some Error e -> Error e
      | Some Ok v -> Ok (Some v)
  else
    Error (`Msg "not a single root package found in opam switch export")
(* Difference of one duniverse directory between two builds:
   [urls] holds the left URL and, when it changed, the right URL;
   [hash] lists, per hash kind, the left/right digests ([None] = absent
   on that side). *)
type duniverse_diff = {
  name : string ;
  urls : string * string option ;
  hash : (OpamHash.kind * string option * string option) list ;
}
(* Print a duniverse diff as: NAME (URL[->URL']) KIND OLD->NEW, ...
   where a missing hash value on either side is rendered as "NONE". *)
let pp_duniverse_diff ppf { name ; urls ; hash } =
  let hash_or_none = function None -> "NONE" | Some h -> h in
  let url_left, url_right = urls in
  let url_change = match url_right with None -> "" | Some u -> "->" ^ u in
  let hash_changes =
    hash
    |> List.map (fun (kind, old_h, new_h) ->
        OpamHash.string_of_kind kind ^ " " ^
        hash_or_none old_h ^ "->" ^ hash_or_none new_h)
    |> String.concat ", "
  in
  Format.fprintf ppf "%s (%s%s) %s" name url_left url_change hash_changes
(* Print a duniverse directory as: DIR (URL). *)
let pp_duniverse_dir ppf (directory, url) =
  Format.fprintf ppf "%s (%s)" directory url
(* Compare two (optional) duniverse maps of dir -> (url, hashes).
   Returns (dirs only on the left, dirs only on the right, per-dir diffs
   for dirs present on both sides whose url or hashes differ). *)
let duniverse_diff l r =
  let l = Option.value l ~default:M.empty
  and r = Option.value r ~default:M.empty
  in
  (* Accumulated via refs from inside M.merge below. *)
  let keys_l_only = ref [] and keys_r_only = ref [] and diff = ref [] in
  let equal_hashes l r =
    (* l and r are lists of pairs, with the hash kind and its value *)
    (* for a git remote, the hashes are empty lists *)
    (match l with [] -> false | _ -> true) &&
    (match r with [] -> false | _ -> true) &&
    List.for_all (fun (h, v) ->
        match List.assoc_opt h r with
        | None -> false
        | Some v' -> String.equal v v')
      l &&
    List.for_all (fun (h, v) ->
        match List.assoc_opt h l with
        | None -> false
        | Some v' -> String.equal v v')
      r
  in
  (* M.merge is used purely for its traversal of the union of keys; every
     branch returns None, so the resulting map is discarded. *)
  let _ =
    M.merge (fun key l r ->
        match l, r with
        | None, Some _ -> keys_r_only := key :: !keys_r_only; None
        | Some _, None -> keys_l_only := key :: !keys_l_only; None
        | None, None -> None
        | Some (_, l), Some (_, r) when equal_hashes l r -> None
        (* Two hash-less entries (git remotes) are equal iff URLs match. *)
        | Some (url1, []), Some (url2, []) when String.equal url1 url2 -> None
        | Some l, Some r -> diff := (key, l, r) :: !diff; None)
      l r
  in
  (* Project a one-sided key list back to (dir, url) pairs. *)
  let dir_only keys map =
    let only =
      M.filter (fun k _ -> List.mem k keys) map |> M.bindings
    in
    List.map (fun (key, (url, _)) -> key, url) only
  in
  let l_only = dir_only !keys_l_only l
  and r_only = dir_only !keys_r_only r
  and diff =
    List.map (fun (name, (url_l, hashes_l), (url_r, hashes_r)) ->
        let urls =
          if String.equal url_l url_r then url_l, None else url_l, Some url_r
        in
        (* First pass: hashes present on the left (changed or removed). *)
        let hash =
          List.fold_left (fun acc (h, v) ->
              match List.assoc_opt h hashes_r with
              | None -> (h, Some v, None) :: acc
              | Some v' ->
                if String.equal v v' then
                  acc
                else
                  (h, Some v, Some v') :: acc)
            [] hashes_l
        in
        (* Second pass: hashes only present on the right (added). *)
        let hash = List.fold_left (fun acc (h', v') ->
            match List.assoc_opt h' hashes_l with
            | None -> (h', None, Some v') :: acc
            | Some _ -> acc)
            hash hashes_r
        in
        { name ; urls ; hash })
      !diff
  in
  l_only, r_only, diff
type version_diff = { type version_diff = {
name : OpamPackage.Name.t; name : OpamPackage.Name.t;
version_left : OpamPackage.Version.t; version_left : OpamPackage.Version.t;
@ -168,67 +25,64 @@ let pp_version_diff ppf { name; version_left; version_right } =
type opam_diff = { type opam_diff = {
pkg : OpamPackage.t ; pkg : OpamPackage.t ;
effectively_equal : bool ; build : (OpamTypes.command list * OpamTypes.command list) option ;
diff : string ; install : (OpamTypes.command list * OpamTypes.command list) option ;
url : (OpamFile.URL.t option * OpamFile.URL.t option) option ;
otherwise_equal : bool ;
} }
let pp_opam_diff ppf { pkg ; effectively_equal ; _ } = let commands_to_strings (l, r) =
let v a =
OpamPrinter.FullPos.value (OpamPp.print OpamFormat.V.command a)
in
List.map v l, List.map v r
(* Render both sides of an optional-URL pair; a missing URL becomes the
   empty string. *)
let opt_url_to_string (l, r) =
  let render = Option.fold ~none:"" ~some:OpamFile.URL.write_to_string in
  render l, render r
let pp_opam_diff ppf { pkg ; otherwise_equal ; _ } =
Format.fprintf ppf "%a%s" Format.fprintf ppf "%a%s"
pp_opampackage pkg pp_opampackage pkg
(if effectively_equal then "" else " (effectively equal)") (if otherwise_equal then "" else " (and additional changes)")
(* Drop the longest shared leading run from both lists and return the
   remaining tails.  Polymorphic equality is intentional: callers compare
   structural opam command values. *)
let rec strip_common_prefix a b =
  match a, b with
  | x :: a', y :: b' when x = y -> strip_common_prefix a' b'
  | _, _ -> a, b
let detailed_opam_diff pkg l r = let detailed_opam_diff pkg l r =
let opaml = OpamFile.OPAM.write_to_string l in let no_build_install_url p =
let opamr = OpamFile.OPAM.with_url_opt None
(* Let's minimize the difference between opaml and opamr by taking opaml (OpamFile.OPAM.with_install []
as template for opamr. *) (OpamFile.OPAM.with_build [] p))
let o = OpamFile.make (OpamFilename.raw "opam") in
OpamFile.OPAM.to_string_with_preserved_format ~format_from_string:opaml o r
in in
let effectively_equal = let otherwise_equal =
let no_build_install_url p =
OpamFile.OPAM.with_url_opt None
(OpamFile.OPAM.with_install []
(OpamFile.OPAM.with_build [] p))
in
OpamFile.OPAM.effectively_equal OpamFile.OPAM.effectively_equal
(no_build_install_url l) (no_build_install_url r) (no_build_install_url l) (no_build_install_url r)
and build =
if OpamFile.OPAM.build l = OpamFile.OPAM.build r then
None
else
Some (strip_common_prefix (OpamFile.OPAM.build l) (OpamFile.OPAM.build r))
and install =
if OpamFile.OPAM.install l = OpamFile.OPAM.install r then
None
else
Some (strip_common_prefix (OpamFile.OPAM.install l) (OpamFile.OPAM.install r))
and url =
if OpamFile.OPAM.url l = OpamFile.OPAM.url r then
None
else
Some (OpamFile.OPAM.url l, OpamFile.OPAM.url r)
in in
let diff = { pkg ; build ; install ; url ; otherwise_equal }
let label_l =
Printf.sprintf "left/%s/opam" (OpamPackage.name_to_string pkg)
and label_r =
Printf.sprintf "right/%s/opam" (OpamPackage.name_to_string pkg)
in
try
Bos.OS.File.with_tmp_oc "opaml_%s"
(fun pl oc () ->
Out_channel.output_string oc opaml;
Out_channel.close oc;
Bos.OS.File.with_tmp_oc "opamr_%s"
(fun pr oc () ->
Out_channel.output_string oc opamr;
Out_channel.close oc;
let cmd =
Bos.Cmd.(v "diff" % "-u" % "--label" % label_l % "--label" % label_r %
p pl % p pr)
in
Bos.OS.Cmd.(run_out cmd |> out_string))
())
()
with e ->
Error (`Msg ("exception " ^ Printexc.to_string e))
in
let diff = match diff with
| Ok (Ok (Ok (data, _))) -> data
| Ok (Ok (Error `Msg msg))
| Ok (Error `Msg msg)
| Error `Msg msg ->
Logs.err (fun m -> m "Error %s while running diff on opam files@.@.%s@.@.%s@.@."
msg opaml opamr);
"Error comparing opam files"
in
{ pkg ; effectively_equal ; diff }
let detailed_opam_diffs left right pkgs = let detailed_opam_diffs left right pkgs =
OpamPackage.Set.fold (fun p acc -> OpamPackage.Set.fold (fun p acc ->
@ -248,12 +102,12 @@ let compare left right =
l l
in in
let same_version = Set.inter packages_left packages_right in let same_version = Set.inter packages_left packages_right in
let opam_diff = let (same, opam_diff) =
Set.filter Set.partition
(fun p -> (fun p ->
let find = OpamPackage.Name.Map.find p.name in let find = OpamPackage.Name.Map.find p.name in
let opam_left = find left.overlays and opam_right = find right.overlays in let opam_left = find left.overlays and opam_right = find right.overlays in
not (OpamFile.OPAM.effectively_equal opam_left opam_right)) OpamFile.OPAM.effectively_equal opam_left opam_right)
same_version same_version
and version_diff = and version_diff =
List.filter_map (fun p1 -> List.filter_map (fun p1 ->
@ -272,56 +126,4 @@ let compare left right =
and right_pkgs = diff packages_right packages_left and right_pkgs = diff packages_right packages_left
in in
let opam_diff = detailed_opam_diffs left right opam_diff in let opam_diff = detailed_opam_diffs left right opam_diff in
let duniverse_ret = (same, opam_diff, version_diff, left_pkgs, right_pkgs)
match duniverse left, duniverse right with
| Ok l, Ok r -> Ok (duniverse_diff l r)
| Error _ as e, _ | _, (Error _ as e) -> e
in
(opam_diff, version_diff, left_pkgs, right_pkgs, duniverse_ret)
(* Serialize the result tuple of [compare] to JSON (for the builder-web
   API).  Key order and shape are part of the wire format. *)
let compare_to_json
  (opam_diff, version_diff, left_pkgs, right_pkgs, duniverse_diff) : Yojson.Basic.t =
  let version_diff_to_json lst =
    `List (List.map (fun { name; version_left; version_right } ->
        `Assoc [
          ("name", `String (OpamPackage.Name.to_string name));
          ("version_left", `String (OpamPackage.Version.to_string version_left));
          ("version_right", `String (OpamPackage.Version.to_string version_right))
        ]) lst)
  in
  (* Note: Set.fold conses, so packages appear in reverse set order. *)
  let package_set_to_json set =
    `List (Set.fold (fun p acc ->
        let json = `Assoc [
          ("name", `String (OpamPackage.Name.to_string p.OpamPackage.name));
          ("version", `String (OpamPackage.Version.to_string p.OpamPackage.version))
        ] in
        json :: acc) set [])
  in
  let opam_diff_to_json opam_diff =
    `List (List.map (fun (diff : opam_diff) ->
        `Assoc [
          ("package_version", `String (OpamPackage.to_string diff.pkg));
          ("effectively_equal", `Bool diff.effectively_equal);
          ("diff", `String diff.diff);
        ]) opam_diff)
  in
  (* A duniverse parse failure is encoded as a bare string message. *)
  let duniverse_to_json = function
    | Ok (left, right, detailed_diff) ->
      `Assoc [
        ("left", `List (List.map (fun (k, v) -> `Assoc [("name", `String k); ("value", `String v)]) left));
        ("right", `List (List.map (fun (k, v) -> `Assoc [("name", `String k); ("value", `String v)]) right));
        ("detailed_diff",`List (List.map (fun (diff : duniverse_diff) ->
            `Assoc [
              ("name", `String diff.name);
            ]) detailed_diff))
      ]
    | Error (`Msg msg) ->
      `String msg
  in
  `Assoc [
    ("opam_diff", opam_diff_to_json opam_diff);
    ("version_diff", version_diff_to_json version_diff);
    ("only_in_left", package_set_to_json left_pkgs);
    ("only_in_right", package_set_to_json right_pkgs);
    ("duniverse_diff", duniverse_to_json duniverse_diff)
  ]

View file

@ -1,34 +0,0 @@
(** Difference in the opam file of a package installed at the same version
    in both switch exports.  [diff] is a textual (unified) diff of the two
    opam files; [effectively_equal] is true when the files are equal apart
    from their build/install instructions and url section. *)
type opam_diff = {
  pkg : OpamPackage.t ;
  effectively_equal : bool ;
  diff : string ;
}

(** A package installed on both sides with different versions. *)
type version_diff = {
  name : OpamPackage.Name.t;
  version_left : OpamPackage.Version.t;
  version_right : OpamPackage.Version.t;
}

(** Difference of one opam-monorepo duniverse directory: the left URL (and
    the right URL when it changed) and the per-hash-kind value changes
    ([None] = absent on that side). *)
type duniverse_diff = {
  name : string ;
  urls : string * string option ;
  hash : (OpamHash.kind * string option * string option) list ;
}

val pp_opampackage : Format.formatter -> OpamPackage.t -> unit

val pp_version_diff : Format.formatter -> version_diff -> unit

val pp_duniverse_diff : Format.formatter -> duniverse_diff -> unit

(** [pp_duniverse_dir ppf (dir, url)] prints ["dir (url)"]. *)
val pp_duniverse_dir : Format.formatter -> string * string -> unit

val pp_opam_diff : Format.formatter -> opam_diff -> unit

(** [compare left right] returns the opam file diffs, the version changes,
    the packages only in [left], the packages only in [right], and the
    duniverse comparison (directories only on the left, only on the right,
    and detailed diffs) — or an error when a duniverse field could not be
    parsed. *)
val compare : OpamFile.SwitchExport.t ->
  OpamFile.SwitchExport.t ->
  opam_diff list * version_diff list * OpamPackage.Set.t * OpamPackage.Set.t * ((string * string) list * (string * string) list * duniverse_diff list, [> `Msg of string ]) result

(** JSON encoding of the result of {!compare}. *)
val compare_to_json : opam_diff list * version_diff list * OpamPackage.Set.t * OpamPackage.Set.t *
  ((string * string) list * (string * string) list * duniverse_diff list, [< `Msg of string ]) result -> Yojson.Basic.t

View file

@ -32,8 +32,6 @@ Options:
Hex encoded SHA256 digest of the main binary. Hex encoded SHA256 digest of the main binary.
--job=STRING --job=STRING
Job name that was built. Job name that was built.
--main-binary-filepath=STRING
The file path of the main binary.
EOM EOM
exit 1 exit 1
} }
@ -41,7 +39,6 @@ EOM
BUILD_TIME= BUILD_TIME=
SHA= SHA=
JOB= JOB=
FILEPATH=
while [ $# -gt 1 ]; do while [ $# -gt 1 ]; do
OPT="$1" OPT="$1"
@ -56,9 +53,6 @@ while [ $# -gt 1 ]; do
--job=*) --job=*)
JOB="${OPT##*=}" JOB="${OPT##*=}"
;; ;;
--main-binary-filepath=*)
FILEPATH="${OPT##*=}"
;;
--*) --*)
warn "Ignoring unknown option: '${OPT}'" warn "Ignoring unknown option: '${OPT}'"
;; ;;
@ -73,14 +67,13 @@ done
[ -z "${BUILD_TIME}" ] && die "The --build-time option must be specified" [ -z "${BUILD_TIME}" ] && die "The --build-time option must be specified"
[ -z "${SHA}" ] && die "The --sha256 option must be specified" [ -z "${SHA}" ] && die "The --sha256 option must be specified"
[ -z "${JOB}" ] && die "The --job option must be specified" [ -z "${JOB}" ] && die "The --job option must be specified"
[ -z "${FILEPATH}" ] && die "The --main-binary-filepath option must be specified"
FILENAME="${1}" FILENAME="${1}"
: "${REPO:="/usr/local/www/pkg"}" : "${REPO:="/usr/local/www/pkg"}"
: "${REPO_KEY:="/usr/local/etc/builder-web/repo.key"}" : "${REPO_KEY:="/usr/local/etc/builder-web/repo.key"}"
if [ "$(basename "${FILEPATH}" .pkg)" = "$(basename "${FILEPATH}")" ]; then if [ "$(basename "${FILENAME}" .pkg)" = "$(basename "${FILENAME}")" ]; then
echo "Not a FreeBSD package" echo "Not a FreeBSD package"
exit 0 exit 0
fi fi
@ -108,8 +101,8 @@ mv "${TMP}/usr" "${PKG_ROOT}"
VERSION=$(jq -r '.version' "${MANIFEST}") VERSION=$(jq -r '.version' "${MANIFEST}")
# if we've a tagged version (1.5.0), append the number of commits and a dummy hash # if we've a tagged version (1.5.0), append the number of commits and a dummy hash
VERSION_GOOD=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$') || true VERSION_GOOD=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$')
VERSION_WITH_COMMIT=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\.[0-9]\+\.g[0-9a-fA-f]\+$') || true VERSION_WITH_COMMIT=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\.[0-9]\+\.g[0-9a-fA-f]\+$')
if [ $VERSION_GOOD -eq 0 -a $VERSION_WITH_COMMIT -eq 0 ]; then if [ $VERSION_GOOD -eq 0 -a $VERSION_WITH_COMMIT -eq 0 ]; then
die "version does not conform to (MAJOR.MINOR.PATCH[.#NUM_COMMITS.g<HASH>])" die "version does not conform to (MAJOR.MINOR.PATCH[.#NUM_COMMITS.g<HASH>])"
fi fi
@ -131,7 +124,6 @@ PKG_DIR="${REPO_DIR}/All"
# and then move it before recreating the index # and then move it before recreating the index
pkg create -r "${PKG_ROOT}" -m "${MANIFEST}" -o "${TMP}" pkg create -r "${PKG_ROOT}" -m "${MANIFEST}" -o "${TMP}"
mkdir -p "${PKG_DIR}" mkdir -p "${PKG_DIR}"
rm -f "${PKG_DIR}"/"${NAME}"-*.pkg
mv "${TMP}/${NAME}-${FULL_VERSION}.pkg" "${PKG_DIR}" mv "${TMP}/${NAME}-${FULL_VERSION}.pkg" "${PKG_DIR}"
pkg repo "${REPO_DIR}" "${REPO_KEY}" pkg repo "${REPO_DIR}" "${REPO_KEY}"

View file

@ -2,7 +2,7 @@ name: builder-web
version: %%VERSION_NUM%% version: %%VERSION_NUM%%
origin: local/builder-web origin: local/builder-web
comment: Builder web service comment: Builder web service
www: https://git.robur.coop/robur/builder-web www: https://git.robur.io/robur/builder-web
maintainer: Robur <team@robur.coop> maintainer: Robur <team@robur.coop>
prefix: /usr/local prefix: /usr/local
licenselogic: single licenselogic: single
@ -18,12 +18,6 @@ deps {
}, },
sqlite3 { sqlite3 {
origin = "databases/sqlite3"; origin = "databases/sqlite3";
},
opam-graph {
origin = "local/opam-graph";
},
modulectomy {
origin = "local/modulectomy";
} }
} }
scripts : { scripts : {

View file

@ -35,12 +35,12 @@ install -U "$bdir/builder-migrations" "$sbindir/builder-migrations"
install -U "$bdir/builder-db" "$sbindir/builder-db" install -U "$bdir/builder-db" "$sbindir/builder-db"
# stage visualization scripts # stage visualization scripts
install -U "$basedir/packaging/batch-viz.sh" "$confdir/batch-viz.sh.sample" install -U "$bdir/packaging/batch-viz.sh" "$confdir/batch-viz.sh"
install -U "$basedir/packaging/visualizations.sh" "$confdir/upload-hooks/visualizations.sh.sample" install -U "$bdir/packaging/visualizations.sh" "$confdir/upload-hooks/visualizations.sh"
# example repo scripts # example repo scripts
install -U "$basedir/packaging/dpkg-repo.sh" "$sharedir/dpkg-repo.sh" install -U "$bdir/packaging/dpkg-repo.sh" "$sharedir/dpkg-repo.sh"
install -U "$basedir/packaging/FreeBSD-repo.sh" "$sharedir/FreeBSD-repo.sh" install -U "$bdir/packaging/FreeBSD-repo.sh" "$sharedir/FreeBSD-repo.sh"
# create +MANIFEST # create +MANIFEST
flatsize=$(find "$rootdir" -type f -exec stat -f %z {} + | flatsize=$(find "$rootdir" -type f -exec stat -f %z {} + |
@ -59,6 +59,6 @@ sed -e "s:%%FLATSIZE%%:${flatsize}:" -e "/^[Vv]ersion:/s/-/./g" "$pdir/MANIFEST"
export SOURCE_DATE_EPOCH=$(git log -1 --pretty=format:%ct) export SOURCE_DATE_EPOCH=$(git log -1 --pretty=format:%ct)
pkg create -r "$rootdir" -M "$manifest" -o "$basedir/" pkg create -r "$rootdir" -M "$manifest" -o "$basedir/"
mv "$basedir"/builder-web-*.pkg "$basedir/builder-web.pkg" mv "$basedir/builder-web-*.pkg" "$basedir/builder-web.pkg"
echo 'bin: [ "builder-web.pkg" ]' > "$basedir/builder-web.install" echo 'bin: [ "builder-web.pkg" ]' > "$basedir/builder-web.install"
echo 'doc: [ "README.md" ]' >> "$basedir/builder-web.install" echo 'doc: [ "README.md" ]' >> "$basedir/builder-web.install"

View file

@ -33,7 +33,7 @@ procname="/usr/local/libexec/builder-web"
builder_web_start () { builder_web_start () {
echo "Starting ${name}." echo "Starting ${name}."
/usr/sbin/daemon -S -r -P "${pidfile}" -u "${builder_web_user}" \ /usr/sbin/daemon -S -p "${pidfile}" -u "${builder_web_user}" \
"${procname}" ${builder_web_flags} "${procname}" ${builder_web_flags}
} }

View file

@ -26,7 +26,7 @@ die()
usage() usage()
{ {
cat <<EOM 1>&2 cat <<EOM 1>&2
usage: ${prog_NAME} [ OPTIONS ] usage: ${prog_NAME} [ OPTIONS ]
Generates visualizations of all things Generates visualizations of all things
--data-dir=STRING --data-dir=STRING
Path to the data directory. Path to the data directory.
@ -77,8 +77,6 @@ done
DB="${DATA_DIR}/builder.sqlite3" DB="${DATA_DIR}/builder.sqlite3"
[ ! -e "$DB" ] && die "The database doesn't exist: '$DB'" [ ! -e "$DB" ] && die "The database doesn't exist: '$DB'"
# Let's be somewhat lenient with the database version.
# In visualizations.sh we can be more strict.
DB_VERSION="$(sqlite3 "$DB" "PRAGMA user_version;")" DB_VERSION="$(sqlite3 "$DB" "PRAGMA user_version;")"
[ -z "$DB_VERSION" ] && die "Couldn't read database version from '$DB'" [ -z "$DB_VERSION" ] && die "Couldn't read database version from '$DB'"
[ "$DB_VERSION" -lt 16 ] && die "The database version should be >= 16. It is '$DB_VERSION'" [ "$DB_VERSION" -lt 16 ] && die "The database version should be >= 16. It is '$DB_VERSION'"
@ -87,7 +85,7 @@ APP_ID="$(sqlite3 "$DB" "PRAGMA application_id;")"
[ -z "$APP_ID" ] && die "Couldn't read application-id from '$DB'" [ -z "$APP_ID" ] && die "Couldn't read application-id from '$DB'"
[ "$APP_ID" -ne 1234839235 ] && die "The application-id should be = 1234839235. It is '$APP_ID'" [ "$APP_ID" -ne 1234839235 ] && die "The application-id should be = 1234839235. It is '$APP_ID'"
echo echo
echo "-----------------------------------------------------------------------------" echo "-----------------------------------------------------------------------------"
info "Starting batch creation of visualizations: $(date)" info "Starting batch creation of visualizations: $(date)"
@ -129,22 +127,8 @@ fi
ATTEMPTED_VIZS=0 ATTEMPTED_VIZS=0
FAILED_VIZS=0 FAILED_VIZS=0
distinct-input () { for i in $(find "${DATA_DIR}" -type f -path \*output/bin\*); do
{ UUID=$(echo "${i}" | rev | cut -d '/' -f 4 | rev)
sqlite3 "${DATA_DIR}/builder.sqlite3" "SELECT b.uuid
FROM build b
JOIN build_artifact opam ON opam.build = b.id
WHERE opam.filepath = 'opam-switch' AND b.main_binary NOT NULL
GROUP BY opam.sha256;"
sqlite3 "${DATA_DIR}/builder.sqlite3" "SELECT b.uuid
FROM build b
JOIN build_artifact debug ON debug.build = b.id
WHERE debug.filepath LIKE '%.debug' AND b.main_binary NOT NULL
GROUP BY debug.sha256;"
} | sort -u
}
for UUID in $(distinct-input); do
if ! "$VISUALIZATIONS_CMD" \ if ! "$VISUALIZATIONS_CMD" \
--data-dir="${DATA_DIR}" \ --data-dir="${DATA_DIR}" \
--cache-dir="${CACHE_DIR}" \ --cache-dir="${CACHE_DIR}" \

View file

@ -15,14 +15,11 @@ freebsd_sanitize_version () {
exit 1; exit 1;
fi fi
if [ $version_with_commit -eq 0 ]; then if [ $version_with_commit -eq 0 ]; then
v="${v}.0.g0000000.${post}" v="${v}.0.g0000000"
else
v="${v}.${post}"
fi fi
echo $v echo $v
} }
echo "using FreeBSD pkg to compare versions now:"
while read version_a version_b; do while read version_a version_b; do
version_a=$(freebsd_sanitize_version $version_a) version_a=$(freebsd_sanitize_version $version_a)
version_b=$(freebsd_sanitize_version $version_b) version_b=$(freebsd_sanitize_version $version_b)
@ -30,28 +27,7 @@ while read version_a version_b; do
printf "%s %s %s\n" "$version_a" "$result" "$version_b" printf "%s %s %s\n" "$version_a" "$result" "$version_b"
done < versions.txt done < versions.txt
debian_sanitize_version () {
post=$(echo $1 | rev | cut -d '-' -f 1-2 | rev)
v=$(echo $1 | rev | cut -d '-' -f 3- | rev)
version_good=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$')
version_with_commit=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\-[0-9]\+\-g[0-9a-fA-f]\+$')
if [ $version_good -eq 0 -a $version_with_commit -eq 0 ]; then
echo "invalid version $v";
exit 1;
fi
if [ $version_with_commit -eq 0 ]; then
v="${v}-0-g0000000-${post}"
else
v="${v}-${post}"
fi
echo $v
}
echo ""
echo "using Debian dpkg to compare versions now:"
while read version_a version_b; do while read version_a version_b; do
version_a=$(debian_sanitize_version $version_a)
version_b=$(debian_sanitize_version $version_b)
if dpkg --compare-versions "$version_a" lt "$version_b"; then if dpkg --compare-versions "$version_a" lt "$version_b"; then
echo "$version_a < $version_b" echo "$version_a < $version_b"
else else

View file

@ -4,10 +4,10 @@ Section: unknown
Priority: optional Priority: optional
Maintainer: Robur Team <team@robur.coop> Maintainer: Robur Team <team@robur.coop>
Standards-Version: 4.4.1 Standards-Version: 4.4.1
Homepage: https://git.robur.coop/robur/builder-web Homepage: https://git.robur.io/robur/builder-web
Vcs-Browser: https://git.robur.coop/robur/builder-web Vcs-Browser: https://git.robur.io/robur/builder-web
Vcs-Git: https://git.robur.coop/robur/builder-web.git Vcs-Git: https://git.robur.io/robur/builder-web.git
Architecture: all Architecture: all
Depends: libgmp10, libsqlite3-0, libev4, opam-graph, modulectomy Depends: libgmp10, libsqlite3-0, libev4
Description: Web service for storing and presenting builds. Description: Web service for storing and presenting builds.
Builder-web stores builds in a sqlite database and serves them via HTTP. Builder-web stores builds in a sqlite database and serves them via HTTP.

View file

@ -1,7 +1,7 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: builder-web Upstream-Name: builder-web
Upstream-Contact: Robur Team <team@robur.coop> Upstream-Contact: Robur Team <team@robur.coop>
Source: https://git.robur.coop/robur/builder-web Source: https://git.robur.io/robur/builder-web
Files: * Files: *
Copyright: "Reynir Björnsson <reynir@reynir.dk>" "Hannes Mehnert <hannes@mehnert.org>" Copyright: "Reynir Björnsson <reynir@reynir.dk>" "Hannes Mehnert <hannes@mehnert.org>"

View file

@ -36,8 +36,6 @@ Options:
Job name that was built. Job name that was built.
--platform=STRING --platform=STRING
Platform name on which the build was performed. Platform name on which the build was performed.
--main-binary-filepath=STRING
The file path of the main binary.
EOM EOM
exit 1 exit 1
} }
@ -46,7 +44,6 @@ BUILD_TIME=
SHA= SHA=
JOB= JOB=
PLATFORM= PLATFORM=
FILEPATH=
while [ $# -gt 1 ]; do while [ $# -gt 1 ]; do
OPT="$1" OPT="$1"
@ -64,9 +61,6 @@ while [ $# -gt 1 ]; do
--platform=*) --platform=*)
PLATFORM="${OPT##*=}" PLATFORM="${OPT##*=}"
;; ;;
--main-binary-filepath=*)
FILEPATH="${OPT##*=}"
;;
--*) --*)
warn "Ignoring unknown option: '${OPT}'" warn "Ignoring unknown option: '${OPT}'"
;; ;;
@ -82,11 +76,10 @@ done
[ -z "${SHA}" ] && die "The --sha256 option must be specified" [ -z "${SHA}" ] && die "The --sha256 option must be specified"
[ -z "${JOB}" ] && die "The --job option must be specified" [ -z "${JOB}" ] && die "The --job option must be specified"
[ -z "${PLATFORM}" ] && die "The --platform option must be specified" [ -z "${PLATFORM}" ] && die "The --platform option must be specified"
[ -z "${FILEPATH}" ] && die "The --main-binary-filepath option must be specified"
FILENAME="${1}" FILENAME="${1}"
if [ $(basename "${FILEPATH}" .deb) = $(basename "${FILEPATH}") ]; then if [ $(basename "${FILENAME}" .deb) = $(basename "${FILENAME}") ]; then
echo "Not a Debian package" echo "Not a Debian package"
exit 0 exit 0
fi fi
@ -111,16 +104,6 @@ mkdir "${PKG_ROOT}"
dpkg-deb -R "${FILENAME}" "${PKG_ROOT}" dpkg-deb -R "${FILENAME}" "${PKG_ROOT}"
VERSION=$(dpkg-deb -f "${FILENAME}" Version) VERSION=$(dpkg-deb -f "${FILENAME}" Version)
# if we've a tagged version (1.5.0), append the number of commits and a dummy hash
VERSION_GOOD=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$') || true
VERSION_WITH_COMMIT=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\-[0-9]\+\-g[0-9a-fA-f]\+$') || true
if [ $VERSION_GOOD -eq 0 -a $VERSION_WITH_COMMIT -eq 0 ]; then
die "version does not conform to (MAJOR.MINOR.PATCH[-#NUM_COMMITS-g<HASH>])"
fi
if [ $VERSION_WITH_COMMIT -eq 0 ]; then
VERSION="${VERSION}-0-g0000000"
fi
NEW_VERSION="${VERSION}"-"${BUILD_TIME}"-"${SHA}" NEW_VERSION="${VERSION}"-"${BUILD_TIME}"-"${SHA}"
sed -i "" -e "s/Version:.*/Version: ${NEW_VERSION}/g" "${PKG_ROOT}/DEBIAN/control" sed -i "" -e "s/Version:.*/Version: ${NEW_VERSION}/g" "${PKG_ROOT}/DEBIAN/control"
@ -131,8 +114,6 @@ if ! aptly repo show "${PLATFORM}" > /dev/null 2>&1; then
aptly repo create --distribution="${PLATFORM}" "${PLATFORM}" aptly repo create --distribution="${PLATFORM}" "${PLATFORM}"
fi fi
PACKAGE=$(dpkg-deb -f "${FILENAME}" Package)
aptly repo remove "${PLATFORM}" "${PACKAGE}"
aptly repo add "${PLATFORM}" "${TMP}" aptly repo add "${PLATFORM}" "${TMP}"
: "${REPO_KEYID:="D5E2DC92617877EDF7D4FD4345EA05FB7E26053D"}" : "${REPO_KEYID:="D5E2DC92617877EDF7D4FD4345EA05FB7E26053D"}"

View file

@ -6,4 +6,3 @@
3.0.0-20230101-abcd 3.0.1-20230204-bdbd 3.0.0-20230101-abcd 3.0.1-20230204-bdbd
1.5.0-20220516-a0d5a2 1.5.0-3-g26b5a59-20220527-0bc180 1.5.0-20220516-a0d5a2 1.5.0-3-g26b5a59-20220527-0bc180
1.5.0-3-g26b5a59-20220527-0bc180 1.5.1-20220527-0bc180 1.5.0-3-g26b5a59-20220527-0bc180 1.5.1-20220527-0bc180
0.1.0-20221120104301-f9e456637274844d45d9758ec661a136d0cda7966b075e4426b69fe6da00427b 0.1.0-237-g62965d4-20230527202149-6118c39221f318154e234098b5cffd4dc1d80f19cf2200cc6b1eb768dbf6decb

View file

@ -70,59 +70,46 @@ done
[ -z "${CACHE_DIR}" ] && die "The --cache-dir option must be specified" [ -z "${CACHE_DIR}" ] && die "The --cache-dir option must be specified"
[ -z "${DATA_DIR}" ] && die "The --data-dir option must be specified" [ -z "${DATA_DIR}" ] && die "The --data-dir option must be specified"
info "processing UUID '${UUID}'" info "processing UUID '$UUID'"
DB="${DATA_DIR}/builder.sqlite3" DB="${DATA_DIR}/builder.sqlite3"
# A new visualizations.sh script may be installed during an upgrade while the
# old builder-web binary is running. In that case things can get out of sync.
DB_VERSION="$(sqlite3 "$DB" "PRAGMA user_version;")"
[ -z "$DB_VERSION" ] && die "Couldn't read database version from '$DB'"
[ "$DB_VERSION" -ne 18 ] && die "The database version should be 18. It is '$DB_VERSION'"
APP_ID="$(sqlite3 "$DB" "PRAGMA application_id;")"
[ -z "$APP_ID" ] && die "Couldn't read application-id from '$DB'"
[ "$APP_ID" -ne 1234839235 ] && die "The application-id should be 1234839235. It is '$APP_ID'"
get_main_binary () { get_main_binary () {
sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256)) sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
FROM build AS b
JOIN build_artifact AS ba ON ba.build = b.id AND b.main_binary = ba.id JOIN build_artifact AS ba ON ba.build = b.id AND b.main_binary = ba.id
WHERE uuid = '${UUID}';" WHERE uuid = '$UUID';"
} }
BIN="${DATA_DIR}/$(get_main_binary)" || die "Failed to get main binary from database" BIN="${DATA_DIR}/$(get_main_binary)" || die "Failed to get main binary from database"
[ -z "${BIN}" ] && die "No main-binary found in db '${DB}' for build '${UUID}'" [ -z "${BIN}" ] && die "No main-binary found in db '$DB' for build '$UUID'"
get_debug_binary () { get_debug_binary () {
sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256)) sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
FROM build AS b
JOIN build_artifact AS ba ON ba.build = b.id JOIN build_artifact AS ba ON ba.build = b.id
WHERE WHERE
uuid = '${UUID}' uuid = '$UUID'
AND ba.filepath LIKE '%.debug';" AND ba.localpath LIKE '%.debug';"
} }
DEBUG_BIN_RELATIVE="$(get_debug_binary)" || die "Failed to get debug binary from database" DEBUG_BIN_RELATIVE="$(get_debug_binary)" || die "Failed to get debug binary from database"
get_opam_switch () { get_opam_switch () {
sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256)) sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
FROM build AS b
JOIN build_artifact AS ba ON ba.build = b.id JOIN build_artifact AS ba ON ba.build = b.id
WHERE WHERE
uuid = '${UUID}' uuid = '$UUID'
AND ba.filepath = 'opam-switch';" AND ba.filepath = 'opam-switch';"
} }
OPAM_SWITCH="$(get_opam_switch)" || die "Failed to get opam switch from database" OPAM_SWITCH="$(get_opam_switch)" || die "Failed to get opam switch from database"
[ -z "${OPAM_SWITCH}" ] && die "No 'opam-switch' found in db '${DB}' for build '${UUID}'" [ -z "${OPAM_SWITCH}" ] && die "No 'opam-switch' found in db '$DB' for build '$UUID'"
OPAM_SWITCH="${DATA_DIR}/${OPAM_SWITCH}" OPAM_SWITCH="${DATA_DIR}/${OPAM_SWITCH}"
OPAM_GRAPH="opam-graph" OPAM_GRAPH="opam-graph"
MODULECTOMY="modulectomy" MODULECTOMY="modulectomy"
LATEST_TREEMAPVIZ_VERSION="$(${MODULECTOMY} --version)" || die "Failed to get modulectomy version" LATEST_TREEMAPVIZ_VERSION="$($MODULECTOMY --version)" || die "Failed to get modulectomy version"
LATEST_DEPENDENCIESVIZ_VERSION="$(${OPAM_GRAPH} --version)" || die "Failed to get opam-graph version" LATEST_DEPENDENCIESVIZ_VERSION="$($OPAM_GRAPH --version)" || die "Failed to get opam-graph version"
TREEMAP_CACHE_DIR="${CACHE_DIR}/treemap_${LATEST_TREEMAPVIZ_VERSION}" TREEMAP_CACHE_DIR="${CACHE_DIR}/treemap_${LATEST_TREEMAPVIZ_VERSION}"
DEPENDENCIES_CACHE_DIR="${CACHE_DIR}/dependencies_${LATEST_DEPENDENCIESVIZ_VERSION}" DEPENDENCIES_CACHE_DIR="${CACHE_DIR}/dependencies_${LATEST_DEPENDENCIESVIZ_VERSION}"
@ -149,7 +136,7 @@ trap cleanup EXIT
# /// Dependencies viz # /// Dependencies viz
if [ ! -d "${DEPENDENCIES_CACHE_DIR}" ]; then if [ ! -d "${DEPENDENCIES_CACHE_DIR}" ]; then
mkdir "${DEPENDENCIES_CACHE_DIR}" || die "Failed to create directory '${DEPENDENCIES_CACHE_DIR}'" mkdir "${DEPENDENCIES_CACHE_DIR}" || die "Failed to create directory '$DEPENDENCIES_CACHE_DIR'"
fi fi
OPAM_SWITCH_FILEPATH='opam-switch' OPAM_SWITCH_FILEPATH='opam-switch'
@ -157,8 +144,8 @@ OPAM_SWITCH_FILEPATH='opam-switch'
get_opam_switch_hash () { get_opam_switch_hash () {
sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
JOIN build_artifact AS ba ON ba.build = b.id JOIN build_artifact AS ba ON ba.build = b.id
WHERE uuid = '${UUID}' WHERE uuid = '$UUID'
AND ba.filepath = '${OPAM_SWITCH_FILEPATH}';" AND ba.filepath = '$OPAM_SWITCH_FILEPATH';"
} }
DEPENDENCIES_INPUT_HASH="$(get_opam_switch_hash)" || die "Failed to get opam-switch hash from database" DEPENDENCIES_INPUT_HASH="$(get_opam_switch_hash)" || die "Failed to get opam-switch hash from database"
@ -168,8 +155,7 @@ if [ -e "${DEPENDENCIES_VIZ_FILENAME}" ]; then
info "Dependency visualization already exists: '${DEPENDENCIES_VIZ_FILENAME}'" info "Dependency visualization already exists: '${DEPENDENCIES_VIZ_FILENAME}'"
else else
if ${OPAM_GRAPH} --output-format=html "${OPAM_SWITCH}" > "${TMPDEPENDENCIES}"; then if ${OPAM_GRAPH} --output-format=html "${OPAM_SWITCH}" > "${TMPDEPENDENCIES}"; then
cp "${TMPDEPENDENCIES}" "${DEPENDENCIES_VIZ_FILENAME}" mv "${TMPDEPENDENCIES}" "${DEPENDENCIES_VIZ_FILENAME}"
rm "${TMPDEPENDENCIES}"
else else
die "opam-graph failed to generate visualization" die "opam-graph failed to generate visualization"
fi fi
@ -187,16 +173,16 @@ stat_aux () {
fi fi
} }
SIZE="$(stat_aux "${BIN}")" SIZE="$(stat_aux "$BIN")"
if [ ! -d "${TREEMAP_CACHE_DIR}" ]; then if [ ! -d "${TREEMAP_CACHE_DIR}" ]; then
mkdir "${TREEMAP_CACHE_DIR}" || die "Failed to create directory '${TREEMAP_CACHE_DIR}'" mkdir "${TREEMAP_CACHE_DIR}" || die "Failed to create directory '$TREEMAP_CACHE_DIR'"
fi fi
get_debug_bin_hash () { get_debug_bin_hash () {
sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
JOIN build_artifact AS ba ON ba.build = b.id JOIN build_artifact AS ba ON ba.build = b.id
WHERE uuid = '${UUID}' WHERE uuid = '$UUID'
AND ba.filepath LIKE '%.debug';" AND ba.filepath LIKE '%.debug';"
} }
@ -215,8 +201,7 @@ if [ -n "${DEBUG_BIN_RELATIVE}" ]; then
"${DEBUG_BIN}" \ "${DEBUG_BIN}" \
> "${TMPTREE}" > "${TMPTREE}"
then then
cp "${TMPTREE}" "${TREEMAP_VIZ_FILENAME}" mv "${TMPTREE}" "${TREEMAP_VIZ_FILENAME}"
rm "${TMPTREE}"
else else
die "modulectomy failed to generate visualization" die "modulectomy failed to generate visualization"
fi fi

View file

@ -1,30 +0,0 @@
#!/bin/sh
# Edit these values to your needs:
remote_instance=https://builds.robur.coop
local_user_pass=test:test
local_instance="http://${local_user_pass}@localhost:3000"
limit=100
curl_json () {
curl --silent --fail --location --header "Accept: application/json" "$@"
}
curl_json "${remote_instance}/" | jq -r .jobs[] | {
while read -r job_name; do
curl_json "${remote_instance}/job/${job_name}" | jq -r .builds[].uuid | {
while read -r build_uuid; do
if [ "$limit" -eq 0 ]; then
break 2;
fi
dest=$(mktemp "builder-${build_uuid}.XXXXXXXXXX")
curl --silent --fail "${remote_instance}/job/${job_name}/build/${build_uuid}/exec" > "$dest" && {
echo "Uploading $job_name build $build_uuid"
curl --data-binary "@${dest}" "${local_instance}/upload"
}
rm -f "$dest"
limit=$((limit - 1))
done
}
done
}

View file

@ -1,16 +1,9 @@
(test (test
(name test_builder_db) (name test_builder_db)
(modules test_builder_db) (modules test_builder_db)
(libraries ptime.clock.os builder_db caqti.blocking alcotest mirage-crypto-rng.unix ohex)) (libraries builder_db caqti.blocking alcotest mirage-crypto-rng.unix))
(test (test
(name markdown_to_html) (name markdown_to_html)
(modules markdown_to_html) (modules markdown_to_html)
(libraries builder_web cmarkit alcotest)) (libraries builder_web alcotest))
(test
(name router)
(modules router)
(libraries builder_web fmt dream yojson alcotest)
(preprocess
(pps ppx_deriving.std ppx_deriving_yojson)))

View file

@ -1,14 +1,14 @@
let markdown_to_html = Builder_web__Utils.md_to_html let markdown_to_html = Builder_web__Utils.Omd.html_of_string
let test_simple () = let test_simple () =
let markdown = {|# Hello world|} in let markdown = {|# Hello world|} in
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "simple html" "<h1 id=\"hello-world\"><a class=\"anchor\" aria-hidden=\"true\" href=\"#hello-world\"></a>Hello world</h1>\n" html) Alcotest.(check string "simple html" "<h1>Hello world</h1>\n" html)
let test_html_script () = let test_html_script () =
let markdown = {|# <script>Hello world</script>|} in let markdown = {|# <script>Hello world</script>|} in
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "html script header" "<h1 id=\"hello-world\"><a class=\"anchor\" aria-hidden=\"true\" href=\"#hello-world\"></a><!-- CommonMark raw HTML omitted -->Hello world<!-- CommonMark raw HTML omitted --></h1>\n" html) Alcotest.(check string "html script header" "<h1>Hello world</h1>\n" html)
let test_preserve_span_content () = let test_preserve_span_content () =
let markdown = {|* <span id="myref">My ref</span> let markdown = {|* <span id="myref">My ref</span>
@ -16,8 +16,10 @@ let test_preserve_span_content () =
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "html span content preserved" Alcotest.(check string "html span content preserved"
{|<ul> {|<ul>
<li><!-- CommonMark raw HTML omitted -->My ref<!-- CommonMark raw HTML omitted --></li> <li>My ref
<li><a href="#myref">See my ref</a> for more information</li> </li>
<li>See my ref for more information
</li>
</ul> </ul>
|} |}
html) html)
@ -25,21 +27,20 @@ let test_preserve_span_content () =
let test_remove_script () = let test_remove_script () =
let markdown = {|<script>alert(1);</script>|} in let markdown = {|<script>alert(1);</script>|} in
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "html script removed" "<!-- CommonMark HTML block omitted -->\n" html) Alcotest.(check string "html script removed" "" html)
let test_list_with_html_block_and_markdown () = let test_list_with_html_block_and_markdown () =
let markdown = "* <div> Hello, World!</div> *this is not html*" in let markdown = "* <div> Hello, World!</div> *this is not html*" in
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "list with html block and markdown" Alcotest.(check string "list with html block and markdown"
(*"<ul>\n<li><em>this is not html</em>\n</li>\n</ul>\n"*) (*"<ul>\n<li><em>this is not html</em>\n</li>\n</ul>\n"*) ""
"<ul>\n<li>\n<!-- CommonMark HTML block omitted -->\n</li>\n</ul>\n"
html) html)
let test_list_with_inline_html_and_markdown () = let test_list_with_inline_html_and_markdown () =
let markdown = "* <span> Hello, World!</span> *this is not html*" in let markdown = "* <span> Hello, World!</span> *this is not html*" in
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "list with html block and markdown" Alcotest.(check string "list with html block and markdown"
"<ul>\n<li><!-- CommonMark raw HTML omitted --> Hello, World!<!-- CommonMark raw HTML omitted --> <em>this is not html</em></li>\n</ul>\n" "<ul>\n<li> Hello, World! <em>this is not html</em>\n</li>\n</ul>\n"
html) html)
let test_absolute_link () = let test_absolute_link () =
@ -50,131 +51,35 @@ let test_absolute_link () =
let test_relative_link () = let test_relative_link () =
let markdown = "[foo](../foo.jpg)" in let markdown = "[foo](../foo.jpg)" in
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "relative link" "<p><a href=\"../foo.jpg\">foo</a></p>\n" html) Alcotest.(check string "relative link" "<p>foo</p>\n" html)
let test_absolute_image () = let test_absolute_image () =
let markdown = "![alttext](https://foo.com/bar.jpg)" in let markdown = "![alttext](https://foo.com/bar.jpg)" in
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "absolute image" Alcotest.(check string "absolute image"
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"alttext\" ></p>\n" html) "<p><img src=\"https://foo.com/bar.jpg\" alt=\"alttext\" /></p>\n" html)
let test_absolute_image_no_alt () = let test_absolute_image_no_alt () =
let markdown = "![](https://foo.com/bar.jpg)" in let markdown = "![](https://foo.com/bar.jpg)" in
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "absolute image" Alcotest.(check string "absolute image"
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" ></p>\n" html) "<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" /></p>\n" html)
let test_relative_image () = let test_relative_image () =
let markdown = "![](/bar.jpg)" in let markdown = "![](/bar.jpg)" in
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "relative image" "<p><img src=\"/bar.jpg\" alt=\"\" ></p>\n" html) Alcotest.(check string "relative image" "" html)
let test_absolute_image_script_alt () = let test_absolute_image_script_alt () =
let markdown = "![<script src=\"bla.js\"></script>](https://foo.com/bar.jpg)" in let markdown = "![<script src=\"bla.js\"></script>](https://foo.com/bar.jpg)" in
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "absolute image with script alt text" Alcotest.(check string "absolute image with script alt text"
"<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" ></p>\n" html) "<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" /></p>\n" html)
let test_fragment_link () = let test_fragment_link () =
let markdown = "[fragment](#fragment)" in let markdown = "[fragment](#fragment)" in
let html = markdown_to_html markdown in let html = markdown_to_html markdown in
Alcotest.(check string "fragment link" "<p><a href=\"#fragment\">fragment</a></p>\n" html) Alcotest.(check string "fragment link" "<p>fragment</p>\n" html)
let test_heading_adjustment () =
let markdown = {|# foo
## bar
# baz
## bazbar
### bazbarbar
#### bazbarbarbar
##### bazbarbarbarbar
###### bazbarbarbarbarbar
|}
in
let html = markdown_to_html ~adjust_heading:2 markdown in
(* NB: the maximum heading is 6 in cmarkit, thus we reduce the structure *)
let exp = {|<h3 id="foo"><a class="anchor" aria-hidden="true" href="#foo"></a>foo</h3>
<h4 id="bar"><a class="anchor" aria-hidden="true" href="#bar"></a>bar</h4>
<h3 id="baz"><a class="anchor" aria-hidden="true" href="#baz"></a>baz</h3>
<h4 id="bazbar"><a class="anchor" aria-hidden="true" href="#bazbar"></a>bazbar</h4>
<h5 id="bazbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbar"></a>bazbarbar</h5>
<h6 id="bazbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbar"></a>bazbarbarbar</h6>
<h6 id="bazbarbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbarbar"></a>bazbarbarbarbar</h6>
<h6 id="bazbarbarbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbarbarbar"></a>bazbarbarbarbarbar</h6>
|} in
Alcotest.(check string "header adjustment works fine" exp html)
let test_table () =
let markdown = {__|| a | | b | c | d | e |
| --------------------- |-| -------------- | -------------- | --------------- | ------ |
| entry | | **bla.file** | **other.file** | | |
| _another entry_ | | **another.file** | **another.other** | | |
|__}
in
let html = markdown_to_html ~adjust_heading:2 markdown in
let exp = {|<div role="region"><table>
<tr>
<th>a</th>
<th></th>
<th>b</th>
<th>c</th>
<th>d</th>
<th>e</th>
</tr>
<tr>
<td>entry</td>
<td></td>
<td><strong>bla.file</strong></td>
<td><strong>other.file</strong></td>
<td></td>
<td></td>
</tr>
<tr>
<td><em>another entry</em></td>
<td></td>
<td><strong>another.file</strong></td>
<td><strong>another.other</strong></td>
<td></td>
<td></td>
</tr>
</table></div>|} in
Alcotest.(check string "table is rendered as html" exp html)
let test_table2 () =
let markdown = {__|| a | | b | c | d | e |
| --------------------- |-| -------------- | -------------- | --------------- | ------ |
| entry | | | | **bla.file** | **other.file** |
| _another entry_ | | | **another.file** | **another.other** | |
|__}
in
let html = markdown_to_html ~adjust_heading:2 markdown in
let exp = {|<div role="region"><table>
<tr>
<th>a</th>
<th></th>
<th>b</th>
<th>c</th>
<th>d</th>
<th>e</th>
</tr>
<tr>
<td>entry</td>
<td></td>
<td></td>
<td></td>
<td><strong>bla.file</strong></td>
<td><strong>other.file</strong></td>
</tr>
<tr>
<td><em>another entry</em></td>
<td></td>
<td></td>
<td><strong>another.file</strong></td>
<td><strong>another.other</strong></td>
<td></td>
</tr>
</table></div>|} in
Alcotest.(check string "table is rendered as html" exp html)
let markdown_tests = [ let markdown_tests = [
Alcotest.test_case "Simple" `Quick test_simple; Alcotest.test_case "Simple" `Quick test_simple;
@ -190,9 +95,6 @@ let markdown_tests = [
Alcotest.test_case "relative image" `Quick test_relative_image; Alcotest.test_case "relative image" `Quick test_relative_image;
Alcotest.test_case "absolute image with script alt" `Quick test_absolute_image_script_alt; Alcotest.test_case "absolute image with script alt" `Quick test_absolute_image_script_alt;
Alcotest.test_case "fragment link" `Quick test_fragment_link; Alcotest.test_case "fragment link" `Quick test_fragment_link;
Alcotest.test_case "heading adjustment" `Quick test_heading_adjustment;
Alcotest.test_case "table" `Quick test_table;
Alcotest.test_case "table2" `Quick test_table2;
] ]
let () = let () =

View file

@ -1,191 +0,0 @@
module Param_verification = struct
(*> None is 'verified'*)
type t = wrong_type option
[@@deriving yojson,show,eq]
and wrong_type = {
param : string;
expected : string;
}
let alcotyp = Alcotest.testable pp equal
module P = struct
let is_string : (string * string) -> _ option =
Fun.const None
let is_uuid (param, value) =
match Uuidm.of_string value with
| Some _ when String.length value = 36 -> None
| _ -> Some {
param;
expected = "Uuidm.t"
}
end
let verify parameters req =
let verified_params =
List.fold_left (fun acc p ->
match acc with
| None ->
if String.starts_with ~prefix:"build" p then
P.is_uuid (p, Dream.param req p)
else
P.is_string (p, Dream.param req p)
| Some _ as x -> x)
None parameters
in
let response_json =
verified_params |> to_yojson |> Yojson.Safe.to_string
in
Dream.respond response_json
end
let find_parameters path =
List.filter_map (fun s ->
if String.length s > 0 && String.get s 0 = ':' then
Some (String.sub s 1 (String.length s - 1))
else
None)
(String.split_on_char '/' path)
let router =
(* XXX: this relies on [Builder_web.routes] only using {data,cache,config}dir
* in the handlers which are never called here. The path /nonexistant is
* assumed to not exist. *)
let nodir = Fpath.v "/nonexistant" in
Builder_web.routes ~datadir:nodir ~cachedir:nodir ~configdir:nodir ~expired_jobs:0
|> List.map (fun (meth, route, _handler) ->
meth, route, Param_verification.verify (find_parameters route))
|> Builder_web.to_dream_routes
|> Dream.router
(* XXX: we test without remove_trailing_url_slash to ensure we don't produce
* urls with trailing slashes: *)
(* |> Builder_web.Middleware.remove_trailing_url_slash *)
|> Dream.test
let test_link method_ target () =
let req = Dream.request ~method_ ~target "" in
let resp = router req in
let status_code = Dream.(status resp |> status_to_int) in
Alcotest.(check' int ~msg:"status-code" ~actual:status_code ~expected:200);
let body =
Dream.body resp
|> Lwt_main.run
|> Yojson.Safe.from_string
|> Param_verification.of_yojson
in
Alcotest.(check' (result Param_verification.alcotyp string) ~msg:"param-verification"
~actual:body ~expected:(Ok None))
let test_link_artifact artifact =
let job_name = "test" in
let build = Uuidm.v4_gen (Random.State.make_self_init ()) () in
test_link `GET @@
Builder_web.Link.Job_build_artifact.make ~job_name ~build ~artifact ()
(* Test runner: checks that every URL the [Builder_web.Link] module can
   generate -- plus the hardcoded URLs used by albatross clients -- is
   accepted by the router with correctly-typed path parameters. *)
let () =
  Alcotest.run "Router" [
    (* Section 1: links produced by [Builder_web.Link] constructors must stay
       in sync with the routes the router actually serves. *)
    "Link module synced", Alcotest.[
      test_case "Link.Root.make" `Quick begin
        test_link `GET @@ Builder_web.Link.Root.make ()
      end;
      test_case "Link.Job.make" `Quick begin
        let queries = [ `Platform "test" ] in
        let job_name = "test" in
        test_link `GET @@ Builder_web.Link.Job.make ~queries ~job_name ()
      end;
      test_case "Link.Job.make_failed" `Quick begin
        let queries = [ `Platform "test" ] in
        let job_name = "test" in
        test_link `GET @@
        Builder_web.Link.Job.make_failed ~queries ~job_name ()
      end;
      test_case "Link.Job_build.make" `Quick begin
        let job_name = "test" in
        let build = Uuidm.v4_gen (Random.State.make_self_init ()) () in
        test_link `GET @@ Builder_web.Link.Job_build.make ~job_name ~build ()
      end;
      test_case "Link.Job_build_artifact.make_from_string" `Quick begin
        let job_name = "test" in
        let build = Uuidm.v4_gen (Random.State.make_self_init ()) () in
        let artifact = "" in
        test_link `GET @@
        Builder_web.Link.Job_build_artifact.make_from_string
          ~job_name ~build ~artifact ()
      end;
    ] @ Alcotest.(
      (* One generated test case per artifact kind accepted by
         [Link.Job_build_artifact.make]. *)
      [
        `Main_binary;
        `Viz_treemap;
        `Viz_dependencies;
        `Script;
        `Console;
        `All_targz;
        `File Fpath.(v "some" / "path");
      ]
      |> List.map (fun artifact ->
          let descr =
            Fmt.str "Job_build_artifact.make: %s" @@
            Builder_web.Link.Job_build_artifact.encode_artifact artifact
          in
          test_case descr `Quick begin
            test_link_artifact artifact
          end
        )
    ) @ Alcotest.[
      test_case "Link.Compare_builds.make" `Quick begin
        let left = Uuidm.v4_gen (Random.State.make_self_init ()) () in
        let right = Uuidm.v4_gen (Random.State.make_self_init ()) () in
        test_link `GET @@
        Builder_web.Link.Compare_builds.make ~left ~right ()
      end;
      test_case "Link.Failed_builds.make" `Quick begin
        test_link `GET @@
        Builder_web.Link.Failed_builds.make ~count:2 ~start:1 ()
      end;
    ];
    (* Section 2: "latest build" redirect targets are routable.
       (this doesn't actually test the redirects, unfortunately) *)
    "Latest", List.map (fun p -> Alcotest.(test_case (""^p) `Quick begin
        test_link `GET @@ "/job/test/build/latest" ^ p end))
      [
        "/f/bin/unikernel.hvt";
        "/";
        "";
      ];
    (* Section 3: URLs that albatross clients construct by hand; if a route
       changes, these must keep working for deployed clients. *)
    "Albatross hardcoded links",
    [
      (*> Note: these links can be found in
          albatross/command-line/albatross_client_update.ml
          .. to find them I followed the trails of 'Albatross_cli.http_host'
      *)
      begin
        let sha_str =
          Digestif.SHA256.(to_raw_string (digest_string "foo"))
          |> Ohex.encode
        in
        Fmt.str "/hash?sha256=%s" sha_str
      end;
      begin
        let jobname = "foo" in
        "/job/" ^ jobname ^ "/build/latest"
      end;
      begin
        let job = "foo" in
        let build = Uuidm.(v4_gen (Random.State.make_self_init ()) () |> to_string) in
        "/job/" ^ job ^ "/build/" ^ build ^ "/main-binary"
      end;
      begin
        let old_uuid = Uuidm.(v4_gen (Random.State.make_self_init ()) () |> to_string) in
        let new_uuid = Uuidm.(v4_gen (Random.State.make_self_init ()) () |> to_string) in
        Fmt.str "/compare/%s/%s" old_uuid new_uuid
      end;
    ]
    |> List.map Alcotest.(fun p ->
        test_case ("" ^ p) `Quick (test_link `GET p))
  ]

View file

@ -3,7 +3,7 @@ let ( >>| ) x f = Result.map f x
module type CONN = Caqti_blocking.CONNECTION module type CONN = Caqti_blocking.CONNECTION
let () = Mirage_crypto_rng_unix.use_default () let () = Mirage_crypto_rng_unix.initialize ()
let iter f xs = List.fold_left (fun r x -> r >>= fun () -> f x) (Ok ()) xs let iter f xs = List.fold_left (fun r x -> r >>= fun () -> f x) (Ok ()) xs
let get_opt message = function let get_opt message = function
@ -25,8 +25,8 @@ module Testable = struct
x.restricted = y.restricted && x.restricted = y.restricted &&
match x.password_hash, y.password_hash with match x.password_hash, y.password_hash with
| `Scrypt (hash, salt, params), `Scrypt (hash', salt', params') -> | `Scrypt (hash, salt, params), `Scrypt (hash', salt', params') ->
String.equal hash hash' && Cstruct.equal hash hash' &&
String.equal salt salt' && Cstruct.equal salt salt' &&
params = params' params = params'
in in
let pp ppf { Builder_web_auth.username; password_hash; restricted } = let pp ppf { Builder_web_auth.username; password_hash; restricted } =
@ -34,7 +34,7 @@ module Testable = struct
| `Scrypt (hash, salt, { Builder_web_auth.scrypt_n; scrypt_r; scrypt_p }) -> | `Scrypt (hash, salt, { Builder_web_auth.scrypt_n; scrypt_r; scrypt_p }) ->
Format.fprintf ppf "user:%s;(%d,%d,%d);%B;%a;%a" username Format.fprintf ppf "user:%s;(%d,%d,%d);%B;%a;%a" username
scrypt_n scrypt_r scrypt_p restricted scrypt_n scrypt_r scrypt_p restricted
Ohex.pp hash Ohex.pp salt Cstruct.hexdump_pp hash Cstruct.hexdump_pp salt
in in
Alcotest.testable Alcotest.testable
pp pp
@ -43,15 +43,18 @@ module Testable = struct
let file = let file =
let equal (x : Builder_db.Rep.file) (y : Builder_db.Rep.file) = let equal (x : Builder_db.Rep.file) (y : Builder_db.Rep.file) =
Fpath.equal x.filepath y.filepath && Fpath.equal x.filepath y.filepath &&
String.equal x.sha256 y.sha256 && Fpath.equal x.localpath y.localpath &&
Cstruct.equal x.sha256 y.sha256 &&
x.size = y.size x.size = y.size
in in
let pp ppf { Builder_db.Rep.filepath; sha256; size } = let pp ppf { Builder_db.Rep.filepath; localpath; sha256; size } =
Format.fprintf ppf "{@[<v 1>@;<1 0>Builder_db.Rep.filepath = %a;@;<1 0>\ Format.fprintf ppf "{@[<v 1>@;<1 0>Builder_db.Rep.filepath = %a;@;<1 0>\
localpath = %a;@;<1 0>\
sha256 = %a;@;<1 0>\ sha256 = %a;@;<1 0>\
size = %d;@;<1 0>\ size = %d;@;<1 0>\
@]@,}" @]@,}"
Fpath.pp filepath Ohex.pp sha256 size Fpath.pp filepath Fpath.pp localpath
Cstruct.hexdump_pp sha256 size
in in
Alcotest.testable pp equal Alcotest.testable pp equal
@ -123,20 +126,21 @@ let test_user_unauth (module Db : CONN) =
let job_name = "test-job" let job_name = "test-job"
let script = Fpath.v "/dev/null" let script = Fpath.v "/dev/null"
let uuid = Uuidm.v4_gen (Random.State.make_self_init ()) () let uuid = Uuidm.v `V4
let console = Fpath.v "/dev/null" let console = Fpath.v "/dev/null"
let start = Option.get (Ptime.of_float_s 0.) let start = Option.get (Ptime.of_float_s 0.)
let finish = Option.get (Ptime.of_float_s 1.) let finish = Option.get (Ptime.of_float_s 1.)
let result = Builder.Exited 0 let result = Builder.Exited 0
let main_binary = let main_binary =
let filepath = Result.get_ok (Fpath.of_string "bin/hello.sh") in let filepath = Result.get_ok (Fpath.of_string "bin/hello.sh") in
let localpath = Result.get_ok (Fpath.of_string "/dev/null") in
let data = "#!/bin/sh\necho Hello, World\n" in let data = "#!/bin/sh\necho Hello, World\n" in
let sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
let size = String.length data in let size = String.length data in
{ Builder_db.Rep.filepath; sha256; size } { Builder_db.Rep.filepath; localpath; sha256; size }
let main_binary2 = let main_binary2 =
let data = "#!/bin/sh\necho Hello, World 2\n" in let data = "#!/bin/sh\necho Hello, World 2\n" in
let sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
let size = String.length data in let size = String.length data in
{ main_binary with sha256 ; size } { main_binary with sha256 ; size }
let platform = "exotic-os" let platform = "exotic-os"
@ -145,17 +149,21 @@ let fail_if_none a =
Option.to_result ~none:(`Msg "Failed to retrieve") a Option.to_result ~none:(`Msg "Failed to retrieve") a
let add_test_build user_id (module Db : CONN) = let add_test_build user_id (module Db : CONN) =
let open Builder_db in let r =
Db.start () >>= fun () -> let open Builder_db in
Db.exec Job.try_add job_name >>= fun () -> Db.start () >>= fun () ->
Db.find_opt Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id -> Db.exec Job.try_add job_name >>= fun () ->
Db.exec Build.add { Build.uuid; start; finish; result; console; script; platform; Db.find_opt Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
main_binary = None; input_id = None; user_id; job_id } >>= fun () -> Db.exec Build.add { Build.uuid; start; finish; result; console; script; platform;
Db.find last_insert_rowid () >>= fun id -> main_binary = None; input_id = None; user_id; job_id } >>= fun () ->
Db.exec Build_artifact.add (main_binary, id) >>= fun () -> Db.find last_insert_rowid () >>= fun id ->
Db.find last_insert_rowid () >>= fun main_binary_id -> Db.exec Build_artifact.add (main_binary, id) >>= fun () ->
Db.exec Build.set_main_binary (id, main_binary_id) >>= fun () -> Db.find last_insert_rowid () >>= fun main_binary_id ->
Db.commit () Db.exec Build.set_main_binary (id, main_binary_id) >>= fun () ->
Db.commit ()
in
Result.fold ~ok:Result.ok ~error:(fun _ -> Db.rollback ())
r
let with_build_db f () = let with_build_db f () =
or_fail or_fail
@ -195,7 +203,7 @@ let test_build_get_all (module Db : CONN) =
Db.collect_list Builder_db.Build.get_all job_id >>| fun builds -> Db.collect_list Builder_db.Build.get_all job_id >>| fun builds ->
Alcotest.(check int) "one build" (List.length builds) 1 Alcotest.(check int) "one build" (List.length builds) 1
let uuid' = Uuidm.v4_gen (Random.State.make_self_init ()) () let uuid' = Uuidm.v `V4
let start' = Option.get (Ptime.of_float_s 3600.) let start' = Option.get (Ptime.of_float_s 3600.)
let finish' = Option.get (Ptime.of_float_s 3601.) let finish' = Option.get (Ptime.of_float_s 3601.)
@ -219,7 +227,7 @@ let test_build_get_latest (module Db : CONN) =
Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id -> Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
Db.find_opt Builder_db.Build.get_latest_successful_with_binary (job_id, platform) Db.find_opt Builder_db.Build.get_latest_successful_with_binary (job_id, platform)
>>| get_opt "no latest build" >>| fun (_id, meta, main_binary') -> >>| get_opt "no latest build" >>| fun (_id, meta, main_binary') ->
Alcotest.(check Testable.file) "same main binary" main_binary2 main_binary'; Alcotest.(check (option Testable.file)) "same main binary" main_binary' (Some main_binary2);
Alcotest.(check Testable.uuid) "same uuid" meta.uuid uuid' Alcotest.(check Testable.uuid) "same uuid" meta.uuid uuid'
let test_build_get_previous (module Db : CONN) = let test_build_get_previous (module Db : CONN) =
@ -261,14 +269,6 @@ let test_artifact_get_by_build_uuid (module Db : CONN) =
get_opt "no build" >>| fun (_id, file) -> get_opt "no build" >>| fun (_id, file) ->
Alcotest.(check Testable.file) "same file" file main_binary Alcotest.(check Testable.file) "same file" file main_binary
let test_artifact_exists_true (module Db : CONN) =
Db.find Builder_db.Build_artifact.exists main_binary.sha256 >>| fun exists ->
Alcotest.(check bool) "main binary exists" true exists
let test_artifact_exists_false (module Db : CONN) =
Db.find Builder_db.Build_artifact.exists main_binary2.sha256 >>| fun exists ->
Alcotest.(check bool) "main binary2 doesn't exists" false exists
(* XXX: This test should fail because main_binary on the corresponding build (* XXX: This test should fail because main_binary on the corresponding build
* references its main_binary. This is not the case now due to foreign key. *) * references its main_binary. This is not the case now due to foreign key. *)
let test_artifact_remove_by_build (module Db : CONN) = let test_artifact_remove_by_build (module Db : CONN) =
@ -276,39 +276,6 @@ let test_artifact_remove_by_build (module Db : CONN) =
get_opt "no build" >>= fun (id, _build) -> get_opt "no build" >>= fun (id, _build) ->
Db.exec Builder_db.Build_artifact.remove_by_build id Db.exec Builder_db.Build_artifact.remove_by_build id
let test_get_builds_older_than (module Db : CONN) =
add_second_build (module Db) >>= fun () ->
let date = Option.get (Ptime.of_float_s (3600. /. 2.)) in
Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
Db.collect_list Builder_db.Build.get_builds_older_than (job_id, None, date) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "last build" builds [ uuid ];
Db.collect_list Builder_db.Build.get_builds_older_than (job_id, None, Ptime_clock.now ()) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
(* NOTE(dinosaure): from the most recent to the older. *)
Alcotest.(check (list Testable.uuid)) "last builds" builds [ uuid'; uuid ];
Ok ()
let test_builds_excluding_latest_n (module Db : CONN) =
add_second_build (module Db) >>= fun () ->
Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 1) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "keep recent build" builds [ uuid ];
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 2) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "keep 2 builds" builds [];
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 3) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "last more builds than we have" builds [];
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 0) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "delete all builds" builds [ uuid'; uuid ];
Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, -1) >>= fun builds ->
let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
Alcotest.(check (list Testable.uuid)) "test an incomprehensible argument (-1)" builds [ uuid'; uuid ];
Ok ()
let () = let () =
let open Alcotest in let open Alcotest in
Alcotest.run "Builder_db" [ Alcotest.run "Builder_db" [
@ -339,12 +306,6 @@ let () =
"build-artifact", [ "build-artifact", [
test_case "Get all by build" `Quick (with_build_db test_artifact_get_all_by_build); test_case "Get all by build" `Quick (with_build_db test_artifact_get_all_by_build);
test_case "Get by build uuid" `Quick (with_build_db test_artifact_get_by_build_uuid); test_case "Get by build uuid" `Quick (with_build_db test_artifact_get_by_build_uuid);
test_case "Artifact exists" `Quick (with_build_db test_artifact_exists_true);
test_case "Other artifact doesn't exists" `Quick (with_build_db test_artifact_exists_false);
test_case "Remove by build" `Quick (with_build_db test_artifact_remove_by_build); test_case "Remove by build" `Quick (with_build_db test_artifact_remove_by_build);
]; ];
"vacuum", [
test_case "Get builds older than now" `Quick (with_build_db test_get_builds_older_than);
test_case "Get older builds and keep a fixed number of then" `Quick (with_build_db test_builds_excluding_latest_n);
]
] ]