Compare commits
1 commit: 9abcbed299
59 changed files with 797 additions and 1612 deletions

CHANGES.md (26 changes)
@@ -1,27 +1,3 @@
- ## v0.2.0 (2024-09-05)
-
- A whole slew of changes. Internally, we made a lot of incremental changes and improvements without doing a release. Thus this release is rather big. There is a lot of database migrations to apply, and unfortunately they have to be applied one at a time.
-
- * Add a /failed-builds/ endpoint that lists the most recent failed builds.
- * By default don't display failed builds on the front page.
- * Times are printed with the 'Z' time zone offset indicator.
- * Link to comparisons of builds take into account whether the "input", among others the list of dependencies, is different.
- * New subcommand `builder-db extract-build` takes a build UUID and extracts the builder "full" file.
- * Add /job/<job>/build/<build>/all.tar.gz endpoint with a gzip compressed tar archive of all build artifacts.
- * Visual overhaul.
- * Add (optional) visualizations displaying package dependencies ("opam-graph") and for unikernels a "modulectomy" view of how much each OCaml module is contributing to the final binary size. The visualizations are read from a cache on disk and can be generated from a script.
- * A script hook is added on file upload. It may be used to generate visualizations or publish system packages to a repository.
- * The 404 file not found page tries to be more informative.
- * The build page for a unikernel build displays the solo5 device manifest, e.g. `with block devices "storage", and net devices "service"`.
- * URLs with trailing slash redirect to without the trailing slash.
- * Builder-web will try to be more helpful if its database doesn't exist or the database version is wrong.
- * The opam diff works for mirage 4 unikernels taking into account the opam-monorepo/duniverse packages.
- * Markdown rendering is now done using cmarkit instead of omd.
- * Builder-web doesn't display jobs older than 30 days (customizable with `--expired-jobs` command line argument) on the front page.
- * Build artifacts are stored by their content, and artifacts are automatically deduplicated. This makes builder-web much more space efficient on deployments that don't use deduplication on the filesystem level.
- * New subcommands `builder-db vacuum *` to remove older builds. Can be called from a cron job to keep disk usage bounded.
- * Lots of other improvements and bug fixes.
-
- ## v0.1.0 (2021-11-12)
+ # v0.1.0 (2021-11-12)

* Initial public release
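
The deduplication entry above corresponds to the `artifact_path` helper that appears (as a removed block) later in this diff. A minimal, self-contained sketch of that content-addressed layout, assuming Digestif for SHA-256 (the code in the diff uses `Ohex` and `Fpath`):

```ocaml
(* Content-addressed storage as described in the changelog: an artifact is
   stored at _artifacts/<two-char hex prefix>/<full hex sha256>, so equal
   contents share one file and the prefix keeps directories small. *)
let artifact_path data =
  let hex = Digestif.SHA256.(to_hex (digest_string data)) in
  let prefix = String.sub hex 0 2 in
  Filename.concat "_artifacts" (Filename.concat prefix hex)

let () = print_endline (artifact_path "example artifact contents")
```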

@@ -1,13 +1,13 @@
# Builder-web - a web frontend for reproducible builds

- Builder-web takes in submissions of builds, typically from [builder](https://github.com/robur-coop/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
+ Builder-web takes in submissions of builds, typically from [builder](https://github.com/roburio/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
Produced binaries can be downloaded and executed.
[builds.robur.coop](https://builds.robur.coop/) itself runs builder-web.

## Overview

Builder-web is a single binary web server using a sqlite3 database with versioned schemas.
- Finished builds from [builder](https://github.com/robur-coop/builder/) are uploaded to builder-web, stored and indexed in the database and presented in the web interface to the user.
+ Finished builds from [builder](https://github.com/roburio/builder/) are uploaded to builder-web, stored and indexed in the database and presented in the web interface to the user.
Users can:

* Get an overview of *jobs* - a job is typically script or opam package that is run and builds an artifact,

@@ -12,9 +12,9 @@ let scrypt_params ?(scrypt_n = 16384) ?(scrypt_r = 8) ?(scrypt_p = 1) () =
  { scrypt_n; scrypt_r; scrypt_p }

type pbkdf2_sha256 =
-   [ `Pbkdf2_sha256 of string * string * pbkdf2_sha256_params ]
+   [ `Pbkdf2_sha256 of Cstruct.t * Cstruct.t * pbkdf2_sha256_params ]

- type scrypt = [ `Scrypt of string * string * scrypt_params ]
+ type scrypt = [ `Scrypt of Cstruct.t * Cstruct.t * scrypt_params ]

type password_hash = [ pbkdf2_sha256 | scrypt ]

@@ -25,10 +25,10 @@ type 'a user_info = {
}

let pbkdf2_sha256 ~params:{ pbkdf2_sha256_iter = count } ~salt ~password =
-   Pbkdf.pbkdf2 ~prf:`SHA256 ~count ~dk_len:32l ~salt ~password
+   Pbkdf.pbkdf2 ~prf:`SHA256 ~count ~dk_len:32l ~salt ~password:(Cstruct.of_string password)

let scrypt ~params:{ scrypt_n = n; scrypt_r = r; scrypt_p = p } ~salt ~password =
-   Scrypt.scrypt ~n ~r ~p ~dk_len:32l ~salt ~password
+   Scrypt_kdf.scrypt_kdf ~n ~r ~p ~dk_len:32l ~salt ~password:(Cstruct.of_string password)

let hash ?(scrypt_params=scrypt_params ())
    ~username ~password ~restricted () =

@@ -43,10 +43,10 @@ let hash ?(scrypt_params=scrypt_params ())
let verify_password password user_info =
  match user_info.password_hash with
  | `Pbkdf2_sha256 (password_hash, salt, params) ->
-     String.equal
+     Cstruct.equal
      (pbkdf2_sha256 ~params ~salt ~password)
      password_hash
  | `Scrypt (password_hash, salt, params) ->
-     String.equal
+     Cstruct.equal
      (scrypt ~params ~salt ~password)
      password_hash
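
A hypothetical round trip through the auth API shown above: hash a password at user creation, then verify a login attempt. Names follow the diff; this sketch assumes the string-based side of the diff and that `hash` returns a `user_info` record.

```ocaml
let () =
  (* RNG must be seeded before salts can be generated. *)
  Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna);
  let user =
    Builder_web_auth.hash ~username:"alice" ~password:"correct horse" ~restricted:true ()
  in
  assert (Builder_web_auth.verify_password "correct horse" user);
  assert (not (Builder_web_auth.verify_password "wrong" user))
```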

@@ -1,3 +1,3 @@
(library
 (name builder_web_auth)
-  (libraries kdf.pbkdf kdf.scrypt mirage-crypto-rng))
+  (libraries pbkdf scrypt-kdf mirage-crypto-rng))

@@ -16,13 +16,6 @@ let defer_foreign_keys =
  Caqti_type.unit ->. Caqti_type.unit @@
  "PRAGMA defer_foreign_keys = ON"

- let build_artifacts_to_orphan =
-   Builder_db.Rep.id `build ->* Caqti_type.octets @@
-   {| SELECT a.sha256 FROM build_artifact a
-      WHERE a.build = ? AND
-        (SELECT COUNT(*) FROM build_artifact a2
-         WHERE a2.sha256 = a.sha256 AND a2.build <> a.build) = 0 |}

let connect uri =
  let* (module Db : Caqti_blocking.CONNECTION) = Caqti_blocking.connect ~tweaks_version:(1,8) uri in
  let* () = Db.exec defer_foreign_keys () in
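
The arrow combinators in this hunk and throughout the diff come from `Caqti_request.Infix` (the migration modules appear to get them via `open Grej.Infix`): `->.` builds a request expecting zero result rows, `->!` exactly one, and `->*` any number. A standalone sketch assuming that API:

```ocaml
open Caqti_request.Infix

(* unit parameter in, exactly one int row back *)
let count_builds =
  Caqti_type.unit ->! Caqti_type.int @@
  "SELECT count(*) FROM build"
```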

@@ -43,16 +36,6 @@ let do_migrate dbpath =
let migrate () dbpath =
  or_die 1 (do_migrate dbpath)

- let artifacts_dir datadir = Fpath.(datadir / "_artifacts")
- let artifact_path sha256 =
-   let sha256 = Ohex.encode sha256 in
-   (* NOTE: [sha256] is 64 characters when it's a hex sha256 checksum *)
-   (* NOTE: We add the prefix to reduce the number of files in a directory - a
-      workaround for inferior filesystems. We can easily revert this by changing
-      this function and adding a migration. *)
-   let prefix = String.sub sha256 0 2 in
-   Fpath.(v "_artifacts" / prefix / sha256)

let user_mod action dbpath scrypt_n scrypt_r scrypt_p username unrestricted =
  let scrypt_params = Builder_web_auth.scrypt_params ?scrypt_n ?scrypt_r ?scrypt_p () in
  let r =

@@ -108,7 +91,7 @@ let user_disable () dbpath username =
  match user with
  | None -> Error (`Msg "user not found")
  | Some (_, user_info) ->
-     let password_hash = `Scrypt ("", "", Builder_web_auth.scrypt_params ()) in
+     let password_hash = `Scrypt (Cstruct.empty, Cstruct.empty, Builder_web_auth.scrypt_params ()) in
    let user_info = { user_info with password_hash ; restricted = true } in
    Db.exec Builder_db.User.update_user user_info
  in

@@ -148,26 +131,6 @@ let access_remove () dbpath username jobname =
  in
  or_die 1 r

- let delete_build datadir (module Db : Caqti_blocking.CONNECTION) jobname id uuid =
-   let dir = Fpath.(v datadir / jobname / Uuidm.to_string uuid) in
-   (match Bos.OS.Dir.delete ~recurse:true dir with
-    | Ok _ -> ()
-    | Error `Msg e -> Logs.warn (fun m -> m "failed to remove build directory %a: %s" Fpath.pp dir e));
-   let* () =
-     Db.iter_s build_artifacts_to_orphan
-       (fun sha256 ->
-          let p = Fpath.(v datadir // artifact_path sha256) in
-          match Bos.OS.Path.delete p with
-          | Ok () -> Ok ()
-          | Error `Msg e ->
-            Logs.warn (fun m -> m "failed to remove orphan artifact %a: %s"
-                          Fpath.pp p e);
-            Ok ())
-       id
-   in
-   let* () = Db.exec Builder_db.Build_artifact.remove_by_build id in
-   Db.exec Builder_db.Build.remove id

let job_remove () datadir jobname =
  let dbpath = datadir ^ "/builder.sqlite3" in
  let r =

@@ -187,7 +150,12 @@ let job_remove () datadir jobname =
  let* () =
    List.fold_left (fun r (build_id, build) ->
        let* () = r in
-         delete_build datadir (module Db) jobname build_id build.Builder_db.Build.uuid)
+         let dir = Fpath.(v datadir / jobname / Uuidm.to_string build.Builder_db.Build.uuid) in
+         (match Bos.OS.Dir.delete ~recurse:true dir with
+          | Ok _ -> ()
+          | Error `Msg e -> Logs.warn (fun m -> m "failed to remove build directory %a: %s" Fpath.pp dir e));
+         let* () = Db.exec Builder_db.Build_artifact.remove_by_build build_id in
+         Db.exec Builder_db.Build.remove build_id)
      (Ok ())
      builds
  in

@@ -205,103 +173,13 @@ let job_remove () datadir jobname =
  in
  or_die 1 r

- let vacuum datadir (module Db : Caqti_blocking.CONNECTION) platform_opt job_id predicate =
-   let* jobname = Db.find Builder_db.Job.get job_id in
-   let* builds =
-     match predicate with
-     | `Date older_than ->
-       Db.collect_list Builder_db.Build.get_builds_older_than (job_id, platform_opt, older_than)
-     | `Latest latest_n ->
-       Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, platform_opt, latest_n)
-     | `Latest_successful latest_n ->
-       let* latest_n =
-         Db.find_opt Builder_db.Build.get_nth_latest_successful
-           (job_id, platform_opt, latest_n)
-       in
-       match latest_n with
-       | None ->
-         Ok []
-       | Some (id, latest_n) ->
-         let+ builds =
-           Db.collect_list Builder_db.Build.get_builds_older_than
-             (job_id, platform_opt, latest_n.finish)
-         in
-         (* Unfortunately, get_builds_older_than is non-strict comparison;
-            so we need to filter out [latest_n]. *)
-         List.filter (fun (id', _) -> id <> id') builds
-   in
-   let pp_reason ppf = function
-     | `Date older_than ->
-       Format.fprintf ppf "has no builds older than %a" (Ptime.pp_rfc3339 ()) older_than
-     | `Latest n ->
-       Format.fprintf ppf "has fewer than %d builds" n
-     | `Latest_successful n ->
-       Format.fprintf ppf "has fewer than %d successful builds" n
-   in
-   if builds = [] then
-     (* NOTE: this function may be called on *all* jobs, and in that case maybe
-        this is too verbose? *)
-     Logs.info (fun m -> m "Job %s %a; not removing any builds"
-                   jobname pp_reason predicate);
-   List.fold_left (fun r (build_id, build) ->
-       let* () = r in
-       let* () = Db.start () in
-       let* () = Db.exec defer_foreign_keys () in
-       match
-         delete_build datadir (module Db) jobname build_id
-           build.Builder_db.Build.uuid
-       with
-       | Ok () -> Db.commit ()
-       | Error _ as e ->
-         let* () = Db.rollback () in
-         e)
-     (Ok ())
-     builds
-
- let vacuum () datadir platform_opt jobnames predicate =
-   let dbpath = datadir ^ "/builder.sqlite3" in
-   let r =
-     let* (module Db : Caqti_blocking.CONNECTION) =
-       connect (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
-     in
-     let* jobs =
-       match jobnames with
-       | [] ->
-         (* We default to all jobs if no jobnames were specified *)
-         let* jobs = Db.collect_list Builder_db.Job.get_all_with_section_synopsis () in
-         Ok (List.map (fun (job_id, _, _, _) -> job_id) jobs)
-       | _ :: _ ->
-         let* (jobs, unknown_jobnames) =
-           List.fold_left
-             (fun r jobname ->
-                let* (jobs, unknown_jobnames) = r in
-                let* job_id_opt = Db.find_opt Builder_db.Job.get_id_by_name jobname in
-                match job_id_opt with
-                | Some job_id -> Ok (job_id :: jobs, unknown_jobnames)
-                | None -> Ok (jobs, jobname :: unknown_jobnames))
-             (Ok ([], []))
-             jobnames
-         in
-         match unknown_jobnames with
-         | [] -> Ok jobs
-         | _ :: _ ->
-           Error (`Msg ("Unknown job(s): " ^ String.concat ", " unknown_jobnames))
-     in
-     List.fold_left (fun r jobid ->
-         let* () = r in
-         vacuum datadir (module Db) platform_opt jobid predicate)
-       (Ok ())
-       jobs
-   in
-   or_die 1 r
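
Both `vacuum` functions above, like `job_remove` before them, iterate with the same idiom: a `List.fold_left` whose accumulator is a `result`, so the work function is no longer applied after the first `Error`. A generic sketch of the pattern:

```ocaml
let ( let* ) = Result.bind

(* Apply [f] to each element in order; after the first Error the fold
   still walks the list, but [f] is not applied again. *)
let iter_result f xs =
  List.fold_left (fun r x -> let* () = r in f x) (Ok ()) xs

let () =
  match iter_result (fun n -> if n < 3 then Ok () else Error "too big") [ 1; 2; 3 ] with
  | Ok () -> print_endline "all ok"
  | Error e -> prerr_endline e
```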

let input_ids =
-   Caqti_type.unit ->* Caqti_type.octets @@
+   Caqti_type.unit ->* Builder_db.Rep.cstruct @@
  "SELECT DISTINCT input_id FROM build WHERE input_id IS NOT NULL"

let main_artifact_hash =
-   Caqti_type.octets ->*
-   Caqti_type.t3 Caqti_type.octets Builder_db.Rep.uuid Caqti_type.string @@
+   Builder_db.Rep.cstruct ->*
+   Caqti_type.tup3 Builder_db.Rep.cstruct Builder_db.Rep.uuid Caqti_type.string @@
  {|
    SELECT a.sha256, b.uuid, j.name FROM build_artifact a, build b, job j
    WHERE b.input_id = ? AND a.id = b.main_binary AND b.job = j.id

@@ -319,12 +197,12 @@ let verify_input_id () dbpath =
  match hashes with
  | (h, uuid, jobname) :: tl ->
    List.iter (fun (h', uuid', _) ->
-       if String.equal h h' then
+       if Cstruct.equal h h' then
          ()
        else
          Logs.warn (fun m -> m "job %s input id %a with two different hashes (%a, %a), build %a and %a"
-                       jobname Ohex.pp input_id
-                       Ohex.pp h Ohex.pp h'
+                       jobname Cstruct.hexdump_pp input_id
+                       Cstruct.hexdump_pp h Cstruct.hexdump_pp h'
                      Uuidm.pp uuid Uuidm.pp uuid'))
      tl
  | [] -> ())

@@ -336,17 +214,18 @@ let num_build_artifacts =
  Caqti_type.unit ->! Caqti_type.int @@
  "SELECT count(*) FROM build_artifact"

- let build_artifacts : (unit, string * Uuidm.t * Fpath.t * string * int64, [ `One | `Zero | `Many ]) Caqti_request.t =
+ let build_artifacts : (unit, string * Uuidm.t * (Fpath.t * Fpath.t * Cstruct.t * int64), [ `One | `Zero | `Many ]) Caqti_request.t =
  Caqti_type.unit ->*
-   Caqti_type.(t5 string Builder_db.Rep.uuid Builder_db.Rep.fpath octets int64)
+   Caqti_type.(tup3 string Builder_db.Rep.uuid
+                 (tup4 Builder_db.Rep.fpath Builder_db.Rep.fpath Builder_db.Rep.cstruct int64))
  @@
-   {| SELECT job.name, b.uuid, a.filepath, a.sha256, a.size
+   {| SELECT job.name, b.uuid, a.filepath, a.localpath, a.sha256, a.size
     FROM build_artifact a, build b, job
     WHERE a.build = b.id AND b.job = job.id |}

let script_and_console : (unit, _, [`One | `Zero | `Many ]) Caqti_request.t =
  Caqti_type.unit ->*
-   Caqti_type.(t4 string Builder_db.Rep.uuid Builder_db.Rep.fpath Builder_db.Rep.fpath)
+   Caqti_type.(tup4 string Builder_db.Rep.uuid Builder_db.Rep.fpath Builder_db.Rep.fpath)
  @@
  {| SELECT job.name, b.uuid, b.console, b.script
     FROM build b, job

@@ -378,33 +257,36 @@ let verify_data_dir () datadir =
  let idx = ref 0 in
  fun () -> incr idx; if !idx mod 100 = 0 then Logs.info (fun m -> m "%d" !idx);
  in
- let verify_job_and_uuid job uuid path =
+ let verify_job_and_uuid ?fpath job uuid path =
  match Fpath.segs path with
-   | job' :: uuid' :: _tl ->
+   | job' :: uuid' :: tl ->
    if String.equal job job' then () else Logs.warn (fun m -> m "job names do not match: %s vs %s" job job');
    if String.equal (Uuidm.to_string uuid) uuid' then () else Logs.warn (fun m -> m "uuid does not match: %s vs %s" (Uuidm.to_string uuid) uuid');
+     (match fpath, tl with
+      | None, _ -> ()
+      | Some f, "output" :: tl ->
+        if Fpath.equal (Fpath.v (String.concat "/" tl)) f then
+          ()
+        else
+          Logs.err (fun m -> m "path (%a) and fpath (%a) do not match" Fpath.pp path Fpath.pp f)
+      | Some _, _ ->
+        Logs.err (fun m -> m "path is not of form <job>/<uuid>/output/<filename>: %a" Fpath.pp path))
  | _ -> Logs.err (fun m -> m "path is not of form <job>/<uuid>/...: %a" Fpath.pp path)
  in
  let* () =
-     Db.iter_s build_artifacts (fun (_job, _uuid, _fpath, sha256, size) ->
+     Db.iter_s build_artifacts (fun (job, uuid, (fpath, lpath, sha, size)) ->
      progress ();
-       if not (FpathSet.mem (artifact_path sha256) !files_tracked) then
-         let abs_path = Fpath.(v datadir // artifact_path sha256) in
-         (match Bos.OS.File.read abs_path with
-          | Error (`Msg msg) -> Logs.err (fun m -> m "file %a not present: %s" Fpath.pp abs_path msg)
-          | Ok data ->
-            files_tracked := FpathSet.add (artifact_path sha256) !files_tracked;
-            let s = Int64.of_int (String.length data) in
-            if s <> size then Logs.err (fun m -> m "File %a has different size (in DB %Lu on disk %Lu)" Fpath.pp abs_path size s);
-            let sha256' = Digestif.SHA256.(to_raw_string (digest_string data)) in
-            if not (String.equal sha256 sha256') then
-              Logs.err (fun m -> m "File %a has different hash (in DB %a on disk %a)"
-                          Fpath.pp abs_path
-                          Ohex.pp sha256
-                          Ohex.pp sha256')) ;
-         Ok ()
-       else
-         Ok ()
+       verify_job_and_uuid ~fpath job uuid lpath;
+       let abs_path = Fpath.(v datadir // lpath) in
+       (match Bos.OS.File.read abs_path with
+        | Error (`Msg msg) -> Logs.err (fun m -> m "file %a not present: %s" Fpath.pp abs_path msg)
+        | Ok data ->
+          files_tracked := FpathSet.add lpath !files_tracked;
+          let s = Int64.of_int (String.length data) in
+          if s <> size then Logs.err (fun m -> m "File %a has different size (in DB %Lu on disk %Lu)" Fpath.pp abs_path size s);
+          let sh = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
+          if not (Cstruct.equal sha sh) then Logs.err (fun m -> m "File %a has different hash (in DB %a on disk %a" Fpath.pp abs_path Cstruct.hexdump_pp sha Cstruct.hexdump_pp sh)) ;
+          Ok ()
    ) ()
  in
  Db.iter_s script_and_console (fun (job, uuid, console, script) ->
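
The hunk above checks each artifact on disk against the recorded size and SHA-256. A condensed sketch of that check, assuming Digestif (the other side of the diff used `Mirage_crypto` and `Cstruct`):

```ocaml
(* Read a file and compare it against the size and raw SHA-256 stored in
   the database. Returns true when both match. *)
let check_artifact ~path ~expected_size ~expected_sha256 =
  let data = In_channel.with_open_bin path In_channel.input_all in
  let size_ok = Int64.of_int (String.length data) = expected_size in
  let hash_ok =
    String.equal Digestif.SHA256.(to_raw_string (digest_string data)) expected_sha256
  in
  size_ok && hash_ok
```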

@@ -424,7 +306,7 @@ let verify_data_dir () datadir =
  files_untracked;
  or_die 1 r

module Verify_cache_dir = struct

  let verify_dir_exists d =
    let* dir_exists = Bos.OS.Dir.exists d in

@@ -440,7 +322,7 @@ module Verify_cache_dir = struct
  let string_is_int s = match int_of_string_opt s with
    | None -> false
    | Some _ -> true

  let verify_cache_subdir ~cachedir d =
    match Bos.OS.Dir.exists Fpath.(cachedir // d) with
    | Ok false -> ()

@@ -455,7 +337,7 @@ module Verify_cache_dir = struct
    let prefix = viz_prefix ^ "_" in
    let has_prefix = String.starts_with ~prefix dir_str in
    let has_valid_ending =
      if not has_prefix then false else
        let ending =
          String.(sub dir_str
                    (length prefix)

@@ -471,7 +353,7 @@ module Verify_cache_dir = struct
        m "Invalid cache subdirectory name: '%s'" dir_str)

  let get_latest_viz_version viz_typ =
    let* v_str, run_status = begin match viz_typ with
      | `Treemap ->
        let cmd = Bos.Cmd.(v "modulectomy" % "--version") in
        Bos.OS.Cmd.(cmd |> run_out |> out_string)

@@ -480,7 +362,7 @@ module Verify_cache_dir = struct
        Bos.OS.Cmd.(cmd |> run_out |> out_string)
    end in
    match run_status with
    | (cmd_info, `Exited 0) ->
      begin try Ok (int_of_string v_str) with Failure _ ->
        let msg =
          Fmt.str "Couldn't parse latest version from %a: '%s'"

@@ -490,7 +372,7 @@ module Verify_cache_dir = struct
        Error (`Msg msg)
      end
    | (cmd_info, _) ->
      let msg =
        Fmt.str "Error running visualization cmd: '%a'"
          Bos.Cmd.pp (Bos.OS.Cmd.run_info_cmd cmd_info)
      in

@@ -544,8 +426,8 @@ module Verify_cache_dir = struct
  type t = {
    uuid : Uuidm.t;
    job_name : string;
-     hash_opam_switch : string option;
-     hash_debug_bin : string option;
+     hash_opam_switch : Cstruct.t option;
+     hash_debug_bin : Cstruct.t option;
  }

  let repr =

@@ -556,11 +438,11 @@ module Verify_cache_dir = struct
    in
    Caqti_type.custom ~encode ~decode
      Caqti_type.(
-         t4
+         tup4
          Builder_db.Rep.uuid
          string
-           (option octets)
-           (option octets))
+           (option Builder_db.Rep.cstruct)
+           (option Builder_db.Rep.cstruct))

end

@@ -572,13 +454,12 @@ module Verify_cache_dir = struct
      ba_opam_switch.sha256 hash_opam_switch,
      ba_debug_bin.sha256 hash_debug_bin
    FROM build AS b
    WHERE b.main_binary IS NOT NULL
    LEFT JOIN build_artifact AS ba_opam_switch ON
      ba_opam_switch.build = b.id
      AND ba_opam_switch.filepath = 'opam-switch'
    LEFT JOIN build_artifact AS ba_debug_bin ON
      ba_debug_bin.build = b.id
-       AND ba_debug_bin.filepath LIKE '%.debug'
+       AND ba_debug_bin.localpath LIKE '%.debug'
  |}

  let check_viz_nonempty ~cachedir ~viz_typ ~hash =

@@ -586,7 +467,7 @@ module Verify_cache_dir = struct
    let* latest_version =
      Viz_aux.get_viz_version_from_dirs ~cachedir ~viz_typ
    in
-     let viz_input_hash = Ohex.encode hash in
+     let `Hex viz_input_hash = Hex.of_cstruct hash in
    let* viz_path =
      Viz_aux.choose_versioned_viz_path
        ~cachedir

@@ -601,7 +482,7 @@ module Verify_cache_dir = struct

  let verify_viz_file_vizdeps ~cachedir build =
    match build.Build.hash_opam_switch with
    | None ->
      Logs.warn (fun m ->
        m "%s: uuid '%a': Doesn't support dependencies viz because of \
           missing 'opam-switch'"

@@ -610,7 +491,7 @@ module Verify_cache_dir = struct
    | Some hash_opam_switch ->
      match
        check_viz_nonempty
          ~cachedir
          ~viz_typ:`Dependencies
          ~hash:hash_opam_switch
      with

@@ -631,7 +512,7 @@ module Verify_cache_dir = struct
          ~cachedir
          ~viz_typ:`Treemap
          ~hash:hash_debug_bin
      with
      | Ok () -> ()
      | Error (`Msg err) ->
        Logs.warn (fun m ->

@@ -666,7 +547,7 @@ module Verify_cache_dir = struct
    match extract_hash ~viz_typ build with
    | None -> ()
    | Some input_hash ->
-       let input_hash = Ohex.encode input_hash in
+       let `Hex input_hash = Hex.of_cstruct input_hash in
      let viz_path = Viz_aux.viz_path
          ~cachedir
          ~viz_typ

@@ -686,18 +567,18 @@ module Verify_cache_dir = struct
          Fpath.pp viz_path)

  type msg = [ `Msg of string ]

  let open_error_msg : ('a, msg) result -> ('a, [> msg]) result =
    function
    | Ok _ as v -> v
    | Error e -> Error (e : msg :> [> msg])

  let verify () datadir cachedir =
    let module Viz_aux = Builder_web.Viz_aux in
    begin
      let* datadir = Fpath.of_string datadir |> open_error_msg in
      let* cachedir = match cachedir with
        | Some d -> Fpath.of_string d |> open_error_msg
        | None -> Ok Fpath.(datadir / "_cache")
      in
      let* () = verify_dir_exists cachedir in

@@ -740,8 +621,8 @@ end
module Asn = struct
  let decode_strict codec cs =
    match Asn.decode codec cs with
-     | Ok (a, rest) ->
-       if String.length rest = 0
+     | Ok (a, cs) ->
+       if Cstruct.length cs = 0
      then Ok a
      else Error "trailing bytes"
    | Error (`Parse msg) -> Error ("parse error: " ^ msg)

@@ -799,16 +680,16 @@ let extract_full () datadir dest uuid =
  let out = console_of_string console in
  let* artifacts = Db.collect_list Builder_db.Build_artifact.get_all_by_build build_id in
  let* data =
-     List.fold_left (fun acc (_, { Builder_db.filepath; sha256; _ }) ->
+     List.fold_left (fun acc (_, { Builder_db.filepath; localpath; _ }) ->
      let* acc = acc in
-         let* data = Bos.OS.File.read Fpath.(v datadir // artifact_path sha256) in
+         let* data = Bos.OS.File.read Fpath.(v datadir // localpath) in
      Ok ((filepath, data) :: acc))
      (Ok [])
      artifacts
  in
  let exec = (job, uuid, out, start, finish, result, data) in
-   let data = Builder.Asn.exec_to_str exec in
-   Bos.OS.File.write (Fpath.v dest) data
+   let cs = Builder.Asn.exec_to_cs exec in
+   Bos.OS.File.write (Fpath.v dest) (Cstruct.to_string cs)
  in
  or_die 1 r

@@ -820,89 +701,81 @@ let help man_format cmds = function
  else `Error (true, "Unknown command: " ^ cmd)

let dbpath =
-   let doc = "sqlite3 database path." in
+   let doc = "sqlite3 database path" in
  Cmdliner.Arg.(value &
                opt non_dir_file (Builder_system.default_datadir ^ "/builder.sqlite3") &
                info ~doc ["dbpath"])

let dbpath_new =
-   let doc = "sqlite3 database path." in
+   let doc = "sqlite3 database path" in
  Cmdliner.Arg.(value &
                opt string (Builder_system.default_datadir ^ "/builder.sqlite3") &
                info ~doc ["dbpath"])

let datadir =
-   let doc = "Data directory." in
-   let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
+   let doc = "data directory" in
  Cmdliner.Arg.(value &
                opt dir Builder_system.default_datadir &
-                 info ~doc ~env ["datadir"; "d"])
+                 info ~doc ["datadir"; "d"])

let cachedir =
-   let doc = "Cache directory." in
-   let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_CACHEDIR" in
+   let doc = "cache directory" in
  Cmdliner.Arg.(value &
                opt (some dir) None &
-                 info ~doc ~env ["cachedir"])
+                 info ~doc ["cachedir"])

let jobname =
-   let doc = "Jobname." in
+   let doc = "jobname" in
  Cmdliner.Arg.(required &
                pos 0 (some string) None &
                info ~doc ~docv:"JOBNAME" [])

let username =
-   let doc = "Username." in
+   let doc = "username" in
  Cmdliner.Arg.(required &
                pos 0 (some string) None &
                info ~doc ~docv:"USERNAME" [])

let password_iter =
-   let doc = "Password hash count." in
+   let doc = "password hash count" in
  Cmdliner.Arg.(value &
                opt (some int) None &
                info ~doc ["hash-count"])

let scrypt_n =
-   let doc = "scrypt n parameter." in
+   let doc = "scrypt n parameter" in
  Cmdliner.Arg.(value &
                opt (some int) None &
                info ~doc ["scrypt-n"])

let scrypt_r =
-   let doc = "scrypt r parameter." in
+   let doc = "scrypt r parameter" in
  Cmdliner.Arg.(value &
                opt (some int) None &
                info ~doc ["scrypt-r"])

let scrypt_p =
-   let doc = "scrypt p parameter." in
+   let doc = "scrypt p parameter" in
  Cmdliner.Arg.(value &
                opt (some int) None &
                info ~doc ["scrypt-p"])

let unrestricted =
-   let doc = "Unrestricted user." in
+   let doc = "unrestricted user" in
  Cmdliner.Arg.(value & flag & info ~doc [ "unrestricted" ])

let job =
-   let doc = "Job." in
+   let doc = "job" in
  Cmdliner.Arg.(required &
                pos 1 (some string) None &
                info ~doc ~docv:"JOB" [])

let build =
-   let doc = "Build uuid." in
+   let doc = "build uuid" in
  Cmdliner.Arg.(required &
                pos 0 (some string) None &
                info ~doc ~docv:"BUILD" [])

let platform =
  let doc = "Platform." in
  Cmdliner.Arg.(value &
                opt (some string) None &
                info ~doc ~docv:"PLATFORM" ["platform"])

let full_dest =
  let doc = "path to write build file" in
  Cmdliner.Arg.(value & opt string "full" &

@@ -976,102 +849,6 @@ let job_remove_cmd =
  let info = Cmd.info ~doc "job-remove" in
  Cmd.v info term

- let vacuum_cmd =
-   let jobs =
-     Arg.(value & opt_all string [] & info ~doc:"Job(s). Can be passed multiple times." ~docv:"JOB" ["job"])
-   in
-   let ptime_conv =
-     let parse s =
-       match Ptime.of_rfc3339 s with
-       | Ok (ptime, (None | Some 0), _) ->
-         Ok (`Date ptime)
-       | Ok _ -> Error (`Msg "only UTC timezone is allowed")
-       | Error `RFC3339 (_range, e) ->
-         Error (`Msg (Format.asprintf "bad RFC3339 date-time: %a" Ptime.pp_rfc3339_error e))
-     and pp ppf (`Date ptime) =
-       Ptime.pp_rfc3339 () ppf ptime
-     in
-     Arg.conv (parse, pp)
-   in
-   let older_than =
-     let doc = "cut-off date-time" in
-     Arg.(required & pos 0 (some ptime_conv) None & info ~doc ~docv:"OLDER-THAN" [])
-   in
-   (* TODO(reynir): for now we disallow 0 so as to avoid ending up with jobs
-      without builds. I'm unsure how well builder-web works with empty jobs.
-      Then again we don't do this check for older-than... *)
-   let latest_n =
-     let doc = "latest N" in
-     let latest_n =
-       let parse s =
-         match Arg.(conv_parser int) s with
-         | Ok n when n > 0 -> Ok (`Latest n)
-         | Ok _ -> Error (`Msg "must be positive integer")
-         | Error _ as e -> e
-       and pp ppf (`Latest n) =
-         Arg.(conv_printer int) ppf n
-       in
-       Arg.conv (parse, pp)
-     in
-     Arg.(required & pos 0 (some latest_n) None & info ~doc ~docv:"LATEST-N" [])
-   in
-   let latest_n_succesful =
-     let doc = "latest N successful" in
-     let latest_n =
-       let parse s =
-         match Arg.(conv_parser int) s with
-         | Ok n when n > 0 -> Ok (`Latest_successful n)
-         | Ok _ -> Error (`Msg "must be positive integer")
-         | Error _ as e -> e
-       and pp ppf (`Latest_successful n) =
-         Arg.(conv_printer int) ppf n
-       in
-       Arg.conv (parse, pp)
-     in
-     Arg.(required & pos 0 (some latest_n) None & info ~doc ~docv:"LATEST-N" [])
-   in
-   let job_default_txt =
-     "By default all jobs are vacuumed, unless any jobs are specified using --job."
-   in
-   let vacuum_older_than =
-     let doc =
-       Printf.sprintf "Remove builds older than a date. %s" job_default_txt
-     in
-     let info = Cmd.info ~doc "older-than" in
-     let term =
-       Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ older_than)
-     in
-     Cmd.v info term
-   in
-   let vacuum_except_latest_n =
-     let doc =
-       Printf.sprintf "Remove all builds except for the latest N builds (successful or not). %s"
-         job_default_txt
-     in
-     let info = Cmd.info ~doc "except-latest" in
-     let term =
-       Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ latest_n)
-     in
-     Cmd.v info term
-   in
-   let vacuum_except_latest_n_successful =
-     let doc =
-       Printf.sprintf "Remove all builds except for builds newer than the Nth latest successful build. %s"
-         job_default_txt
-     in
-     let info = Cmd.info ~doc "except-latest-successful" in
-     let term =
-       Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ latest_n_succesful)
-     in
-     Cmd.v info term
-   in
-   let doc = "Remove old builds" in
-   Cmd.group (Cmd.info ~doc "vacuum") [
-     vacuum_older_than;
-     vacuum_except_latest_n;
-     vacuum_except_latest_n_successful;
-   ]
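
Hypothetical invocations of the vacuum command group defined above; subcommand and flag names are taken from this diff, while the date, counts, job name, and path are made up:

```sh
builder-db vacuum older-than 2023-01-01T00:00:00Z
builder-db vacuum except-latest 3 --job my-unikernel
builder-db vacuum except-latest-successful 1 --datadir /var/db/builder-web
```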

let extract_full_cmd =
  let doc = "extract a build from the database" in
  let term = Term.(

@@ -1115,7 +892,7 @@ let default_cmd, default_info =
  Cmd.info ~doc "builder-db"

let () =
-   Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna);
+   Mirage_crypto_rng_unix.initialize ();
  Cmdliner.Cmd.group
    ~default:default_cmd default_info
    [ help_cmd; migrate_cmd;

@@ -1125,7 +902,6 @@ let () =
      verify_input_id_cmd;
      verify_data_dir_cmd;
      verify_cache_dir_cmd;
-       extract_full_cmd;
-       vacuum_cmd ]
+       extract_full_cmd ]
  |> Cmdliner.Cmd.eval
  |> exit

@@ -30,7 +30,7 @@ let write_raw s buf =
      safe_close s >|= fun () ->
      Error `Exception)
  in
-   (* Logs.debug (fun m -> m "writing %a" (Ohex.pp_hexdump ()) (Bytes.unsafe_to_string buf)) ; *)
+   (* Logs.debug (fun m -> m "writing %a" Cstruct.hexdump_pp (Cstruct.of_bytes buf)) ; *)
  w 0 (Bytes.length buf)

let process =

@@ -81,28 +81,28 @@ let init_influx name data =
let run_batch_viz ~cachedir ~datadir ~configdir =
  let open Rresult.R.Infix in
  begin
    let script = Fpath.(configdir / "batch-viz.sh")
    and script_log = Fpath.(cachedir / "batch-viz.log")
    and viz_script = Fpath.(configdir / "upload-hooks" / "visualizations.sh")
    in
    Bos.OS.File.exists script >>= fun script_exists ->
    if not script_exists then begin
      Logs.warn (fun m -> m "Didn't find %s" (Fpath.to_string script));
      Ok ()
    end else
      let args =
        [ "--cache-dir=" ^ Fpath.to_string cachedir;
          "--data-dir=" ^ Fpath.to_string datadir;
          "--viz-script=" ^ Fpath.to_string viz_script ]
        |> List.map (fun s -> "\"" ^ String.escaped s ^ "\"")
        |> String.concat " "
      in
      (*> Note: The reason for appending, is that else a new startup could
          overwrite an existing running batch's log*)
      (Fpath.to_string script ^ " " ^ args
       ^ " 2>&1 >> " ^ Fpath.to_string script_log
       ^ " &")
      |> Sys.command
      |> ignore
      |> Result.ok
  end

@@ -113,14 +113,13 @@ let run_batch_viz ~cachedir ~datadir ~configdir =
      m "Error while starting batch-viz.sh: %a"
        Rresult.R.pp_msg err)

- let setup_app level influx port host datadir cachedir configdir run_batch_viz_flag expired_jobs =
+ let setup_app level influx port host datadir cachedir configdir run_batch_viz_flag =
  let dbpath = Printf.sprintf "%s/builder.sqlite3" datadir in
  let datadir = Fpath.v datadir in
  let cachedir =
    cachedir |> Option.fold ~none:Fpath.(datadir / "_cache") ~some:Fpath.v
  in
  let configdir = Fpath.v configdir in
-   let () = Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna) in
  let () = init_influx "builder-web" influx in
  let () =
    if run_batch_viz_flag then

@@ -159,7 +158,7 @@ let setup_app level influx port host datadir cachedir configdir run_batch_viz_fl
  let error_handler = Dream.error_template Builder_web.error_template in
  Dream.initialize_log ?level ();
  let dream_routes = Builder_web.(
-     routes ~datadir ~cachedir ~configdir ~expired_jobs
+     routes ~datadir ~cachedir ~configdir
    |> to_dream_routes
  )
  in

@@ -196,11 +195,10 @@ let ip_port : (Ipaddr.V4.t * int) Arg.conv =
let datadir =
  let doc = "data directory" in
  let docv = "DATA_DIR" in
-   let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
  Arg.(
    value &
    opt dir Builder_system.default_datadir &
-     info ~env [ "d"; "datadir" ] ~doc ~docv
+     info [ "d"; "datadir" ] ~doc ~docv
  )

let cachedir =

@@ -242,15 +240,11 @@ let run_batch_viz =
      log is written to CACHE_DIR/batch-viz.log" in
  Arg.(value & flag & info [ "run-batch-viz" ] ~doc)

- let expired_jobs =
-   let doc = "Amount of days after which a job is considered to be inactive if \
-              no successful build has been achieved (use 0 for infinite)" in
-   Arg.(value & opt int 30 & info [ "expired-jobs" ] ~doc)

let () =
  let term =
    Term.(const setup_app $ Logs_cli.level () $ influx $ port $ host $ datadir $
-           cachedir $ configdir $ run_batch_viz $ expired_jobs)
+           cachedir $ configdir $ run_batch_viz)
  in
  let info = Cmd.info "Builder web" ~doc:"Builder web" ~man:[] in
  Cmd.v info term

bin/dune (7 changes)

@@ -7,13 +7,10 @@
 (public_name builder-web)
 (name builder_web_app)
 (modules builder_web_app)
- (libraries builder_web builder_system mirage-crypto-rng.unix cmdliner
-   logs.cli metrics metrics-lwt metrics-influx metrics-rusage ipaddr
-   ipaddr.unix http_status_metrics))
+ (libraries builder_web builder_system mirage-crypto-rng.unix cmdliner logs.cli metrics metrics-lwt metrics-influx metrics-rusage ipaddr ipaddr.unix http_status_metrics))

(executable
 (public_name builder-db)
 (name builder_db_app)
 (modules builder_db_app)
- (libraries builder_web builder_db builder_system caqti.blocking uri bos fmt
-   logs logs.cli logs.fmt cmdliner mirage-crypto-rng.unix builder))
+ (libraries builder_web builder_db builder_system caqti.blocking uri bos fmt logs logs.cli logs.fmt cmdliner mirage-crypto-rng.unix builder))

@@ -61,10 +61,9 @@ let help man_format migrations = function

let datadir =
  let doc = "data directory containing builder.sqlite3 and data files" in
-   let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
  Cmdliner.Arg.(value &
                opt dir Builder_system.default_datadir &
-                 info ~env ~doc ["datadir"; "d"])
+                 info ~doc ["datadir"; "d"])

- let setup_log =
+ let setup_log level =

@@ -180,8 +179,6 @@ let () =
      [ f20210910 ];
      actions (module M20211105);
      actions (module M20220509);
-       actions (module M20230911);
-       actions (module M20230914);
    ])
  |> Cmd.eval
  |> exit

@@ -1,5 +1,4 @@
(executable
 (public_name builder-migrations)
 (name builder_migrations)
- (libraries builder_system builder_db caqti caqti-driver-sqlite3
-   caqti.blocking cmdliner logs logs.cli logs.fmt opam-format bos duration))
+ (libraries builder_system builder_db caqti caqti-driver-sqlite3 caqti.blocking cmdliner logs logs.cli logs.fmt opam-format bos duration))

@@ -18,11 +18,11 @@ let all_builds =
  "SELECT id FROM build"

let bin_artifact =
-   Caqti_type.int64 ->* Caqti_type.(t2 int64 string) @@
+   Caqti_type.int64 ->* Caqti_type.(tup2 int64 string) @@
  "SELECT id, filepath FROM build_artifact WHERE build = ? AND filepath LIKE 'bin/%'"

let set_main_binary =
-   Caqti_type.(t2 int64 (option string)) ->. Caqti_type.unit @@
+   Caqti_type.(tup2 int64 (option string)) ->. Caqti_type.unit @@
  "UPDATE build SET main_binary = $2 WHERE id = $1"

let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =

@@ -37,21 +37,21 @@ let new_build_file =
  |}

let collect_build_artifact =
-   Caqti_type.unit ->* Caqti_type.(t3 int64 (t3 string string octets) int64) @@
+   Caqti_type.unit ->* Caqti_type.(tup3 int64 (tup3 string string octets) int64) @@
  "SELECT id, filepath, localpath, sha256, build FROM build_artifact"

let collect_build_file =
-   Caqti_type.unit ->* Caqti_type.(t3 int64 (t3 string string octets) int64) @@
+   Caqti_type.unit ->* Caqti_type.(tup3 int64 (tup3 string string octets) int64) @@
  "SELECT id, filepath, localpath, sha256, build FROM build_file"

let insert_new_build_artifact =
-   Caqti_type.(t3 int64 (t4 string string octets int64) int64) ->. Caqti_type.unit @@
+   Caqti_type.(tup3 int64 (tup4 string string octets int64) int64) ->. Caqti_type.unit @@
  {| INSERT INTO new_build_artifact (id, filepath, localpath, sha256, size, build)
     VALUES (?, ?, ?, ?, ?, ?)
  |}

let insert_new_build_file =
-   Caqti_type.(t3 int64 (t4 string string octets int64) int64) ->. Caqti_type.unit @@
+   Caqti_type.(tup3 int64 (tup4 string string octets int64) int64) ->. Caqti_type.unit @@
  {| INSERT INTO new_build_file (id, filepath, localpath, sha256, size, build)
     VALUES (?, ?, ?, ?, ?, ?)
  |}

@@ -3,7 +3,7 @@ module Rep = Builder_db.Rep
open Grej.Infix

let broken_builds =
-   Caqti_type.unit ->* Caqti_type.t3 (Rep.id `build) Rep.uuid Caqti_type.string @@
+   Caqti_type.unit ->* Caqti_type.tup3 (Rep.id `build) Rep.uuid Caqti_type.string @@
  {| SELECT b.id, b.uuid, job.name FROM build b, job
     WHERE result_kind = 0 AND result_code = 0 AND main_binary IS NOT NULL AND job.id = b.job AND
       (SELECT COUNT( * ) FROM build_artifact a

@@ -7,11 +7,11 @@ let rollback_doc = "add datadir prefix to build_artifact.localpath"
open Grej.Infix

let build_artifacts =
-   Caqti_type.unit ->* Caqti_type.t2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath @@
+   Caqti_type.unit ->* Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath @@
  "SELECT id, localpath FROM build_artifact"

let build_artifact_update_localpath =
-   Caqti_type.t2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath ->. Caqti_type.unit @@
+   Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath ->. Caqti_type.unit @@
  "UPDATE build_artifact SET localpath = $2 WHERE id = $1"

(* We are not migrating build_file because it is unused *)

@@ -54,20 +54,20 @@ let old_build =

let collect_old_build =
  Caqti_type.unit ->*
-   Caqti_type.(t3 Builder_db.Rep.untyped_id
-                 (t3 (t4 string int64 int64 int64)
-                   (t4 int64 int (option int) (option string))
-                   (t3 octets string (option string)))
+   Caqti_type.(tup3 Builder_db.Rep.untyped_id
+                 (tup3 (tup4 string int64 int64 int64)
+                   (tup4 int64 int (option int) (option string))
+                   (tup3 octets string (option string)))
                Builder_db.Rep.untyped_id) @@
  {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg,
       console, script, main_binary, job
     FROM build |}

let insert_new_build =
-   Caqti_type.(t3 Builder_db.Rep.untyped_id
-                 (t3 (t4 string int64 int64 int64)
-                   (t4 int64 int (option int) (option string))
-                   (t3 octets string (option Builder_db.Rep.untyped_id)))
+   Caqti_type.(tup3 Builder_db.Rep.untyped_id
+                 (tup3 (tup4 string int64 int64 int64)
+                   (tup4 int64 int (option int) (option string))
+                   (tup3 octets string (option Builder_db.Rep.untyped_id)))
              Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
  {| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind,
       result_code, result_msg, console, script, main_binary, job)

@@ -82,7 +82,7 @@ let rename_build =
  "ALTER TABLE new_build RENAME TO build"

let find_main_artifact_id =
-   Caqti_type.(t2 Builder_db.Rep.untyped_id string) ->! Builder_db.Rep.untyped_id @@
+   Caqti_type.(tup2 Builder_db.Rep.untyped_id string) ->! Builder_db.Rep.untyped_id @@
  "SELECT id FROM build_artifact WHERE build = $1 AND filepath = $2"

let find_main_artifact_filepath =

@@ -91,20 +91,20 @@ let find_main_artifact_filepath =

let collect_new_build =
  Caqti_type.unit ->*
-   Caqti_type.(t3 Builder_db.Rep.untyped_id
-                 (t3 (t4 string int64 int64 int64)
-                   (t4 int64 int (option int) (option string))
-                   (t3 octets string (option Builder_db.Rep.untyped_id)))
+   Caqti_type.(tup3 Builder_db.Rep.untyped_id
+                 (tup3 (tup4 string int64 int64 int64)
+                   (tup4 int64 int (option int) (option string))
+                   (tup3 octets string (option Builder_db.Rep.untyped_id)))
                Builder_db.Rep.untyped_id) @@
  {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg,
       console, script, main_binary, job
     FROM build |}

let insert_old_build =
-   Caqti_type.(t3 Builder_db.Rep.untyped_id
-                 (t3 (t4 string int64 int64 int64)
-                   (t4 int64 int (option int) (option string))
-                   (t3 octets string (option string)))
+   Caqti_type.(tup3 Builder_db.Rep.untyped_id
+                 (tup3 (tup4 string int64 int64 int64)
+                   (tup4 int64 int (option int) (option string))
+                   (tup3 octets string (option string)))
              Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
  {| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind,
       result_code, result_msg, console, script, main_binary, job)

@@ -34,21 +34,21 @@ let old_user =

let collect_old_user =
  Caqti_type.unit ->*
-   Caqti_type.(t4 int64 string (t2 octets octets) (t3 int64 int64 int64)) @@
+   Caqti_type.(tup4 int64 string (tup2 octets octets) (tup3 int64 int64 int64)) @@
  "SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p FROM user"

let collect_new_user =
  Caqti_type.unit ->*
-   Caqti_type.(t4 int64 string (t2 octets octets) (t4 int64 int64 int64 bool)) @@
+   Caqti_type.(tup4 int64 string (tup2 octets octets) (tup4 int64 int64 int64 bool)) @@
  "SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted FROM user"

let insert_new_user =
-   Caqti_type.(t4 int64 string (t2 octets octets) (t4 int64 int64 int64 bool)) ->.
+   Caqti_type.(tup4 int64 string (tup2 octets octets) (tup4 int64 int64 int64 bool)) ->.
  Caqti_type.unit @@
  "INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"

let insert_old_user =
-   Caqti_type.(t4 int64 string (t2 octets octets) (t3 int64 int64 int64)) ->.
+   Caqti_type.(tup4 int64 string (tup2 octets octets) (tup3 int64 int64 int64)) ->.
  Caqti_type.unit @@
  "INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p) VALUES (?, ?, ?, ?, ?, ?, ?)"

@@ -42,7 +42,7 @@ let latest_successful_build =

let build_artifacts =
  Builder_db.Rep.untyped_id ->*
-   Caqti_type.t2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
+   Caqti_type.tup2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
  {| SELECT a.filepath, a.localpath
     FROM build_artifact a
     WHERE a.build = ?

@@ -106,7 +106,7 @@ let insert_tag =
  "INSERT INTO tag (tag) VALUES (?)"

let insert_job_tag =
-   Caqti_type.(t3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
+   Caqti_type.(tup3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
  Caqti_type.unit @@
  "INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)"

@@ -20,7 +20,7 @@ let latest_successful_build =

let build_artifacts =
  Builder_db.Rep.untyped_id ->*
-   Caqti_type.t2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
+   Caqti_type.tup2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
  {| SELECT a.filepath, a.localpath
     FROM build_artifact a
     WHERE a.build = ?

@@ -31,7 +31,7 @@ let insert_tag =
  "INSERT INTO tag (tag) VALUES (?)"

let insert_job_tag =
-   Caqti_type.(t3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
+   Caqti_type.(tup3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
  Caqti_type.unit @@
  "INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)"

@@ -55,11 +55,11 @@ let drop_input_id_from_build =

let builds =
  Caqti_type.unit ->*
-   Caqti_type.t4
+   Caqti_type.tup4
    Builder_db.Rep.untyped_id
-     Caqti_type.octets
-     Caqti_type.octets
-     Caqti_type.octets @@
+     Builder_db.Rep.cstruct
+     Builder_db.Rep.cstruct
+     Builder_db.Rep.cstruct @@
  {| SELECT b.id, opam.sha256, env.sha256, system.sha256
     FROM build b, build_artifact opam, build_artifact env, build_artifact system
     WHERE opam.filepath = 'opam-switch' AND env.filepath = 'build-environment'

@@ -68,7 +68,7 @@ let builds =
  |}

let set_input_id =
-   Caqti_type.t2 Builder_db.Rep.untyped_id Caqti_type.octets ->. Caqti_type.unit @@
+   Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.cstruct ->. Caqti_type.unit @@
  "UPDATE build SET input_id = $2 WHERE id = $1"

let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =

@@ -76,7 +76,7 @@ let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
  Db.exec add_input_id_to_build () >>= fun () ->
  Db.collect_list builds () >>= fun builds ->
  Grej.list_iter_result (fun (id, opam_sha, env_sha, pkg_sha) ->
-     let input_id = Digestif.SHA256.(to_raw_string (digestv_string [ opam_sha ; env_sha ; pkg_sha ])) in
+     let input_id = Mirage_crypto.Hash.SHA256.digest (Cstruct.concat [ opam_sha ; env_sha ; pkg_sha ]) in
    Db.exec set_input_id (id, input_id))
    builds >>= fun () ->
  Db.exec (Grej.set_version new_version) ()
@ -2,7 +2,7 @@ open Grej.Infix
|
|||
|
||||
let orb_left_in_builds =
|
||||
Caqti_type.unit ->*
|
||||
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
|
||||
Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
|
||||
{| SELECT id, localpath FROM build_artifact
|
||||
WHERE filepath = 'orb.deb' OR filepath = 'orb.txz'
|
||||
|}
|
||||
|
|
|
@ -2,7 +2,7 @@ open Grej.Infix
|
|||
|
||||
let deb_debug_left_in_builds =
|
||||
Caqti_type.unit ->*
|
||||
Caqti_type.t4 (Builder_db.Rep.id `build_artifact) (Builder_db.Rep.id `build)
|
||||
Caqti_type.tup4 (Builder_db.Rep.id `build_artifact) (Builder_db.Rep.id `build)
|
||||
Builder_db.Rep.fpath Builder_db.Rep.fpath @@
|
||||
{| SELECT id, build, localpath, filepath FROM build_artifact
|
||||
WHERE filepath LIKE '%.deb.debug'
|
||||
|
@ -17,7 +17,7 @@ let get_localpath =
|
|||
"SELECT localpath FROM build_artifact WHERE id = ?"
|
||||
|
||||
let update_paths =
|
||||
Caqti_type.t3 (Builder_db.Rep.id `build_artifact)
|
||||
Caqti_type.tup3 (Builder_db.Rep.id `build_artifact)
|
||||
Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
|
||||
Caqti_type.unit @@
|
||||
"UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1"
|
||||
|
|
|
@ -2,7 +2,7 @@ open Grej.Infix
|
|||
|
||||
let all_builds_with_binary : (unit, [`build] Builder_db.Rep.id * [`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
|
||||
Caqti_type.unit ->*
|
||||
Caqti_type.t4 (Builder_db.Rep.id `build) (Builder_db.Rep.id `build_artifact)
|
||||
Caqti_type.tup4 (Builder_db.Rep.id `build) (Builder_db.Rep.id `build_artifact)
|
||||
Builder_db.Rep.fpath Builder_db.Rep.fpath @@
|
||||
"SELECT b.id, b.main_binary, a.filepath, a.localpath FROM build b, build_artifact a WHERE b.main_binary = a.id AND b.main_binary IS NOT NULL"
|
||||
|
||||
|
@ -11,14 +11,14 @@ let build_not_stripped : ([`build] Builder_db.Rep.id, [`build_artifact] Builder_
|
|||
"SELECT id FROM build_artifact WHERE build = ? AND filepath LIKE '%.debug'"
|
||||
|
||||
let update_paths : ([`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, unit, [ `Zero ]) Caqti_request.t =
|
||||
Caqti_type.t3 (Builder_db.Rep.id `build_artifact)
|
||||
Caqti_type.tup3 (Builder_db.Rep.id `build_artifact)
|
||||
Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
|
||||
Caqti_type.unit @@
|
||||
"UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1"
|
||||
|
||||
let add_artifact : ((Fpath.t * Fpath.t * string) * (int64 * [`build] Builder_db.Rep.id), unit, [ `Zero]) Caqti_request.t =
|
||||
Caqti_type.(t2 (t3 Builder_db.Rep.fpath Builder_db.Rep.fpath Caqti_type.octets)
|
||||
(t2 Caqti_type.int64 (Builder_db.Rep.id `build))) ->.
|
||||
let add_artifact : ((Fpath.t * Fpath.t * Cstruct.t) * (int64 * [`build] Builder_db.Rep.id), unit, [ `Zero]) Caqti_request.t =
|
||||
Caqti_type.(tup2 (tup3 Builder_db.Rep.fpath Builder_db.Rep.fpath Builder_db.Rep.cstruct)
|
||||
(tup2 Caqti_type.int64 (Builder_db.Rep.id `build))) ->.
|
||||
Caqti_type.unit @@
|
||||
"INSERT INTO build_artifact (filepath, localpath, sha256, size, build) VALUES (?, ?, ?, ?, ?)"
|
||||
|
||||
|
@ -48,8 +48,7 @@ let fixup datadir (module Db : Caqti_blocking.CONNECTION) =
|
|||
in
|
||||
assert (r = 0);
|
||||
Bos.OS.File.read (Fpath.append datadir artifact_lpath) >>= fun data ->
|
||||
let size = Int64.of_int (String.length data)
|
||||
and sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
|
||||
let size = Int64.of_int (String.length data) and sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
|
||||
Db.exec update_paths (artifact_id, new_artifact_lpath, path artifact_fpath) >>= fun () ->
|
||||
Db.exec add_artifact ((artifact_fpath, artifact_lpath, sha256), (size, build_id)) >>= fun () ->
|
||||
Db.find Builder_db.last_insert_rowid () >>= fun new_build_artifact_id ->
|
||||
|
|
|
@ -2,11 +2,11 @@ open Grej.Infix
|
|||
|
||||
let all_build_artifacts_with_dot_slash : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
|
||||
Caqti_type.unit ->*
|
||||
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
|
||||
Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
|
||||
"SELECT id, filepath FROM build_artifact WHERE filepath LIKE './%'"
|
||||
|
||||
let update_path : ([`build_artifact] Builder_db.Rep.id * Fpath.t, unit, [< `Zero | `One | `Many > `Zero ]) Caqti_request.t =
|
||||
Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath ->.
|
||||
Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath ->.
|
||||
Caqti_type.unit @@
|
||||
"UPDATE build_artifact SET filepath = $2 WHERE id = $1"
|
||||
|
||||
|
|
|
@ -40,11 +40,11 @@ let copy_old_build =
|
|||
|
||||
let old_build_execution_result =
|
||||
Caqti_type.unit ->*
|
||||
Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) @@
|
||||
Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) @@
|
||||
"SELECT id, result_kind, result_code FROM build"
|
||||
|
||||
let update_new_build_execution_result =
|
||||
Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int) ->. Caqti_type.unit @@
|
||||
Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int) ->. Caqti_type.unit @@
|
||||
"UPDATE new_build SET result_code = $2 WHERE id = $1"
|
||||
|
||||
let old_build =
|
||||
|
@ -83,11 +83,11 @@ let copy_new_build =
|
|||
|
||||
let new_build_execution_result =
|
||||
Caqti_type.unit ->*
|
||||
Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int) @@
|
||||
Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int) @@
|
||||
"SELECT id, result_code FROM build"
|
||||
|
||||
let update_old_build_execution_result =
|
||||
Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) ->.
|
||||
Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) ->.
|
||||
Caqti_type.unit @@
|
||||
"UPDATE new_build SET result_kind = $2, result_code = $3 WHERE id = $1"
|
||||
|
||||
|
|
|
@@ -2,12 +2,12 @@ open Grej.Infix

 let all_build_artifacts_like_hashes : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
   Caqti_type.unit ->*
-  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
+  Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
   "SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%.build-hashes'"

 let all_build_artifacts_like_readme : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
   Caqti_type.unit ->*
-  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
+  Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
   "SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%README.md'"

 let fixup datadir (module Db : Caqti_blocking.CONNECTION) =
@@ -8,8 +8,8 @@ open Grej.Infix
 module Asn = struct
   let decode_strict codec cs =
     match Asn.decode codec cs with
-    | Ok (a, rest) ->
-      if String.length rest = 0
+    | Ok (a, cs) ->
+      if Cstruct.length cs = 0
       then Ok a
       else Error "trailing bytes"
     | Error (`Parse msg) -> Error ("parse error: " ^ msg)
@@ -95,24 +95,24 @@ let copy_from_new_build =

 let old_build_console_script =
   Caqti_type.unit ->*
-  Caqti_type.(t4 (Builder_db.Rep.id (`build : [ `build ]))
-                (t2 string Builder_db.Rep.uuid) octets string) @@
+  Caqti_type.(tup4 (Builder_db.Rep.id (`build : [ `build ]))
+                (tup2 string Builder_db.Rep.uuid) Builder_db.Rep.cstruct string) @@
   "SELECT b.id, job.name, b.uuid, b.console, b.script FROM build b, job WHERE b.job = job.id"

 let update_new_build_console_script =
-  Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ]))
+  Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ]))
                 Builder_db.Rep.fpath Builder_db.Rep.fpath) ->.
   Caqti_type.unit @@
   "UPDATE new_build SET console = $2, script = $3 WHERE id = $1"

 let new_build_console_script =
   Caqti_type.unit ->*
-  Caqti_type.t3 (Builder_db.Rep.id (`build : [ `build ]))
+  Caqti_type.tup3 (Builder_db.Rep.id (`build : [ `build ]))
     Builder_db.Rep.fpath Builder_db.Rep.fpath @@
   "SELECT id, console, script FROM build"

 let update_old_build_console_script =
-  Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) octets string) ->.
+  Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) Builder_db.Rep.cstruct string) ->.
   Caqti_type.unit @@
   "UPDATE new_build SET console = $2, script = $3 WHERE id = $1"
@@ -2,13 +2,13 @@ open Grej.Infix

 let mixups =
   Caqti_type.unit ->*
-  Caqti_type.t3 (Builder_db.Rep.id (`build : [`build]))
+  Caqti_type.tup3 (Builder_db.Rep.id (`build : [`build]))
     Builder_db.Rep.fpath Builder_db.Rep.fpath @@
   "SELECT id, console, script FROM build \
    WHERE console LIKE '%/script.txt' AND script LIKE '%/console.txt'"

 let fixup =
-  Caqti_type.t3 (Builder_db.Rep.id (`build : [`build]))
+  Caqti_type.tup3 (Builder_db.Rep.id (`build : [`build]))
     Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
   Caqti_type.unit @@
   "UPDATE build SET console = $2, script = $3 WHERE id = $1"
@@ -73,11 +73,11 @@ let copy_from_new_build =
 |}

 let build_id_and_user =
-  Caqti_type.unit ->* Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int64) @@
+  Caqti_type.unit ->* Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int64) @@
   "SELECT id, user FROM build"

 let update_new_build_platform =
-  Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) string) ->. Caqti_type.unit @@
+  Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) string) ->. Caqti_type.unit @@
   "UPDATE new_build SET platform = $2 WHERE id = $1"

 let drop_build =
@@ -23,21 +23,21 @@ let new_uuid_rep =

 let uuids_byte_encoded_q =
   Caqti_type.unit ->*
-  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep @@
+  Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep @@
   "SELECT id, uuid FROM build"

 let uuids_hex_encoded_q =
   Caqti_type.unit ->*
-  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep @@
+  Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep @@
   "SELECT id, uuid FROM build"

 let migrate_q =
-  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep ->.
+  Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep ->.
   Caqti_type.unit @@
   "UPDATE build SET uuid = $2 WHERE id = $1"

 let rollback_q =
-  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep ->.
+  Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep ->.
   Caqti_type.unit @@
   "UPDATE build SET uuid = $2 WHERE id = $1"
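This migration rewrites build UUIDs between a raw byte encoding and a hex encoding; `old_uuid_rep` and `new_uuid_rep` are defined earlier in the file and not shown in the hunk. A plausible sketch of what such representations look like, assuming the Uuidm API of that era (`to_bytes`/`of_bytes` for the raw 16-byte form, `to_string`/`of_string` for the hex-and-dashes form):

```ocaml
(* Hypothetical reconstruction of the two UUID column representations. *)
let old_uuid_rep : Uuidm.t Caqti_type.t = (* raw 16 bytes in the column *)
  let encode t = Ok (Uuidm.to_bytes t) in
  let decode s = Option.to_result ~none:"invalid UUID" (Uuidm.of_bytes s) in
  Caqti_type.custom ~encode ~decode Caqti_type.string

let new_uuid_rep : Uuidm.t Caqti_type.t = (* hex-encoded text *)
  let encode t = Ok (Uuidm.to_string t) in
  let decode s = Option.to_result ~none:"invalid UUID" (Uuidm.of_string s) in
  Caqti_type.custom ~encode ~decode Caqti_type.string
```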
@@ -1,32 +0,0 @@
-let new_version = 17L and old_version = 16L
-and identifier = "2023-09-11"
-and migrate_doc = "index failed builds on main binary is null"
-and rollback_doc = "index failed builds on exit code"
-
-open Grej.Syntax
-
-let drop_idx_build_failed =
-  Caqti_type.(unit ->. unit) @@
-  "DROP INDEX idx_build_failed"
-
-let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:old_version (module Db) in
-  let* () = Db.exec drop_idx_build_failed () in
-  let* () =
-    Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
-             "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
-              WHERE main_binary IS NULL")
-      ()
-  in
-  Db.exec (Grej.set_version new_version) ()
-
-let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:new_version (module Db) in
-  let* () = Db.exec drop_idx_build_failed () in
-  let* () =
-    Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
-             "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
-              WHERE result_code <> 0")
-      ()
-  in
-  Db.exec (Grej.set_version old_version) ()
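The deleted migration above (and the others in this change set) follows the same shape: check the schema version, swap the DDL, bump the version. The `Grej` helpers are builder-web's in-tree migration library; conceptually, the version guard is just a check of SQLite's `user_version` pragma. A sketch of the idea, not the actual Grej code, assuming Caqti 2.x:

```ocaml
(* Conceptual sketch of a user_version guard like Grej.check_version. *)
let check_version ~user_version (module Db : Caqti_blocking.CONNECTION) =
  let q =
    Caqti_request.Infix.(Caqti_type.unit ->! Caqti_type.int64)
      "PRAGMA user_version"
  in
  match Db.find q () with
  | Error e -> Error (`Msg (Format.asprintf "%a" Caqti_error.pp e))
  | Ok v when Int64.equal v user_version -> Ok ()
  | Ok v ->
    Error (`Msg (Printf.sprintf "expected schema version %Ld, got %Ld"
                   user_version v))
```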
@@ -1,162 +0,0 @@
-let new_version = 18L and old_version = 17L
-and identifier = "2023-09-14"
-and migrate_doc = "Artifacts are stored content-addressed in the filesystem"
-and rollback_doc = "Artifacts are stored under their build's job name and uuid"
-
-open Grej.Syntax
-
-let new_build_artifact =
-  Caqti_type.unit ->. Caqti_type.unit @@
-  {| CREATE TABLE new_build_artifact (
-       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
-       filepath TEXT NOT NULL,
-       sha256 BLOB NOT NULL,
-       size INTEGER NOT NULL,
-       build INTEGER NOT NULL,
-
-       FOREIGN KEY(build) REFERENCES build(id),
-       UNIQUE(build, filepath)
-     )
-  |}
-
-let old_build_artifact =
-  Caqti_type.unit ->. Caqti_type.unit @@
-  {| CREATE TABLE new_build_artifact (
-       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
-       filepath TEXT NOT NULL, -- the path as in the build
-       localpath TEXT NOT NULL, -- local path to the file on disk
-       sha256 BLOB NOT NULL,
-       size INTEGER NOT NULL,
-       build INTEGER NOT NULL,
-
-       FOREIGN KEY(build) REFERENCES build(id),
-       UNIQUE(build, filepath)
-     )
-  |}
-
-let idx_build_artifact_sha256 =
-  Caqti_type.unit ->. Caqti_type.unit @@
-  "CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)"
-
-let idx_build_artifact_build =
-  Caqti_type.unit ->. Caqti_type.unit @@
-  "CREATE INDEX idx_build_artifact_build ON build_artifact(build)"
-
-let copy_new_build_artifact =
-  Caqti_type.(unit ->. unit) @@
-  {| INSERT INTO new_build_artifact(id, filepath, sha256, size, build)
-     SELECT id, filepath, sha256, size, build
-     FROM build_artifact
-  |}
-
-let copy_old_build_artifact =
-  Caqti_type.(unit ->. unit) @@
-  {| INSERT INTO new_build_artifact(id, filepath, localpath, sha256, size, build)
-     SELECT a.id, a.filepath,
-       j.name || '/' || b.uuid || '/output/' || a.filepath,
-       a.sha256, a.size, a.build
-     FROM build_artifact a, job j, build b
-     WHERE b.id = a.build AND j.id = b.job
-  |}
-
-let new_build_artifact_paths =
-  Caqti_type.unit ->* Caqti_type.(t2 string string) @@
-  {| SELECT localpath, '_artifacts/' || substr(lower(hex(sha256)), 1, 2) || '/' || lower(hex(sha256))
-     FROM build_artifact
-  |}
-
-let old_build_artifact_paths =
-  Caqti_type.unit ->* Caqti_type.(t2 string string) @@
-  {| SELECT '_artifacts/' || substr(lower(hex(a.sha256)), 1, 2) || '/' || lower(hex(a.sha256)),
-       j.name || '/' || b.uuid || '/output/' || a.filepath
-     FROM build_artifact a, job j, build b
-     WHERE b.id = a.build AND j.id = b.job
-  |}
-
-let drop_build_artifact =
-  Caqti_type.(unit ->. unit) @@
-  "DROP TABLE build_artifact"
-
-let rename_build_artifact =
-  Caqti_type.(unit ->. unit) @@
-  "ALTER TABLE new_build_artifact RENAME TO build_artifact"
-
-let move_paths ?force datadir (old_path, new_path) =
-  let old_path = Fpath.(datadir // v old_path) and new_path = Fpath.(datadir // v new_path) in
-  let* _created = Bos.OS.Dir.create (Fpath.parent new_path) in
-  Bos.OS.Path.move ?force old_path new_path
-
-let copy_paths datadir (old_path, new_path) =
-  let old_path = Fpath.(datadir // v old_path) and new_path = Fpath.(datadir // v new_path) in
-  let new_path_tmp = Fpath.(new_path + "tmp") in
-  let* _created = Bos.OS.Dir.create (Fpath.parent new_path) in
-  let cmd = Bos.Cmd.(v "cp" % p old_path % p new_path_tmp) in
-  let* () =
-    match Bos.OS.Cmd.run_status cmd with
-    | Ok (`Exited 0) ->
-      Ok ()
-    | Ok status ->
-      let _ = Bos.OS.Path.delete new_path_tmp in
-      Error (`Msg (Fmt.str "cp failed: %a" Bos.OS.Cmd.pp_status status))
-    | Error _ as e ->
-      let _ = Bos.OS.Path.delete new_path_tmp in
-      e
-  in
-  Bos.OS.Path.move ~force:true new_path_tmp new_path
-
-let migrate datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:old_version (module Db) in
-  let* () = Db.exec new_build_artifact () in
-  let* () = Db.exec copy_new_build_artifact () in
-  let* () = Db.iter_s new_build_artifact_paths (move_paths ~force:true datadir) () in
-  let* () = Db.exec drop_build_artifact () in
-  let* () = Db.exec rename_build_artifact () in
-  let* () = Db.exec idx_build_artifact_sha256 () in
-  let* () = Db.exec idx_build_artifact_build () in
-  Db.exec (Grej.set_version new_version) ()
-
-let rollback datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:new_version (module Db) in
-  let* () = Db.exec old_build_artifact () in
-  let* () = Db.exec copy_old_build_artifact () in
-  let* () = Db.iter_s old_build_artifact_paths (copy_paths datadir) () in
-  let* () =
-    Db.iter_s old_build_artifact_paths
-      (fun (old_path, _new_path) ->
-         Bos.OS.Path.delete Fpath.(datadir // v old_path))
-      ()
-  in
-  let* () = Db.exec drop_build_artifact () in
-  let* () = Db.exec rename_build_artifact () in
-  let* () = Db.exec idx_build_artifact_sha256 () in
-  Db.exec (Grej.set_version old_version) ()
-
-(* migration failed but managed to move *some* files *)
-let fixup_migrate datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:old_version (module Db) in
-  let* () =
-    Db.iter_s new_build_artifact_paths
-      (fun (old_path, new_path) ->
-         let* old_exists = Bos.OS.Path.exists Fpath.(datadir // v old_path) in
-         let* new_exists = Bos.OS.Path.exists Fpath.(datadir // v new_path) in
-         if new_exists && not old_exists then
-           copy_paths datadir (new_path, old_path)
-         else Ok ())
-      ()
-  in
-  Db.iter_s new_build_artifact_paths
-    (fun (_old_path, new_path) ->
-       Bos.OS.Path.delete Fpath.(datadir // v new_path))
-    ()
-
-(* rollback failed but some or all artifacts were copied *)
-let fixup_rollback datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:new_version (module Db) in
-  Db.iter_s old_build_artifact_paths
-    (fun (old_path, new_path) ->
-       let* old_exists = Bos.OS.Path.exists Fpath.(datadir // v old_path) in
-       if old_exists then
-         Bos.OS.Path.delete Fpath.(datadir // v new_path)
-       else
-         move_paths datadir (new_path, old_path))
-    ()
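The SQL in the deleted migration builds content-addressed locations of the form `_artifacts/<first two hex chars of sha256>/<full hex of sha256>`, which is also what makes deduplication work: identical contents hash to the identical path. The same layout, sketched in OCaml (`artifact_path` is a hypothetical helper name here; the real lookup lives elsewhere in the tree):

```ocaml
(* Sketch: content-addressed artifact path from a raw SHA-256 string,
   using the hex library that this branch depends on. *)
let artifact_path (sha256 : string) : Fpath.t =
  let (`Hex hex) = Hex.of_string sha256 in
  Fpath.(v "_artifacts" / String.sub hex 0 2 / hex)
```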
@@ -1,9 +1,9 @@
 opam-version: "2.0"
 maintainer: "Reynir Björnsson <reynir@reynir.dk>"
 authors: ["Reynir Björnsson <reynir@reynir.dk>"]
-homepage: "https://github.com/robur-coop/builder-web"
-dev-repo: "git+https://github.com/robur-coop/builder-web.git"
-bug-reports: "https://github.com/robur-coop/builder-web/issues"
+homepage: "https://github.com/roburio/builder-web"
+dev-repo: "git+https://github.com/roburio/builder-web.git"
+bug-reports: "https://github.com/roburio/builder-web/issues"
 license: "ISC"

 build: [
@@ -17,16 +17,18 @@ build: [
 depends: [
   "ocaml" {>= "4.13.0"}
   "dune" {>= "2.7.0"}
-  "builder" {>= "0.4.0"}
-  "dream" {>= "1.0.0~alpha7"}
+  "builder" {>= "0.2.0"}
+  "dream" {= "1.0.0~alpha4"}
+  "cstruct" {>= "6.0.0"}
   "bos"
-  "ohex" {>= "0.2.0"}
-  "lwt" {>= "5.7.0"}
-  "caqti" {>= "2.1.2"}
+  "hex"
+  "lwt" {>= "5.6.0"}
+  "caqti" {>= "1.8.0"}
   "caqti-lwt"
   "caqti-driver-sqlite3"
-  "mirage-crypto-rng" {>= "0.11.0"}
-  "kdf"
+  "pbkdf"
+  "mirage-crypto-rng"
+  "scrypt-kdf"
   "opam-core"
   "opam-format" {>= "2.1.0"}
   "metrics" {>= "0.3.0"}
@@ -37,27 +39,27 @@ depends: [
   "tyxml" {>= "4.3.0"}
   "ptime"
   "duration"
-  "asn1-combinators" {>= "0.3.0"}
+  "mirage-crypto"
+  "asn1-combinators"
   "logs"
   "cmdliner" {>= "1.1.0"}
   "uri"
   "fmt" {>= "0.8.7"}
-  "cmarkit" {>= "0.3.0"}
-  "tar" {>= "3.0.0"}
-  "tar-unix" {>= "3.0.0"}
+  "omd"
+  "tar"
   "owee"
   "solo5-elftool" {>= "0.3.0"}
-  "decompress" {>= "1.5.0"}
-  "digestif" {>= "1.2.0"}
-  "alcotest" {>= "1.2.0" & with-test}
+  "decompress"
+  "alcotest" {with-test}
   "ppx_deriving" {with-test}
   "ppx_deriving_yojson" {with-test}
   "yojson" {with-test}
+  "conan-unix" {>= "0.0.2"}
+  "conan-database" {>= "0.0.2"}
 ]

 synopsis: "Web interface for builder"
 description: """
-Builder-web takes in submissions of builds, typically from [builder](https://github.com/robur-coop/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
+Builder-web takes in submissions of builds, typically from [builder](https://github.com/roburio/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
 Produced binaries can be downloaded and executed.
 [builds.robur.coop](https://builds.robur.coop/) itself runs builder-web.
 """
db/builder_db.ml
@@ -4,15 +4,15 @@ open Caqti_request.Infix

 let application_id = 1234839235l

-(* Please update this when making changes! And also update
-   packaging/batch-viz.sh and packaging/visualizations.sh. *)
-let current_version = 18L
+(* Please update this when making changes! *)
+let current_version = 16L

 type 'a id = 'a Rep.id

 type file = Rep.file = {
   filepath : Fpath.t;
-  sha256 : string;
+  localpath : Fpath.t;
+  sha256 : Cstruct.t;
   size : int;
 }

@@ -57,7 +57,7 @@ module Job = struct

   let get_all_with_section_synopsis =
     Caqti_type.unit ->*
-    Caqti_type.(t4 (id `job) string (option string) (option string)) @@
+    Caqti_type.(tup4 (id `job) string (option string) (option string)) @@
     {| SELECT j.id, j.name, section.value, synopsis.value
        FROM job j, tag section_tag, tag synopsis_tag
        LEFT JOIN job_tag section ON section.job = j.id AND section.tag = section_tag.id

@@ -117,15 +117,15 @@ module Job_tag = struct
     "DROP TABLE IF EXISTS job_tag"

   let add =
-    Caqti_type.(t3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
+    Caqti_type.(tup3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
     "INSERT INTO job_tag (tag, value, job) VALUES ($1, $2, $3)"

   let update =
-    Caqti_type.(t3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
+    Caqti_type.(tup3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
     "UPDATE job_tag SET value = $2 WHERE tag = $1 AND job = $3"

   let get_value =
-    Caqti_type.(t2 (id `tag) (id `job)) ->? Caqti_type.string @@
+    Caqti_type.(tup2 (id `tag) (id `job)) ->? Caqti_type.string @@
     "SELECT value FROM job_tag WHERE tag = ? AND job = ?"

   let remove_by_job =

@@ -140,6 +140,7 @@ module Build_artifact = struct
     {| CREATE TABLE build_artifact (
          id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
          filepath TEXT NOT NULL, -- the path as in the build
+         localpath TEXT NOT NULL, -- local path to the file on disk
          sha256 BLOB NOT NULL,
          size INTEGER NOT NULL,
          build INTEGER NOT NULL,

@@ -155,30 +156,26 @@ module Build_artifact = struct

   let get =
     id `build_artifact ->! file @@
-    {| SELECT filepath, sha256, size
+    {| SELECT filepath, localpath, sha256, size
        FROM build_artifact WHERE id = ? |}

   let get_by_build_uuid =
-    Caqti_type.t2 uuid fpath ->? Caqti_type.t2 (id `build_artifact) file @@
+    Caqti_type.tup2 uuid fpath ->? Caqti_type.tup2 (id `build_artifact) file @@
     {| SELECT build_artifact.id, build_artifact.filepath,
-         build_artifact.sha256, build_artifact.size
+         build_artifact.localpath, build_artifact.sha256, build_artifact.size
        FROM build_artifact
        INNER JOIN build ON build.id = build_artifact.build
        WHERE build.uuid = ? AND build_artifact.filepath = ?
     |}

   let get_all_by_build =
-    id `build ->* Caqti_type.(t2 (id `build_artifact) file) @@
-    "SELECT id, filepath, sha256, size FROM build_artifact WHERE build = ?"
-
-  let exists =
-    Caqti_type.octets ->! Caqti_type.bool @@
-    "SELECT EXISTS(SELECT 1 FROM build_artifact WHERE sha256 = ?)"
+    id `build ->* Caqti_type.(tup2 (id `build_artifact) file) @@
+    "SELECT id, filepath, localpath, sha256, size FROM build_artifact WHERE build = ?"

   let add =
-    Caqti_type.(t2 file (id `build)) ->. Caqti_type.unit @@
-    "INSERT INTO build_artifact (filepath, sha256, size, build) \
-     VALUES (?, ?, ?, ?)"
+    Caqti_type.(tup2 file (id `build)) ->. Caqti_type.unit @@
+    "INSERT INTO build_artifact (filepath, localpath, sha256, size, build) \
+     VALUES (?, ?, ?, ?, ?)"

   let remove_by_build =
     id `build ->. Caqti_type.unit @@

@@ -199,55 +196,34 @@ module Build = struct
     script : Fpath.t;
     platform : string;
     main_binary : [`build_artifact] id option;
-    input_id : string option;
+    input_id : Cstruct.t option;
     user_id : [`user] id;
     job_id : [`job] id;
   }

-  let pp ppf t =
-    Fmt.pf ppf "@[<hov>{ uuid=@ %a;@ \
-                start=@ %a;@ \
-                finish=@ %a;@ \
-                result=@ @[<hov>%a@];@ \
-                console=@ %a;@ \
-                script=@ %a;@ \
-                platform=@ %S;@ \
-                main_binary=@ @[<hov>%a@];@ \
-                input_id=@ @[<hov>%a@];@ \
-                user_id=@ %Lx;@ \
-                job_id=@ %Lx;@ }@]"
-      Uuidm.pp t.uuid
-      Ptime.pp t.start
-      Ptime.pp t.finish
-      Builder.pp_execution_result t.result
-      Fpath.pp t.console
-      Fpath.pp t.script
-      t.platform
-      Fmt.(Dump.option int64) t.main_binary
-      Fmt.(Dump.option string) t.input_id
-      t.user_id
-      t.job_id
-
   let t =
     let rep =
-      Caqti_type.(t11
-        uuid
-        Rep.ptime
-        Rep.ptime
-        execution_result
-        fpath
-        fpath
-        string
-        (option (Rep.id `build_artifact))
-        (option octets)
+      Caqti_type.(tup3
+        (tup4
+          uuid
+          (tup2
+            Rep.ptime
+            Rep.ptime)
+          (tup2
+            execution_result
+            fpath)
+          (tup4
+            fpath
+            string
+            (option (Rep.id `build_artifact))
+            (option Rep.cstruct)))
         (id `user)
        (id `job))
     in
     let encode { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id } =
-      Ok (uuid, start, finish, result, console, script, platform, main_binary, input_id, user_id, job_id)
+      Ok ((uuid, (start, finish), (result, console), (script, platform, main_binary, input_id)), user_id, job_id)
     in
-    let decode (uuid, start, finish, result, console, script, platform, main_binary, input_id, user_id, job_id) =
+    let decode ((uuid, (start, finish), (result, console), (script, platform, main_binary, input_id)), user_id, job_id) =
       Ok { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id }
     in
     Caqti_type.custom ~encode ~decode rep
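The nested-tuple gymnastics in `Build.t` above exist because Caqti 1.x only ships `tup2`..`tup4`, so an eleven-column row has to be described as tuples of tuples, while Caqti 2.x provides `t11` directly; the `encode`/`decode` functions merely reassociate the same fields. A smaller self-contained example of the same pattern:

```ocaml
(* Six fields expressed with Caqti 1.x's tup2/tup3 only. *)
let six_ints : (int * int * int * int * int * int) Caqti_type.t =
  let rep = Caqti_type.(tup2 (tup3 int int int) (tup3 int int int)) in
  let encode (a, b, c, d, e, f) = Ok ((a, b, c), (d, e, f)) in
  let decode ((a, b, c), (d, e, f)) = Ok (a, b, c, d, e, f) in
  Caqti_type.custom ~encode ~decode rep
```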
@@ -282,7 +258,7 @@ module Build = struct
     "DROP TABLE IF EXISTS build"

   let get_by_uuid =
-    Rep.uuid ->? Caqti_type.t2 (id `build) t @@
+    Rep.uuid ->? Caqti_type.tup2 (id `build) t @@
     {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
          result_code, result_msg,
          console, script, platform, main_binary, input_id, user, job

@@ -291,7 +267,7 @@ module Build = struct
     |}

   let get_all =
-    id `job ->* Caqti_type.t2 (id `build) t @@
+    id `job ->* Caqti_type.tup2 (id `build) t @@
     {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
          result_code, result_msg, console,
          script, platform, main_binary, input_id, user, job

@@ -301,20 +277,20 @@ module Build = struct
     |}

   let get_all_failed =
-    Caqti_type.(t3 int int (option string)) ->* Caqti_type.t2 Caqti_type.string t @@
+    Caqti_type.(tup3 int int (option string)) ->* Caqti_type.tup2 Caqti_type.string t @@
     {| SELECT job.name, b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
          b.result_code, b.result_msg, b.console, b.script, b.platform,
          b.main_binary, b.input_id, b.user, b.job
        FROM build b
        INNER JOIN job ON job.id = b.job
-       WHERE b.main_binary IS NULL AND ($3 IS NULL OR b.platform = $3)
+       WHERE b.result_code <> 0 AND ($3 IS NULL OR b.platform = $3)
        ORDER BY start_d DESC, start_ps DESC
        LIMIT $2
        OFFSET $1
     |}

   let get_all_artifact_sha =
-    Caqti_type.(t2 (id `job) (option string)) ->* Caqti_type.octets @@
+    Caqti_type.(tup2 (id `job) (option string)) ->* Rep.cstruct @@
     {| SELECT DISTINCT a.sha256
        FROM build_artifact a, build b
        WHERE b.job = $1 AND b.main_binary = a.id

@@ -323,79 +299,40 @@ module Build = struct
     |}

   let get_failed_builds =
-    Caqti_type.(t2 (id `job) (option string)) ->* t @@
+    Caqti_type.(tup2 (id `job) (option string)) ->* t @@
     {| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
          result_code, result_msg, console, script,
          platform, main_binary, input_id, user, job
        FROM build
-       WHERE job = $1
-       AND main_binary IS NULL
+       WHERE job = $1 AND result_code <> 0
        AND ($2 IS NULL OR platform = $2)
        ORDER BY start_d DESC, start_ps DESC
     |}

   let get_latest_successful_with_binary =
-    Caqti_type.(t2 (id `job) string) ->? Caqti_type.t3 (id `build) t file @@
+    Caqti_type.(tup2 (id `job) string) ->? Caqti_type.tup3 (id `build) t file_opt @@
     {| SELECT b.id,
          b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
          b.result_code, b.result_msg, b.console, b.script,
          b.platform, b.main_binary, b.input_id, b.user, b.job,
-         a.filepath, a.sha256, a.size
-       FROM build b, build_artifact a
-       WHERE b.main_binary = a.id AND b.job = $1 AND b.platform = $2
-       AND b.main_binary IS NOT NULL
+         a.filepath, a.localpath, a.sha256, a.size
+       FROM build b
+       LEFT JOIN build_artifact a ON
+         b.main_binary = a.id
+       WHERE b.job = $1 AND b.platform = $2 AND b.result_code = 0
        ORDER BY b.start_d DESC, b.start_ps DESC
        LIMIT 1
     |}

-  let get_builds_older_than =
-    Caqti_type.(t3 (id `job) (option string) Rep.ptime) ->* Caqti_type.t2 (id `build) t @@
-    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
-         result_code, result_msg, console, script,
-         platform, main_binary, input_id, user, job
-       FROM build
-       WHERE job = $1
-       AND ($2 IS NULL OR platform = $2)
-       AND (finish_d < $3 OR (finish_d = $3 AND finish_ps <= $4))
-       ORDER BY start_d DESC, start_ps DESC
-    |}
-
-  let get_builds_excluding_latest_n =
-    Caqti_type.(t3 (id `job) (option string) int) ->* Caqti_type.t2 (id `build) t @@
-    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
-         result_code, result_msg, console, script,
-         platform, main_binary, input_id, user, job
-       FROM build
-       WHERE job = $1
-       AND ($2 IS NULL OR platform = $2)
-       ORDER BY start_d DESC, start_ps DESC
-       LIMIT -1 OFFSET $3
-    |}
-    (* "LIMIT -1 OFFSET n" is all rows except the first n *)
-
-  let get_nth_latest_successful =
-    Caqti_type.(t3 (id `job) (option string) int) ->? Caqti_type.t2 (id `build) t @@
-    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
-         result_code, result_msg, console, script,
-         platform, main_binary, input_id, user, job
-       FROM build
-       WHERE job = $1
-       AND ($2 IS NULL OR platform = $2)
-       AND main_binary IS NOT NULL
-       ORDER BY start_d DESC, start_ps DESC
-       LIMIT 1 OFFSET $3
-    |}
-
   let get_latest_successful =
-    Caqti_type.(t2 (id `job) (option string)) ->? t @@
+    Caqti_type.(tup2 (id `job) (option string)) ->? t @@
    {| SELECT
         b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_code, b.result_msg, b.console, b.script,
         b.platform, b.main_binary, b.input_id, b.user, b.job
       FROM build b
-      WHERE b.job = $1
+      WHERE b.job = $1 AND b.result_code = 0
       AND ($2 IS NULL OR b.platform = $2)
-      AND b.main_binary IS NOT NULL
       ORDER BY b.start_d DESC, b.start_ps DESC
       LIMIT 1
    |}
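The substantive change in the queries above is the failure criterion: the removed side treats a build with `main_binary IS NULL` as failed, while the added side keys on the recorded exit code. Side by side, as they would appear spliced into a shared query skeleton (illustrative only; the real queries above also filter by platform):

```ocaml
(* The two failure predicates used by the two schema generations. *)
let failed_where_new = "main_binary IS NULL" (* no binary was produced *)
let failed_where_old = "result_code <> 0"    (* non-zero execution result *)

(* Illustrative skeleton showing where the predicate is spliced in. *)
let failed_builds_sql predicate =
  "SELECT uuid FROM build WHERE job = $1 AND " ^ predicate ^
  " ORDER BY start_d DESC, start_ps DESC"
```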
@@ -409,7 +346,7 @@ module Build = struct
       FROM build b, build b0, build_artifact a, build_artifact a0
       WHERE b0.id = ? AND b0.job = b.job AND
         b.platform = b0.platform AND
-        b.main_binary IS NOT NULL AND
+        b.result_code = 0 AND
         a.id = b.main_binary AND a0.id = b0.main_binary AND
         a.sha256 <> a0.sha256 AND
         (b0.start_d > b.start_d OR b0.start_d = b.start_d AND b0.start_ps > b.start_ps)

@@ -426,7 +363,7 @@ module Build = struct
       FROM build b, build b0, build_artifact a, build_artifact a0
       WHERE b0.id = ? AND b0.job = b.job AND
         b.platform = b0.platform AND
-        b.main_binary IS NOT NULL AND
+        b.result_code = 0 AND
         a.id = b.main_binary AND a0.id = b0.main_binary AND
         a.sha256 <> a0.sha256 AND
         (b0.start_d < b.start_d OR b0.start_d = b.start_d AND b0.start_ps < b.start_ps)

@@ -446,7 +383,7 @@ module Build = struct
     |}

   let get_same_input_different_output_hashes =
-    id `build ->* Caqti_type.octets @@
+    id `build ->* Rep.cstruct @@
     {| SELECT DISTINCT a.sha256
        FROM build b0, build_artifact a0, build b, build_artifact a
        WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 <> a.sha256

@@ -455,7 +392,7 @@ module Build = struct
     |}

   let get_different_input_same_output_input_ids =
-    id `build ->* Caqti_type.octets @@
+    id `build ->* Rep.cstruct @@
     {| SELECT DISTINCT b.input_id
        FROM build b0, build_artifact a0, build b, build_artifact a
        WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 = a.sha256

@@ -463,7 +400,7 @@ module Build = struct
     |}

   let get_one_by_input_id =
-    Caqti_type.octets ->! t @@
+    Rep.cstruct ->! t @@
     {| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
          result_code, result_msg, console, script,
          platform, main_binary, input_id, user, job

@@ -487,7 +424,7 @@ module Build = struct
     |}

   let get_by_hash =
-    Caqti_type.octets ->! t @@
+    Rep.cstruct ->! t @@
     {| SELECT
          b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
          b.result_code, b.result_msg, b.console, b.script,

@@ -500,11 +437,11 @@ module Build = struct
     |}

   let get_with_main_binary_by_hash =
-    Caqti_type.octets ->! Caqti_type.t2 t file_opt @@
+    Rep.cstruct ->! Caqti_type.tup2 t file_opt @@
     {| SELECT b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
          b.result_code, b.result_msg, b.console, b.script,
          b.platform, b.main_binary, b.input_id, b.user, b.job,
-         a.filepath, a.sha256, a.size
+         a.filepath, a.localpath, a.sha256, a.size
        FROM build_artifact a
        INNER JOIN build b ON b.id = a.build
        WHERE a.sha256 = ?

@@ -513,7 +450,7 @@ module Build = struct
     |}

   let get_with_jobname_by_hash =
-    Caqti_type.octets ->? Caqti_type.t2 Caqti_type.string t @@
+    Rep.cstruct ->? Caqti_type.tup2 Caqti_type.string t @@
     {| SELECT job.name,
          b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
          b.result_code, b.result_msg,

@@ -527,7 +464,7 @@ module Build = struct
     |}

   let set_main_binary =
-    Caqti_type.t2 (id `build) (id `build_artifact) ->. Caqti_type.unit @@
+    Caqti_type.tup2 (id `build) (id `build_artifact) ->. Caqti_type.unit @@
     "UPDATE build SET main_binary = $2 WHERE id = $1"

   let remove =

@@ -555,7 +492,7 @@ module User = struct
     "DROP TABLE IF EXISTS user"

   let get_user =
-    Caqti_type.string ->? Caqti_type.t2 (id `user) user_info @@
+    Caqti_type.string ->? Caqti_type.tup2 (id `user) user_info @@
     {| SELECT id, username, password_hash, password_salt,
          scrypt_n, scrypt_r, scrypt_p, restricted
        FROM user

@@ -609,15 +546,15 @@ module Access_list = struct
     "DROP TABLE IF EXISTS access_list"

   let get =
-    Caqti_type.t2 (id `user) (id `job) ->! id `access_list @@
+    Caqti_type.tup2 (id `user) (id `job) ->! id `access_list @@
     "SELECT id FROM access_list WHERE user = ? AND job = ?"

   let add =
-    Caqti_type.t2 (id `user) (id `job) ->. Caqti_type.unit @@
+    Caqti_type.tup2 (id `user) (id `job) ->. Caqti_type.unit @@
     "INSERT INTO access_list (user, job) VALUES (?, ?)"

   let remove =
-    Caqti_type.t2 (id `user) (id `job) ->. Caqti_type.unit @@
+    Caqti_type.tup2 (id `user) (id `job) ->. Caqti_type.unit @@
     "DELETE FROM access_list WHERE user = ? AND job = ?"

   let remove_by_job =

@@ -648,15 +585,13 @@ let migrate = [
   Caqti_type.unit ->. Caqti_type.unit @@
   "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)";
   Caqti_type.unit ->. Caqti_type.unit @@
-  "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) WHERE main_binary IS NULL";
+  "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) WHERE result_code <> 0";
   Caqti_type.unit ->. Caqti_type.unit @@
   "CREATE INDEX idx_build_input_id ON build(input_id)";
   Caqti_type.unit ->. Caqti_type.unit @@
   "CREATE INDEX idx_build_main_binary ON build(main_binary)";
   Caqti_type.unit ->. Caqti_type.unit @@
   "CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)";
-  Caqti_type.unit ->. Caqti_type.unit @@
-  "CREATE INDEX idx_build_artifact_build ON build_artifact(build)";
   set_current_version;
   set_application_id;
 ]

@@ -670,8 +605,6 @@ let rollback = [
   Build.rollback;
   Job.rollback;
-  Caqti_type.unit ->. Caqti_type.unit @@
-  "DROP INDEX IF EXISTS idx_build_artifact_build";
   Caqti_type.unit ->. Caqti_type.unit @@
   "DROP INDEX IF EXISTS idx_build_artifact_sha256";
   Caqti_type.unit ->. Caqti_type.unit @@
   "DROP INDEX IF EXISTS idx_build_failed";
@@ -3,7 +3,8 @@ module Rep : sig
   type 'a id
   type file = {
     filepath : Fpath.t;
-    sha256 : string;
+    localpath : Fpath.t;
+    sha256 : Cstruct.t;
     size : int;
   }

@@ -13,6 +14,7 @@ module Rep : sig
   val uuid : Uuidm.t Caqti_type.t
   val ptime : Ptime.t Caqti_type.t
   val fpath : Fpath.t Caqti_type.t
+  val cstruct : Cstruct.t Caqti_type.t
   val file : file Caqti_type.t
   val execution_result : Builder.execution_result Caqti_type.t
   val console : (int * string) list Caqti_type.t

@@ -21,7 +23,8 @@ type 'a id = 'a Rep.id

 type file = Rep.file = {
   filepath : Fpath.t;
-  sha256 : string;
+  localpath : Fpath.t;
+  sha256 : Cstruct.t;
   size : int;
 }

@@ -84,7 +87,6 @@ module Build_artifact : sig
     Caqti_request.t
   val get_all_by_build :
     ([`build] id, [`build_artifact] id * file, [ `Many | `One | `Zero ]) Caqti_request.t
-  val exists : (string, bool, [ `One ]) Caqti_request.t
   val add :
     (file * [`build] id, unit, [ `Zero ]) Caqti_request.t
   val remove_by_build :

@@ -104,13 +106,11 @@ sig
     script : Fpath.t;
     platform : string;
     main_binary : [`build_artifact] id option;
-    input_id : string option;
+    input_id : Cstruct.t option;
     user_id : [`user] id;
     job_id : [`job] id;
   }

-  val pp : t Fmt.t
-
   val get_by_uuid :
     (Uuidm.t, [`build] id * t, [ `One | `Zero ])
     Caqti_request.t

@@ -119,21 +119,15 @@ sig
   val get_all_failed :
     (int * int * string option, string * t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_all_artifact_sha :
-    ([`job] id * string option, string, [ `Many | `One | `Zero ]) Caqti_request.t
+    ([`job] id * string option, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_latest_successful_with_binary :
-    ([`job] id * string, [`build] id * t * file, [ `One | `Zero ])
+    ([`job] id * string, [`build] id * t * file option, [ `One | `Zero ])
     Caqti_request.t
   val get_failed_builds :
     ([`job] id * string option, t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_latest_successful :
     ([`job] id * string option, t, [ `One | `Zero ])
     Caqti_request.t
-  val get_builds_older_than :
-    ([`job] id * string option * Ptime.t, [`build] id * t, [ `Many | `One | `Zero ]) Caqti_request.t
-  val get_builds_excluding_latest_n :
-    ([`job] id * string option * int, [`build] id * t, [ `Many | `One | `Zero ]) Caqti_request.t
-  val get_nth_latest_successful :
-    ([`job] id * string option * int, [`build] id * t, [ `One | `Zero ]) Caqti_request.t
   val get_previous_successful_different_output :
     ([`build] id, t, [ `One | `Zero ])
     Caqti_request.t

@@ -143,20 +137,20 @@ sig
   val get_same_input_same_output_builds :
     ([`build] id, t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_same_input_different_output_hashes :
-    ([`build] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
+    ([`build] id, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_different_input_same_output_input_ids :
-    ([`build] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
+    ([`build] id, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_one_by_input_id :
-    (string, t, [ `One ]) Caqti_request.t
+    (Cstruct.t, t, [ `One ]) Caqti_request.t
   val get_platforms_for_job :
     ([`job] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
   val add : (t, unit, [ `Zero ]) Caqti_request.t
   val get_by_hash :
-    (string, t, [ `One]) Caqti_request.t
+    (Cstruct.t, t, [ `One]) Caqti_request.t
   val get_with_main_binary_by_hash :
-    (string, t * file option, [ `One]) Caqti_request.t
+    (Cstruct.t, t * file option, [ `One]) Caqti_request.t
   val get_with_jobname_by_hash :
-    (string, string * t, [ `One | `Zero]) Caqti_request.t
+    (Cstruct.t, string * t, [ `One | `Zero]) Caqti_request.t
   val set_main_binary : ([`build] id * [`build_artifact] id, unit, [ `Zero ]) Caqti_request.t
   val remove : ([`build] id, unit, [ `Zero]) Caqti_request.t
 end
db/dune
@@ -1,3 +1,3 @@
 (library
  (name builder_db)
- (libraries builder caqti caqti-driver-sqlite3 asn1-combinators builder_web_auth))
+ (libraries builder caqti caqti-driver-sqlite3 asn1-combinators mirage-crypto builder_web_auth))
@@ -1,8 +1,8 @@
 module Asn = struct
   let decode_strict codec cs =
     match Asn.decode codec cs with
-    | Ok (a, rest) ->
-      if String.length rest = 0
+    | Ok (a, cs) ->
+      if Cstruct.length cs = 0
       then Ok a
       else Error "trailing bytes"
     | Error (`Parse msg) -> Error ("parse error: " ^ msg)

@@ -17,7 +17,7 @@ module Asn = struct
         (required ~label:"delta" int)
         (required ~label:"data" utf8_string)))

-  let console_of_str, console_to_str = projections_of console
+  let console_of_cs, console_to_cs = projections_of console
 end

 type untyped_id = int64

@@ -30,7 +30,8 @@ let id_to_int64 (id : 'a id) : int64 = id

 type file = {
   filepath : Fpath.t;
-  sha256 : string;
+  localpath : Fpath.t;
+  sha256 : Cstruct.t;
   size : int;
 }

@@ -47,7 +48,7 @@ let ptime =
   let encode t = Ok (Ptime.Span.to_d_ps (Ptime.to_span t)) in
   let decode (d, ps) = Ok (Ptime.v (d, ps))
   in
-  let rep = Caqti_type.(t2 int int64) in
+  let rep = Caqti_type.(tup2 int int64) in
   Caqti_type.custom ~encode ~decode rep

 let fpath =

@@ -56,25 +57,30 @@ let fpath =
     |> Result.map_error (fun (`Msg s) -> s) in
   Caqti_type.custom ~encode ~decode Caqti_type.string

+let cstruct =
+  let encode t = Ok (Cstruct.to_string t) in
+  let decode s = Ok (Cstruct.of_string s) in
+  Caqti_type.custom ~encode ~decode Caqti_type.octets
+
 let file =
-  let encode { filepath; sha256; size } =
-    Ok (filepath, sha256, size) in
-  let decode (filepath, sha256, size) =
-    Ok { filepath; sha256; size } in
-  Caqti_type.custom ~encode ~decode Caqti_type.(t3 fpath octets int)
+  let encode { filepath; localpath; sha256; size } =
+    Ok (filepath, localpath, sha256, size) in
+  let decode (filepath, localpath, sha256, size) =
+    Ok { filepath; localpath; sha256; size } in
+  Caqti_type.custom ~encode ~decode Caqti_type.(tup4 fpath fpath cstruct int)

 let file_opt =
-  let rep = Caqti_type.(t3 (option fpath) (option octets) (option int)) in
+  let rep = Caqti_type.(tup4 (option fpath) (option fpath) (option cstruct) (option int)) in
   let encode = function
-    | Some { filepath; sha256; size } ->
-      Ok (Some filepath, Some sha256, Some size)
+    | Some { filepath; localpath; sha256; size } ->
+      Ok (Some filepath, Some localpath, Some sha256, Some size)
     | None ->
-      Ok (None, None, None)
+      Ok (None, None, None, None)
   in
   let decode = function
-    | (Some filepath, Some sha256, Some size) ->
-      Ok (Some { filepath; sha256; size })
-    | (None, None, None) ->
+    | (Some filepath, Some localpath, Some sha256, Some size) ->
+      Ok (Some { filepath; localpath; sha256; size })
+    | (None, None, None, None) ->
       Ok None
     | _ ->
       (* This should not happen if the database is well-formed *)
@@ -103,25 +109,25 @@ let execution_result =
     else
       Error "bad encoding (unknown number)"
   in
-  let rep = Caqti_type.(t2 int (option string)) in
+  let rep = Caqti_type.(tup2 int (option string)) in
   Caqti_type.custom ~encode ~decode rep

 let console =
-  let encode console = Ok (Asn.console_to_str console) in
-  let decode data = Asn.console_of_str data in
-  Caqti_type.(custom ~encode ~decode octets)
+  let encode console = Ok (Asn.console_to_cs console) in
+  let decode data = Asn.console_of_cs data in
+  Caqti_type.custom ~encode ~decode cstruct

 let user_info =
-  let rep = Caqti_type.(t7 string octets octets int int int bool) in
+  let rep = Caqti_type.(tup4 string cstruct cstruct (tup4 int int int bool)) in
   let encode { Builder_web_auth.username;
                password_hash = `Scrypt (password_hash, password_salt, {
                    Builder_web_auth.scrypt_n; scrypt_r; scrypt_p
                  });
                restricted; }
     =
-    Ok (username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted)
+    Ok (username, password_hash, password_salt, (scrypt_n, scrypt_r, scrypt_p, restricted))
   in
-  let decode (username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) =
+  let decode (username, password_hash, password_salt, (scrypt_n, scrypt_r, scrypt_p, restricted)) =
     Ok { Builder_web_auth.username;
          password_hash =
            `Scrypt (password_hash, password_salt,
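The `string` versus `Cstruct.t` split that runs through this whole change set also surfaces when hashing artifacts, as in the very first hunk of the diff: the removed side hashes with digestif into a plain string, the added side with mirage-crypto (pre-1.0 API) into a `Cstruct.t`. Both produce the same raw SHA-256 bytes:

```ocaml
(* The two hashing styles used on the two sides of this change set. *)
let sha256_string data = (* : string, via digestif *)
  Digestif.SHA256.(to_raw_string (digest_string data))

let sha256_cstruct data = (* : Cstruct.t, via mirage-crypto < 1.0 *)
  Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data)
```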
@@ -1,3 +1,2 @@
 (lang dune 2.7)
 (name builder-web)
-(formatting disabled)
@@ -26,7 +26,7 @@ let init_datadir datadir =
 let init dbpath datadir =
   Result.bind (init_datadir datadir) @@ fun () ->
   Lwt_main.run (
-    Caqti_lwt_unix.connect
+    Caqti_lwt.connect
       (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
     >>= fun (module Db : Caqti_lwt.CONNECTION) ->
     Db.find Builder_db.get_application_id () >>= fun application_id ->

@@ -60,12 +60,16 @@ let mime_lookup path =
   (match Fpath.to_string path with
    | "build-environment" | "opam-switch" | "system-packages" ->
      "text/plain"
-   | _ ->
+   | filename ->
      if Fpath.has_ext "build-hashes" path
      then "text/plain"
      else if Fpath.is_prefix Fpath.(v "bin/") path
      then "application/octet-stream"
-     else Magic_mime.lookup (Fpath.to_string path))
+     else match Option.bind
+            (Result.to_option (Conan_unix.run_with_tree Conan_magic_database.tree filename))
+            Conan.Metadata.mime with
+          | Some mime_type -> mime_type
+          | None -> "application/octet-stream" (* default *))

 let string_of_html =
   Format.asprintf "%a" (Tyxml.Html.pp ())
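The added `mime_lookup` branch delegates unknown files to conan's magic database instead of a pure filename lookup. A standalone sketch of that call sequence, using only the identifiers visible in the hunk and assuming the `conan-unix`/`conan-database` packages that the opam file now requires:

```ocaml
(* Sketch: conan-based MIME detection with a conservative fallback. *)
let mime_of_file (filename : string) : string =
  match
    Option.bind
      (Result.to_option
         (Conan_unix.run_with_tree Conan_magic_database.tree filename))
      Conan.Metadata.mime
  with
  | Some mime_type -> mime_type
  | None -> "application/octet-stream"
```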
@@ -134,14 +138,14 @@ module Viz_aux = struct
         viz_dir ~cachedir ~viz_typ ~version
         / input_hash + "html"
       )

   let choose_versioned_viz_path
       ~cachedir
       ~viz_typ
       ~viz_input_hash
       ~current_version =
     let ( >>= ) = Result.bind in
-    let rec aux current_version =
+    let rec aux current_version =
       let path =
         viz_path ~cachedir
           ~viz_typ

@@ -153,7 +157,7 @@ module Viz_aux = struct
           Error (`Msg (Fmt.str "viz '%s': There exist no version of the requested \
                                 visualization"
                          (viz_type_to_string viz_typ)))
-        else
+        else
           aux @@ pred current_version
       )
     in

@@ -162,7 +166,7 @@ module Viz_aux = struct
   let get_viz_version_from_dirs ~cachedir ~viz_typ =
     let ( >>= ) = Result.bind in
     Bos.OS.Dir.contents cachedir >>= fun versioned_dirs ->
-    let max_cached_version =
+    let max_cached_version =
       let viz_typ_str = viz_type_to_string viz_typ ^ "_" in
       versioned_dirs
       |> List.filter_map (fun versioned_dir ->

@@ -171,7 +175,7 @@ module Viz_aux = struct
             Logs.warn (fun m -> m "%s" err);
             None
           | Ok false -> None
-          | Ok true ->
+          | Ok true ->
            let dir_str = Fpath.filename versioned_dir in
            if not (String.starts_with ~prefix:viz_typ_str dir_str) then
              None

@@ -199,6 +203,10 @@ module Viz_aux = struct

   let hash_viz_input ~uuid typ db =
     let open Builder_db in
+    let hex cstruct =
+      let `Hex hex_str = Hex.of_cstruct cstruct in
+      hex_str
+    in
     main_binary_of_uuid uuid db >>= fun main_binary ->
     Model.build uuid db
     |> if_error "Error getting build" >>= fun (build_id, _build) ->
@@ -206,29 +214,29 @@ module Viz_aux = struct
     |> if_error "Error getting build artifacts" >>= fun artifacts ->
     match typ with
     | `Treemap ->
-      let debug_binary =
-        let bin = Fpath.base main_binary.filepath in
+      let debug_binary =
+        let bin = Fpath.base main_binary.localpath in
         List.find_opt
-          (fun p -> Fpath.(equal (bin + "debug") (base p.filepath)))
+          (fun p -> Fpath.(equal (bin + "debug") (base p.localpath)))
           artifacts
       in
       begin
         Model.not_found debug_binary
         |> not_found_error >>= fun debug_binary ->
         debug_binary.sha256
-        |> Ohex.encode
+        |> hex
         |> Lwt_result.return
-      end
-    | `Dependencies ->
+      end
+    | `Dependencies ->
       let opam_switch =
         List.find_opt
-          (fun p -> Fpath.(equal (v "opam-switch") (base p.filepath)))
+          (fun p -> Fpath.(equal (v "opam-switch") (base p.localpath)))
           artifacts
       in
       Model.not_found opam_switch
       |> not_found_error >>= fun opam_switch ->
       opam_switch.sha256
-      |> Ohex.encode
+      |> hex
      |> Lwt_result.return

   let try_load_cached_visualization ~cachedir ~uuid viz_typ db =
@@ -243,7 +251,7 @@ module Viz_aux = struct
       |> Lwt.return
       |> if_error "Error finding a version of the requested visualization")
     >>= fun viz_path ->
-    Lwt_result.catch (fun () ->
+    Lwt_result.catch (
       Lwt_io.with_file ~mode:Lwt_io.Input
         (Fpath.to_string viz_path)
         Lwt_io.read
@@ -254,17 +262,8 @@ module Viz_aux = struct
 end


-let routes ~datadir ~cachedir ~configdir ~expired_jobs =
-  let builds ~all ?(filter_builds_later_than = 0) req =
-    let than =
-      if filter_builds_later_than = 0 then
-        Ptime.epoch
-      else
-        let n = Ptime.Span.v (filter_builds_later_than, 0L) in
-        let now = Ptime_clock.now () in
-        Ptime.Span.sub (Ptime.to_span now) n |> Ptime.of_span |>
-        Option.fold ~none:Ptime.epoch ~some:Fun.id
-    in
+let routes ~datadir ~cachedir ~configdir =
+  let builds req =
     Dream.sql req Model.jobs_with_section_synopsis
     |> if_error "Error getting jobs"
       ~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e))
@@ -277,26 +276,20 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
       r >>= fun acc ->
       Dream.sql req (Model.build_with_main_binary job_id platform) >>= function
       | Some (build, artifact) ->
-        if Ptime.is_later ~than build.finish then
-          Lwt_result.return ((platform, build, artifact) :: acc)
-        else
-          Lwt_result.return acc
+        Lwt_result.return ((platform, build, artifact) :: acc)
       | None ->
         Log.warn (fun m -> m "Job without builds: %s" job_name);
         Lwt_result.return acc)
     ps (Lwt_result.return []) >>= fun platform_builds ->
-    if platform_builds = [] then
-      Lwt_result.return acc
-    else
-      let v = (job_name, synopsis, platform_builds) in
-      let section = Option.value ~default:"Uncategorized" section in
-      Lwt_result.return (Utils.String_map.add_or_create section v acc))
+    let v = (job_name, synopsis, platform_builds) in
+    let section = Option.value ~default:"Uncategorized" section in
+    Lwt_result.return (Utils.String_map.add_or_create section v acc))
   jobs
   (Lwt_result.return Utils.String_map.empty)
   |> if_error "Error getting jobs"
     ~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e))
   >>= fun jobs ->
-  Views.Builds.make ~all jobs |> string_of_html |> Dream.html |> Lwt_result.ok
+  Views.Builds.make jobs |> string_of_html |> Dream.html |> Lwt_result.ok
   in

   let job req =
@@ -424,15 +417,15 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
     |> if_error ~status:`Not_Found "File not found" >>= fun filepath ->
     Dream.sql req (Model.build_artifact build filepath)
     |> if_error "Error getting build artifact" >>= fun file ->
-    let etag = Base64.encode_string file.Builder_db.sha256 in
+    let etag = Base64.encode_string (Cstruct.to_string file.Builder_db.sha256) in
     match if_none_match with
     | Some etag' when etag = etag' ->
       Dream.empty `Not_Modified |> Lwt_result.ok
     | _ ->
       Model.build_artifact_data datadir file
       |> if_error "Error getting build artifact"
-        ~log:(fun e -> Log.warn (fun m -> m "Error getting build artifact data for file %a: %a"
-          Fpath.pp file.Builder_db.filepath
+        ~log:(fun e -> Log.warn (fun m -> m "Error getting build artifact data for file %a in %a: %a"
+          Fpath.pp file.Builder_db.filepath Fpath.pp file.Builder_db.localpath
           pp_error e)) >>= fun data ->
       let headers = [
         "Content-Type", mime_lookup file.Builder_db.filepath;
@@ -482,19 +475,13 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
     |> Option.to_result ~none:(`Msg "bad finish time") |> Result.map Int64.of_int
     |> Lwt.return |> if_error "Internal server error" >>= fun finish ->
     Dream.stream ~headers:["Content-Type", "application/tar+gzip"]
-      (fun stream ->
-        let+ r = Dream_tar.targz_response datadir finish artifacts stream in
-        match r with
-        | Ok () -> ()
-        | Error _ ->
-          Log.warn (fun m -> m "error assembling gzipped tar archive");
-          ())
+      (Dream_tar.targz_response datadir finish artifacts)
     |> Lwt_result.ok
   in

   let upload req =
     let* body = Dream.body req in
-    Builder.Asn.exec_of_str body |> Lwt.return
+    Builder.Asn.exec_of_cs (Cstruct.of_string body) |> Lwt.return
     |> if_error ~status:`Bad_Request "Bad request"
       ~log:(fun e ->
         Log.warn (fun m -> m "Received bad builder ASN.1");
@@ -526,7 +513,7 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
     Dream.query req "sha256" |> Option.to_result ~none:(`Msg "Missing sha256 query parameter")
     |> Lwt.return
     |> if_error ~status:`Bad_Request "Bad request" >>= fun hash_hex ->
-    begin try Ohex.decode hash_hex |> Lwt_result.return
+    begin try Hex.to_cstruct (`Hex hash_hex) |> Lwt_result.return
       with Invalid_argument e -> Lwt_result.fail (`Msg ("Bad hex: " ^ e))
     end
     |> if_error ~status:`Bad_Request "Bad request" >>= fun hash ->
@@ -619,11 +606,27 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
     >>= fun () -> Dream.respond "" |> Lwt_result.ok
   in

+  let redirect_parent req =
+    let queries = Dream.all_queries req in
+    let parent_url =
+      let parent_path =
+        Dream.target req
+        |> Utils.Path.of_url
+        |> List.rev |> List.tl |> List.rev
+      in
+      Utils.Path.to_url ~path:parent_path ~queries
+    in
+    Dream.redirect ~status:`Temporary_Redirect req parent_url
+    |> Lwt_result.ok
+  in
+
   let w f req = or_error_response (f req) in

   [
-    `Get, "/", (w (builds ~all:false ~filter_builds_later_than:expired_jobs));
+    `Get, "/", (w builds);
+    `Get, "/job", (w redirect_parent);
     `Get, "/job/:job", (w job);
+    `Get, "/job/:job/build", (w redirect_parent);
     `Get, "/job/:job/failed", (w job_with_failed);
     `Get, "/job/:job/build/latest/**", (w redirect_latest);
     `Get, "/job/:job/build/latest", (w redirect_latest_no_slash);
@@ -634,9 +637,8 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
     `Get, "/job/:job/build/:build/vizdependencies", (w @@ job_build_viz `Dependencies);
     `Get, "/job/:job/build/:build/script", (w (job_build_static_file `Script));
     `Get, "/job/:job/build/:build/console", (w (job_build_static_file `Console));
-    `Get, "/job/:job/build/:build/all.tar.gz", (w job_build_targz);
     `Get, "/failed-builds", (w failed_builds);
-    `Get, "/all-builds", (w (builds ~all:true));
+    `Get, "/job/:job/build/:build/all.tar.gz", (w job_build_targz);
     `Get, "/hash", (w hash);
     `Get, "/compare/:build_left/:build_right", (w compare_builds);
     `Post, "/upload", (Authorization.authenticate (w upload));
@@ -672,7 +674,7 @@ module Middleware = struct
       let queries = Dream.all_queries req in
       let url = Utils.Path.to_url ~path ~queries in
       (*> Note: See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Location*)
-      Dream.redirect ~status:`Moved_Permanently req url
+      Dream.redirect ~status:`Permanent_Redirect req url
     | _ (* /... *) -> handler req

 end
lib/dream_tar.ml
@@ -1,36 +1,47 @@
-module High : sig
-  type t
-  type 'a s = 'a Lwt.t
+open Lwt.Infix

-  external inj : 'a s -> ('a, t) Tar.io = "%identity"
-  external prj : ('a, t) Tar.io -> 'a s = "%identity"
-end = struct
-  type t
-  type 'a s = 'a Lwt.t
+module Writer = struct
+  type out_channel =
+    { mutable gz : Gz.Def.encoder
+    ; ic : Cstruct.t
+    ; oc : Cstruct.t
+    ; stream : Dream.stream }

-  external inj : 'a -> 'b = "%identity"
-  external prj : 'a -> 'b = "%identity"
+  type 'a t = 'a Lwt.t

+  let really_write ({ oc; stream; _ } as state) cs =
+    let rec until_await gz =
+      match Gz.Def.encode gz with
+      | `Await gz -> state.gz <- gz ; Lwt.return_unit
+      | `Flush gz ->
+        let max = Cstruct.length oc - Gz.Def.dst_rem gz in
+        let str = Cstruct.copy oc 0 max in
+        Dream.write stream str >>= fun () ->
+        let { Cstruct.buffer; off= cs_off; len= cs_len; } = oc in
+        until_await (Gz.Def.dst gz buffer cs_off cs_len)
+      | `End _gz -> assert false in
+    if Cstruct.length cs = 0
+    then Lwt.return_unit
+    else ( let { Cstruct.buffer; off; len; } = cs in
+           let gz = Gz.Def.src state.gz buffer off len in
+           until_await gz )
 end

-let value v = Tar.High (High.inj v)
+module HW = Tar.HeaderWriter(Lwt)(Writer)

-let ok_value v = value (Lwt_result.ok v)
-
-let run t stream =
-  let rec run : type a. (a, 'err, High.t) Tar.t -> (a, 'err) result Lwt.t =
-    function
-    | Tar.Write str ->
-      (* Can this not fail?!? Obviously, it can, but we never know?? *)
-      Lwt_result.ok (Dream.write stream str)
-    | Tar.Seek _ | Tar.Read _ | Tar.Really_read _ -> assert false
-    | Tar.Return value -> Lwt.return value
-    | Tar.High value -> High.prj value
-    | Tar.Bind (x, f) ->
-      let open Lwt_result.Syntax in
-      let* v = run x in
-      run (f v)
+let write_block (header : Tar.Header.t) lpath ({ Writer.ic= buf; _ } as state) =
+  HW.write ~level:Tar.Header.Ustar header state >>= fun () ->
+  Lwt_io.open_file ~mode:Lwt_io.Input (Fpath.to_string lpath) >>= fun ic ->
+  let rec loop () =
+    let { Cstruct.buffer; off; len; } = buf in
+    Lwt_io.read_into_bigstring ic buffer off len >>= function
+    | 0 -> Lwt.return ()
+    | len' ->
+      Writer.really_write state (Cstruct.sub buf 0 len') >>= fun () ->
+      loop ()
   in
-  run t
+  loop () >>= fun () ->
+  Writer.really_write state (Tar.Header.zero_padding header)

 let header_of_file mod_time (file : Builder_db.file) =
   let file_mode = if Fpath.is_prefix Fpath.(v "bin/") file.filepath then
@ -40,53 +51,38 @@ let header_of_file mod_time (file : Builder_db.file) =
|
|||
in
|
||||
Tar.Header.make ~file_mode ~mod_time (Fpath.to_string file.filepath) (Int64.of_int file.size)
|
||||
|
||||
let contents datadir file : unit -> (string option, _, _) Tar.t =
|
||||
let state = ref `Initial in
|
||||
let dispenser () =
|
||||
let ( let* ) = Tar.( let* ) in
|
||||
let src = Fpath.append datadir (Model.artifact_path file) in
|
||||
let* state' =
|
||||
match !state with
|
||||
| `Initial ->
|
||||
let* fd = ok_value (Lwt_io.open_file ~mode:Lwt_io.Input (Fpath.to_string src)) in
|
||||
let s = `Active fd in
|
||||
state := s; Tar.return (Ok s)
|
||||
| `Active _ | `Closed as s -> Tar.return (Ok s)
|
||||
let targz_response datadir finish (files : Builder_db.file list) (stream : Dream.stream) =
|
||||
let state =
|
||||
let ic = Cstruct.create (4 * 4 * 1024) in
|
||||
let oc = Cstruct.create 4096 in
|
||||
let gz =
|
||||
let w = De.Lz77.make_window ~bits:15 in
|
||||
let q = De.Queue.create 0x1000 in
|
||||
let mtime = Int32.of_float (Unix.gettimeofday ()) in
|
||||
let gz = Gz.Def.encoder `Manual `Manual ~mtime Gz.Unix ~q ~w ~level:4 in
|
||||
let { Cstruct.buffer; off; len; } = oc in
|
||||
Gz.Def.dst gz buffer off len
|
||||
in
|
||||
match state' with
|
||||
| `Closed -> Tar.return (Ok None)
|
||||
| `Active fd ->
|
||||
let* data = ok_value (Lwt_io.read ~count:65536 fd) in
|
||||
if String.length data = 0 then begin
|
||||
state := `Closed;
|
||||
let* () = ok_value (Lwt_io.close fd) in
|
||||
Tar.return (Ok None)
|
||||
end else
|
||||
Tar.return (Ok (Some data))
|
||||
{ Writer.gz; ic; oc; stream; }
|
||||
in
|
||||
dispenser
|
||||
|
||||
let entries datadir finish files =
|
||||
let files =
|
||||
List.map (fun file ->
|
||||
let hdr = header_of_file finish file in
|
||||
let level = Some Tar.Header.Posix in
|
||||
(level, hdr, contents datadir file)
|
||||
)
|
||||
files
|
||||
Lwt_list.iter_s (fun file ->
|
||||
let hdr = header_of_file finish file in
|
||||
write_block hdr Fpath.(datadir // file.localpath) state)
|
||||
files >>= fun () ->
|
||||
Writer.really_write state Tar.Header.zero_block >>= fun () ->
|
||||
Writer.really_write state Tar.Header.zero_block >>= fun () ->
|
||||
(* assert (Gz.Def.encode gz = `Await) *)
|
||||
let rec until_end gz = match Gz.Def.encode gz with
|
||||
| `Await _gz -> assert false
|
||||
| `Flush gz | `End gz as flush_or_end ->
|
||||
let max = Cstruct.length state.oc - Gz.Def.dst_rem gz in
|
||||
let str = Cstruct.copy state.oc 0 max in
|
||||
Dream.write stream str >>= fun () -> match flush_or_end with
|
||||
| `Flush gz ->
|
||||
let { Cstruct.buffer; off= cs_off; len= cs_len; } = state.oc in
|
||||
until_end (Gz.Def.dst gz buffer cs_off cs_len)
|
||||
| `End _ -> Lwt.return_unit
|
||||
in
|
||||
let files = ref files in
|
||||
fun () -> match !files with
|
||||
| [] -> Tar.return (Ok None)
|
||||
| f :: fs -> files := fs; Tar.return (Ok (Some f))
|
||||
|
||||
let targz_response datadir finish files stream =
|
||||
let entries : (_, _) Tar.entries = entries datadir finish files in
|
||||
let global_hdr =
|
||||
Tar.Header.Extended.make
|
||||
~comment:"Tar file produced by builder-web.%%VERSION_NUM%%"
|
||||
()
|
||||
in
|
||||
let finish32 = Int64.to_int32 finish in
|
||||
Logs.err (fun m -> m "finished at %ld (%Ld)" finish32 finish);
|
||||
run (Tar_gz.out_gzipped ~level:9 ~mtime:finish32 Gz.Unix (Tar.out ~global_hdr entries)) stream
|
||||
until_end (Gz.Def.src state.gz De.bigstring_empty 0 0) >>= fun () ->
|
||||
Dream.flush stream >>= fun () ->
|
||||
Dream.close stream
|
||||
|
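Both variants of `targz_response` drive the whole gzipped archive through the HTTP response without materializing it on disk. A hypothetical caller, assuming Dream's callback-style `stream` (handler name, header list, and the `Lwt.map ignore` error handling are illustrative):

```ocaml
(* Hypothetical Dream handler streaming a build's artifacts as .tar.gz;
   [datadir], [finish] (mtime, as Int64 seconds) and [files] would come
   from configuration and the database. *)
let job_build_targz datadir finish files _req =
  Dream.stream
    ~headers:[ "Content-Type", "application/gzip" ]
    (fun stream ->
       (* the sketch drops the tar result; real code would log errors *)
       Lwt.map ignore (Dream_tar.targz_response datadir finish files stream))
```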
24 lib/dune

@ -1,5 +1,23 @@
(library
 (name builder_web)
 (libraries builder builder_db dream tyxml bos duration ohex caqti-lwt
  opamdiff ptime.clock.os cmarkit tar tar.gz tar-unix owee solo5-elftool decompress.de
  decompress.gz uri digestif))
 (libraries
  builder
  builder_db
  dream
  tyxml
  bos
  duration
  hex
  caqti-lwt
  opamdiff
  ptime.clock.os
  omd
  tar
  owee
  solo5-elftool
  decompress.de
  decompress.gz
  uri
  conan-unix
  conan-database
  ))
137 lib/model.ml

@ -19,14 +19,6 @@ let not_found = function
  | Some v -> Lwt_result.return v

let staging datadir = Fpath.(datadir / "_staging")
let artifact_path artifact =
  let sha256 = Ohex.encode artifact.Builder_db.sha256 in
  (* NOTE: [sha256] is 64 characters when it's a hex sha256 checksum *)
  (* NOTE: We add the prefix to reduce the number of files in a directory - a
     workaround for inferior filesystems. We can easily revert this by changing
     this function and adding a migration. *)
  let prefix = String.sub sha256 0 2 in
  Fpath.(v "_artifacts" / prefix / sha256)
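The comment in `artifact_path` describes sharding the content-addressed store by the first two hex digits of the SHA256. A worked example of the resulting layout (the hash value is fabricated for illustration):

```ocaml
(* A sha256 of "4f2a…" lands below _artifacts/4f/. *)
let () =
  let sha256 = "4f2a" ^ String.make 60 '0' in
  let prefix = String.sub sha256 0 2 in
  assert (Fpath.(to_string (v "_artifacts" / prefix / sha256))
          = "_artifacts/4f/" ^ sha256)
```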
let read_file datadir filepath =
  let filepath = Fpath.(datadir // filepath) in

@ -42,7 +34,7 @@ let read_file datadir filepath =
         Log.warn (fun m -> m "Error reading local file %a: %s"
                      Fpath.pp filepath (Unix.error_message e));
         Lwt.return_error (`File_error filepath)
       | e -> Lwt.reraise e)
       | e -> Lwt.fail e)

let build_artifact build filepath (module Db : CONN) =
  Db.find_opt Builder_db.Build_artifact.get_by_build_uuid (build, filepath)

@ -52,14 +44,14 @@ let build_artifact_by_id id (module Db : CONN) =
  Db.find Builder_db.Build_artifact.get id

let build_artifact_data datadir file =
  read_file datadir (artifact_path file)
  read_file datadir file.Builder_db.localpath

let build_artifacts build (module Db : CONN) =
  Db.collect_list Builder_db.Build_artifact.get_all_by_build build >|=
  List.map snd

let solo5_manifest datadir file =
  let buf = Owee_buf.map_binary Fpath.(to_string (datadir // artifact_path file)) in
  let buf = Owee_buf.map_binary Fpath.(to_string (datadir // file.Builder_db.localpath)) in
  Solo5_elftool.query_manifest buf |> Result.to_option

let platforms_of_job id (module Db : CONN) =

@ -204,42 +196,46 @@ let cleanup_staging datadir (module Db : Caqti_lwt.CONNECTION) =
       (cleanup_staged staged))
    stageds

let save path data =
let save file data =
  let open Lwt.Infix in
  Lwt.catch
    (fun () ->
       Lwt_io.open_file ~mode:Lwt_io.Output (Fpath.to_string path) >>= fun oc ->
       Lwt_io.open_file ~mode:Lwt_io.Output (Fpath.to_string file) >>= fun oc ->
       Lwt_io.write oc data >>= fun () ->
       Lwt_io.close oc
       |> Lwt_result.ok)
    (function
      | Unix.Unix_error (e, _, _) ->
        Lwt_result.fail (`Msg (Unix.error_message e))
      | e -> Lwt.reraise e)
      | e -> Lwt.fail e)

let save_artifacts staging artifacts =
  List.fold_left
    (fun r (file, data) ->
       r >>= fun () ->
       let sha256 = Ohex.encode file.Builder_db.sha256 in
       let destpath = Fpath.(staging / sha256) in
       save destpath data)
    (Lwt_result.return ())
    artifacts
let save_file dir staging (filepath, data) =
  let size = String.length data in
  let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
  let localpath = Fpath.append dir filepath in
  let destpath = Fpath.append staging filepath in
  Lwt_result.lift (Bos.OS.Dir.create (Fpath.parent destpath)) >>= fun _ ->
  save destpath data >|= fun () ->
  { Builder_db.filepath; localpath; sha256; size }

let commit_files datadir staging_dir job_name uuid artifacts =
  (* First we move the artifacts *)
let save_files dir staging files =
  List.fold_left
    (fun r artifact ->
       r >>= fun () ->
       let sha256 = Ohex.encode artifact.Builder_db.sha256 in
       let src = Fpath.(staging_dir / sha256) in
       let dest = Fpath.(datadir // artifact_path artifact) in
       Lwt.return (Bos.OS.Dir.create (Fpath.parent dest)) >>= fun _created ->
       Lwt.return (Bos.OS.Path.move ~force:true src dest))
    (Lwt_result.return ())
    artifacts >>= fun () ->
  (* Now the staging dir only contains script & console *)
    (fun r file ->
       r >>= fun acc ->
       save_file dir staging file >>= fun file ->
       Lwt_result.return (file :: acc))
    (Lwt_result.return [])
    files

let save_all staging_dir (job : Builder.script_job) uuid artifacts =
  let build_dir = Fpath.(v job.Builder.name / Uuidm.to_string uuid) in
  let output_dir = Fpath.(build_dir / "output")
  and staging_output_dir = Fpath.(staging_dir / "output") in
  Lwt.return (Bos.OS.Dir.create staging_output_dir) >>= fun _ ->
  save_files output_dir staging_output_dir artifacts >>= fun artifacts ->
  Lwt_result.return artifacts

let commit_files datadir staging_dir job_name uuid =
  let job_dir = Fpath.(datadir / job_name) in
  let dest = Fpath.(job_dir / Uuidm.to_string uuid) in
  Lwt.return (Bos.OS.Dir.create job_dir) >>= fun _ ->

@ -253,15 +249,8 @@ let infer_section_and_synopsis artifacts =
  in
  let infer_section switch root =
    let root_pkg = root.OpamPackage.name in
    let is_unikernel =
      (* since mirage 4.2.0, the x-mirage-opam-lock-location is emitted *)
      Option.value ~default:false
        (Option.map (fun opam ->
             Option.is_some (OpamFile.OPAM.extended opam "x-mirage-opam-lock-location" Fun.id))
            (OpamPackage.Name.Map.find_opt root_pkg switch.OpamFile.SwitchExport.overlays))
    in
    let root_pkg_name = OpamPackage.Name.to_string root_pkg in
    if is_unikernel || Astring.String.is_prefix ~affix:"mirage-unikernel-" root_pkg_name then
    if Astring.String.is_prefix ~affix:"mirage-unikernel-" root_pkg_name then
      let metrics_influx =
        let influx = OpamPackage.Name.of_string "metrics-influx" in
        OpamPackage.Set.exists (fun p -> OpamPackage.Name.equal p.OpamPackage.name influx)

@ -306,8 +295,7 @@ let compute_input_id artifacts =
      get_hash (Fpath.v "build-environment"),
      get_hash (Fpath.v "system-packages")
  with
  | Some a, Some b, Some c ->
    Some Digestif.SHA256.(to_raw_string (digestv_string [a;b;c]))
  | Some a, Some b, Some c -> Some (Mirage_crypto.Hash.SHA256.digest (Cstruct.concat [a;b;c]))
  | _ -> None

let save_console_and_script staging_dir job_name uuid console script =

@ -329,25 +317,6 @@ let prepare_staging staging_dir =
  then Lwt_result.fail (`Msg "build directory already exists")
  else Lwt_result.return ()

(* saving:
   - for each artifact compute its sha256 checksum -- calling Lwt.pause in
     between
   - lookup artifact sha256 in the database and filter them out of the list: not_in_db
   - mkdir -p _staging/uuid/
   - save console & script to _staging/uuid/
   - save each artifact in not_in_db as _staging/uuid/sha256
   committing:
   - for each artifact mv _staging/uuid/sha256 _artifacts/sha256
     (or _artifacts/prefix(sha256)/sha256 where prefix(sha256) is the first two hex digits in sha256)
   - now _staging/uuid only contains console & script so we mv _staging/uuid _staging/job/uuid
   potential issues:
   - race condition in uploading same artifact:
     * if the artifact already exists in the database and thus filesystem then nothing is done
     * if the artifact is added to the database and/or filesystem we atomically overwrite it
   - input_id depends on a sort order?
*)
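A minimal sketch of the stage-then-move commit step described in the comment above, reusing the `Bos.OS.Dir.create` and `Bos.OS.Path.move` calls visible in this diff (the function name and error handling are illustrative):

```ocaml
(* Move one staged artifact into the sharded content-addressed store. *)
let commit_artifact ~datadir ~staging_dir sha256_hex =
  let src = Fpath.(staging_dir / sha256_hex) in
  let dest =
    Fpath.(datadir / "_artifacts" / String.sub sha256_hex 0 2 / sha256_hex)
  in
  Result.bind (Bos.OS.Dir.create (Fpath.parent dest)) (fun _created ->
      (* ~force turns the concurrent-upload race into an atomic overwrite *)
      Bos.OS.Path.move ~force:true src dest)
```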
let add_build
    ~datadir
    ~cachedir

@ -368,35 +337,16 @@ let add_build
        e)
      x
  in
  let not_interesting p =
    String.equal (Fpath.basename p) "README.md" || String.equal (Fpath.get_ext p) ".build-hashes"
  let artifacts_to_preserve =
    let not_interesting p =
      String.equal (Fpath.basename p) "README.md" || String.equal (Fpath.get_ext p) ".build-hashes"
    in
    List.filter (fun (p, _) -> not (not_interesting p)) raw_artifacts
  in
  begin
    List.fold_left
      (fun r (filepath, data) ->
         r >>= fun acc ->
         if not_interesting filepath then
           Lwt_result.return acc
         else
           let sha256 = Digestif.SHA256.(to_raw_string (digest_string data))
           and size = String.length data in
           Lwt_result.ok (Lwt.pause ()) >|= fun () ->
           ({ filepath; sha256; size }, data) :: acc)
      (Lwt_result.return [])
      raw_artifacts
  end >>= fun artifacts ->
  or_cleanup (prepare_staging staging_dir) >>= fun () ->
  or_cleanup (save_console_and_script staging_dir job_name uuid console job.Builder.script)
  >>= fun (console, script) ->
  List.fold_left
    (fun r ((f, _) as artifact) ->
       r >>= fun acc ->
       Db.find Builder_db.Build_artifact.exists f.sha256 >|= fun exists ->
       if exists then acc else artifact :: acc)
    (Lwt_result.return [])
    artifacts >>= fun artifacts_to_save ->
  or_cleanup (save_artifacts staging_dir artifacts_to_save) >>= fun () ->
  let artifacts = List.map fst artifacts in
  or_cleanup (save_all staging_dir job uuid artifacts_to_preserve) >>= fun artifacts ->
  let r =
    Db.start () >>= fun () ->
    Db.exec Job.try_add job_name >>= fun () ->

@ -464,8 +414,8 @@ let add_build
         Db.exec Build_artifact.add (file, id))
      (Lwt_result.return ())
      remaining_artifacts_to_add >>= fun () ->
    commit_files datadir staging_dir job_name uuid (List.map fst artifacts_to_save) >>= fun () ->
    Db.commit () >|= fun () ->
    Db.commit () >>= fun () ->
    commit_files datadir staging_dir job_name uuid >|= fun () ->
    main_binary
  in
  Lwt_result.bind_lwt_error (or_cleanup r)

@ -484,7 +434,7 @@ let add_build
          and uuid = Uuidm.to_string uuid
          and job = job.name
          and platform = job.platform
          and sha256 = Ohex.encode main_binary.sha256
          and `Hex sha256 = Hex.of_cstruct main_binary.sha256
          in
          let fp_str p = Fpath.(to_string (datadir // p)) in
          let args =

@ -494,8 +444,7 @@ let add_build
              "--uuid=" ^ uuid ; "--platform=" ^ platform ;
              "--cache-dir=" ^ Fpath.to_string cachedir ;
              "--data-dir=" ^ Fpath.to_string datadir ;
              "--main-binary-filepath=" ^ Fpath.to_string main_binary.filepath ;
              fp_str Fpath.(datadir // artifact_path main_binary) ])
              fp_str main_binary.localpath ])
          in
          Log.debug (fun m -> m "executing hooks with %s" args);
          let dir = Fpath.(configdir / "upload-hooks") in
@ -5,7 +5,6 @@ val pp_error : Format.formatter -> error -> unit
val not_found : 'a option -> ('a, [> `Not_found ]) result Lwt.t

val staging : Fpath.t -> Fpath.t
val artifact_path : Builder_db.file -> Fpath.t

val cleanup_staging : Fpath.t -> Caqti_lwt.connection ->
  (unit, [> `Msg of string ]) result Lwt.t

@ -31,9 +30,9 @@ val build : Uuidm.t -> Caqti_lwt.connection ->
  ([`build] Builder_db.id * Builder_db.Build.t, [> error ]) result Lwt.t

val build_with_main_binary : [`job] Builder_db.id -> string -> Caqti_lwt.connection ->
  ((Builder_db.Build.t * Builder_db.file) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
  ((Builder_db.Build.t * Builder_db.file option) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val build_hash : string -> Caqti_lwt.connection ->
val build_hash : Cstruct.t -> Caqti_lwt.connection ->
  ((string * Builder_db.Build.t) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

val build_exists : Uuidm.t -> Caqti_lwt.connection ->
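One side of the `build_with_main_binary` change makes the main binary optional (`Builder_db.file option`): a build may have failed and produced no binary. A hedged sketch of consuming that shape, with the renderers passed in so the snippet stays self-contained (all names are illustrative):

```ocaml
(* Handle the optional main binary, as the views code later in this
   diff does with its [match latest_artifact with ...]. *)
let render_main_binary render_artifact render_failure (build, main_binary) =
  match main_binary with
  | Some file -> render_artifact build file (* successful build *)
  | None -> render_failure build            (* no binary produced *)
```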
103 lib/utils.ml

@ -45,30 +45,85 @@ let compare_pkgs p1 p2 =
  in
  diff_map (parse_pkgs p1) (parse_pkgs p2)

let md_to_html ?adjust_heading ?(safe = true) data =
  let open Cmarkit in
  let doc = Doc.of_string ~strict:false ~heading_auto_ids:true data in
  let doc =
    Option.fold ~none:doc
      ~some:(fun lvl ->
          let block _m = function
            | Block.Heading (h, meta) ->
              let open Block.Heading in
              let level = level h
              and id = id h
              and layout = layout h
              and inline = inline h
              in
              let h' = make ?id ~layout ~level:(level + lvl) inline in
              Mapper.ret (Block.Heading (h', meta))
            | Block.Blocks _ -> Mapper.default
            | x -> Mapper.ret x
          in
          let mapper = Mapper.make ~block () in
          Mapper.map_doc mapper doc)
      adjust_heading
  in
  Cmarkit_html.of_doc ~safe doc
module Omd = struct

  let make_safe omd =
    let rec safe_block = function
      | Omd.Paragraph (attr, inline) ->
        safe_inline inline
        |> Option.map (fun inline -> Omd.Paragraph (attr, inline))
      | Omd.List (attr, typ, spacing, blocks) ->
        let blocks = List.filter_map (fun b ->
            let b = List.filter_map safe_block b in
            if b = [] then None else Some b)
            blocks
        in
        if blocks = [] then None else
          Some (Omd.List (attr, typ, spacing, blocks))
      | Omd.Blockquote (attr, blocks) ->
        let blocks = List.filter_map safe_block blocks in
        if blocks = [] then None else
          Some (Omd.Blockquote (attr, blocks))
      | Omd.Heading (attr, level, inline) ->
        safe_inline inline
        |> Option.map (fun inline -> Omd.Heading (attr, level, inline))
      | Omd.Html_block _ -> None
      | Omd.Definition_list (attr, def_elts) ->
        let def_elts = List.filter_map safe_def_elts def_elts in
        if def_elts = [] then None else
          Some (Omd.Definition_list (attr, def_elts))
      | Omd.Code_block _
      | Omd.Thematic_break _ as v -> Some v
    and safe_def_elts { term ; defs } =
      let defs = List.filter_map safe_inline defs in
      safe_inline term
      |> Option.map (fun term -> { Omd.term ; defs })
    and safe_inline = function
      | Concat (attr, inline) ->
        Some (Concat (attr, List.filter_map safe_inline inline))
      | Emph (attr, inline) ->
        safe_inline inline
        |> Option.map (fun inline -> Omd.Emph (attr, inline))
      | Strong (attr, inline) ->
        safe_inline inline
        |> Option.map (fun inline -> Omd.Strong (attr, inline))
      | Link (attr, link) ->
        begin match safe_link link with
          | `No_label | `Relative -> safe_inline link.Omd.label
          | `Link l -> Some (Omd.Link (attr, l))
        end
      | Image (attr, link) ->
        begin match safe_link link with
          | `No_label | `Relative -> None
          | `Link l -> Some (Omd.Image (attr, l))
        end
      | Html _ -> None
      | Text _
      | Code _
      | Hard_break _
      | Soft_break _ as v -> Some v
    and safe_link ({ label ; destination ; _ } as l) =
      let absolute_link =
        String.(length destination >= 2 && equal (sub destination 0 2) "//") ||
        String.(length destination >= 7 && equal (sub destination 0 7) "http://") ||
        String.(length destination >= 8 && equal (sub destination 0 8) "https://")
      in
      if absolute_link then
        match safe_inline label with
        | None -> `No_label
        | Some label -> `Link { l with label }
      else
        `Relative
    in
    List.filter_map safe_block omd

  let html_of_string markdown =
    markdown
    |> Omd.of_string
    |> make_safe
    |> Omd.to_html

end

module Path = struct

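The cmarkit-based `md_to_html` gains an `adjust_heading` knob so embedded READMEs do not compete with the page's own headings. Hypothetical usage mirroring the call sites elsewhere in this diff:

```ocaml
(* Demote README headings by two levels (h1 becomes h3) before embedding;
   cmarkit caps levels at h6, as the test at the end of this diff notes. *)
let readme_html data = Utils.md_to_html ~adjust_heading:2 data
```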
165 lib/views.ml

@ -98,7 +98,7 @@ let make_breadcrumbs nav =
      txtf "Job %s" job_name, Link.Job.make ~job_name ();
      (
        txtf "%a" pp_platform platform,
        Link.Job.make ~job_name ~queries ()
        Link.Job.make ~job_name ~queries ()
      )
    ]
  | `Build (job_name, build) ->

@ -122,7 +122,7 @@ let make_breadcrumbs nav =
      txtf "Comparison between %s@%a and %s@%a"
        job_left pp_ptime build_left.Builder_db.Build.start
        job_right pp_ptime build_right.Builder_db.Build.start,
      Link.Compare_builds.make
      Link.Compare_builds.make
        ~left:build_left.uuid
        ~right:build_right.uuid ()
    );

@ -188,7 +188,7 @@ let artifact
    ~basename
    ~job_name
    ~build
    ~file:{ Builder_db.filepath; sha256; size }
    ~file:{ Builder_db.filepath; localpath = _; sha256; size }
  =
  let artifact_link =
    Link.Job_build_artifact.make

@ -202,7 +202,7 @@ let artifact
      else txtf "%a" Fpath.pp filepath
    ];
    H.txt " ";
    H.code [txtf "SHA256:%s" (Ohex.encode sha256)];
    H.code [txtf "SHA256:%a" Hex.pp (Hex.of_cstruct sha256)];
    txtf " (%a)" Fmt.byte_size size;
  ]

@ -218,7 +218,7 @@ let page_not_found ~target ~referer =
    | None -> []
    | Some prev_url -> [
        H.p [
          H.txt "Go back to ";
          H.txt "Go back to ";
          H.a ~a:H.[ a_href prev_url ] [ H.txt prev_url ];
        ];
      ]

@ -274,9 +274,9 @@ The filename suffix of the unikernel binary indicate the expected execution envi

A persistent link to the latest successful build is available as
`/job/*jobname*/build/latest/`. Each build can be reproduced with
[orb](https://github.com/robur-coop/orb/). The builds are scheduled and executed
daily by [builder](https://github.com/robur-coop/builder/). This web interface is
[builder-web](https://git.robur.coop/robur/builder-web/). Read further information
[orb](https://github.com/roburio/orb/). The builds are scheduled and executed
daily by [builder](https://github.com/roburio/builder/). This web interface is
[builder-web](https://git.robur.io/robur/builder-web/). Read further information
[on our project page](https://robur.coop/Projects/Reproducible_builds). This
work has been funded by the European Union under the
[NGI Pointer](https://pointer.ngi.eu) program. Contact team ATrobur.coop if you

@ -285,7 +285,7 @@ have questions or suggestions.

let make_header =
  [
    H.Unsafe.data (Utils.md_to_html data);
    H.Unsafe.data (Utils.Omd.html_of_string data);
    H.form ~a:H.[a_action "/hash"; a_method `Get] [
      H.label [
        H.txt "Search artifact by SHA256";

@ -319,13 +319,18 @@ have questions or suggestions.
        ~build:latest_build.Builder_db.Build.uuid ()]
      [txtf "%a" pp_ptime latest_build.Builder_db.Build.start];
    H.txt " ";
  ]
  @ artifact
      ~basename:true
      ~job_name
      ~build:latest_build
      ~file:latest_artifact
  @ (match latest_artifact with
     | Some main_binary ->
       artifact
         ~basename:true
         ~job_name
         ~build:latest_build
         ~file:main_binary
     | None ->
       [ txtf "Build failure: %a" Builder.pp_execution_result
           latest_build.Builder_db.Build.result ]
    )
  @ [ H.br () ]

let make_jobs jobs =

@ -356,23 +361,14 @@ have questions or suggestions.
      H.txt "View the latest failed builds ";
      H.a ~a:H.[a_href "/failed-builds"]
        [H.txt "here"];
      H.txt ".";
      H.txt "."
    ]]

let make_all_or_active all =
  [ H.p [
      H.txt (if all then "View active jobs " else "View all jobs ");
      H.a ~a:H.[a_href (if all then "/" else "/all-builds")]
        [H.txt "here"];
      H.txt ".";
    ]]

let make ~all section_job_map =
let make section_job_map =
  layout ~title:"Reproducible OPAM builds"
    (make_header
     @ make_body section_job_map
     @ make_failed_builds
     @ make_all_or_active all)
     @ make_failed_builds)

end

@ -387,7 +383,7 @@ module Job = struct
      [
        H.h2 ~a:H.[a_id "readme"] [H.txt "README"];
        H.a ~a:H.[a_href "#builds"] [H.txt "Skip to builds"];
        H.Unsafe.data (Utils.md_to_html ~adjust_heading:2 data)
        H.Unsafe.data (Utils.Omd.html_of_string data)
      ]
    )

@ -397,7 +393,7 @@ module Job = struct
    check_icon build.Builder_db.Build.result;
    txtf " %s " build.platform;
    H.a ~a:H.[
      a_href @@ Link.Job_build.make
      a_href @@ Link.Job_build.make
        ~job_name
        ~build:build.Builder_db.Build.uuid () ]
      [

@ -435,7 +431,7 @@ module Job = struct
        H.txt "." ]
    else
      H.p [
        H.txt "Including failed builds " ;
        H.txt "Including failed builds " ;
        H.a ~a:H.[
          a_href @@ Link.Job.make_failed ~job_name ~queries ()
        ]

@ -491,7 +487,7 @@ module Job_build = struct
           pp_devices block_devices pp_devices net_devices]
  in
  let aux (file:Builder_db.file) =
    let sha256_hex = Ohex.encode file.sha256 in
    let (`Hex sha256_hex) = Hex.of_cstruct file.sha256 in
    [
      H.dt [
        H.a ~a:H.[a_href @@ Link.Job_build_artifact.make

@ -586,7 +582,7 @@ module Job_build = struct
    | Some b when not (Uuidm.equal build.uuid b.Builder_db.Build.uuid) ->
      [ H.li [ H.txt ctx;
               H.a ~a:[
                 H.a_href @@ Link.Compare_builds.make
                 H.a_href @@ Link.Compare_builds.make
                   ~left:b.uuid
                   ~right:build.uuid () ]
                 [txtf "%a" pp_ptime b.start]]

@ -683,10 +679,10 @@ module Job_build = struct
    font-weight: bold;\
    "
  ]

let make_viz_section ~job_name ~artifacts ~uuid =
  let viz_deps =
    let iframe =
  let viz_deps =
    let iframe =
      let src = Link.Job_build_artifact.make ~job_name ~build:uuid
          ~artifact:`Viz_dependencies () in
      H.iframe ~a:H.[

@ -697,11 +693,11 @@ module Job_build = struct
    in
    let descr_txt = "\
This is an interactive visualization of dependencies, \
focusing on how shared dependencies are.
focusing on how shared dependencies are.

In the middle you see the primary package. \
Edges shoot out to its direct \
dependencies, including build dependencies.
dependencies, including build dependencies.

From these direct dependencies, edges shoot out to sets \
of their own respective direct dependencies. \

@ -718,7 +714,7 @@ dependency.\
    [ iframe; H.br (); make_description descr_txt ]
  in
  let viz_treemap = lazy (
    let iframe =
    let iframe =
      let src = Link.Job_build_artifact.make ~job_name ~build:uuid
          ~artifact:`Viz_treemap () in
      H.iframe ~a:H.[

@ -730,7 +726,7 @@ dependency.\
    let descr_txt = "\
This interactive treemap shows the space-usage of modules/libraries inside the \
ELF binary. You can get more info from each block by \
hovering over them.
hovering over them.

On top of the treemap there is a scale, showing how much space the \
treemap itself constitutes of the binary, the excluded symbols/modules \

@ -864,7 +860,7 @@ let compare_builds
    ~(build_right : Builder_db.Build.t)
    ~env_diff:(added_env, removed_env, changed_env)
    ~pkg_diff:(added_pkgs, removed_pkgs, changed_pkgs)
    ~opam_diff:(opam_diff, version_diff, left, right, duniverse)
    ~opam_diff:(opam_diff, version_diff, left, right, duniverse_content_diff, duniverse_left, duniverse_right)
  =
  let items, data =
    List.fold_left (fun (items, data) (id, txt, amount, code) ->

@ -875,39 +871,33 @@ let compare_builds
        H.li [ H.a ~a:[H.a_href id_href] [txtf "%d %s" amount txt] ] :: items,
        data @ H.h3 ~a:[H.a_id id] [H.txt txt] :: code)
      ([], [])
      ([ ("opam-packages-removed", "Opam packages removed",
          OpamPackage.Set.cardinal left, [ H.code (packages left) ]) ;
         ("opam-packages-installede", "New opam packages installed",
          OpamPackage.Set.cardinal right, [ H.code (packages right) ]) ;
         ("opam-packages-version-diff", "Opam packages with version changes",
          List.length version_diff, [ H.code (package_diffs version_diff) ]) ;
       ] @ (match duniverse with
           | Ok (duniverse_left, duniverse_right, duniverse_content_diff) ->
             [
               ("duniverse-dirs-removed", "Duniverse directories removed",
                List.length duniverse_left, [ H.code (duniverse_dirs duniverse_left) ]) ;
               ("duniverse-dirs-installed", "New duniverse directories installed",
                List.length duniverse_right, [ H.code (duniverse_dirs duniverse_right) ]) ;
               ("duniverse-dirs-content-diff", "Duniverse directories with content changes",
                List.length duniverse_content_diff, [ H.code (duniverse_diffs duniverse_content_diff) ]) ;
             ]
           | Error `Msg msg -> [ "duniverse-dirs-error", "Duniverse parsing error", 1, [ H.txt msg ] ]
         ) @ [
           ("opam-packages-opam-diff", "Opam packages with changes in their opam file",
            List.length opam_diff, opam_diffs opam_diff) ;
           ("env-removed", "Environment variables removed",
            List.length removed_env, [ H.code (key_values removed_env) ]) ;
           ("env-added", "New environment variables added",
            List.length added_env, [ H.code (key_values added_env) ]) ;
           ("env-changed", "Environment variables changed",
            List.length changed_env, [ H.code (key_value_changes changed_env) ]) ;
           ("pkgs-removed", "System packages removed",
            List.length removed_pkgs, [ H.code (key_values removed_pkgs) ]) ;
           ("pkgs-added", "New system packages added",
            List.length added_pkgs, [ H.code (key_values added_pkgs) ]) ;
           ("pkgs-changed", "System packages changed",
            List.length changed_pkgs, [ H.code (key_value_changes changed_pkgs) ]) ;
         ])
      [ ("opam-packages-removed", "Opam packages removed",
         OpamPackage.Set.cardinal left, [ H.code (packages left) ]) ;
        ("opam-packages-installede", "New opam packages installed",
         OpamPackage.Set.cardinal right, [ H.code (packages right) ]) ;
        ("opam-packages-version-diff", "Opam packages with version changes",
         List.length version_diff, [ H.code (package_diffs version_diff) ]) ;
        ("duniverse-dirs-removed", "Duniverse directories removed",
         List.length duniverse_left, [ H.code (duniverse_dirs duniverse_left) ]) ;
        ("duniverse-dirs-installed", "New duniverse directories installed",
         List.length duniverse_right, [ H.code (duniverse_dirs duniverse_right) ]) ;
        ("duniverse-dirs-content-diff", "Duniverse directories with content changes",
         List.length duniverse_content_diff, [ H.code (duniverse_diffs duniverse_content_diff) ]) ;
        ("opam-packages-opam-diff", "Opam packages with changes in their opam file",
         List.length opam_diff, opam_diffs opam_diff) ;
        ("env-removed", "Environment variables removed",
         List.length removed_env, [ H.code (key_values removed_env) ]) ;
        ("env-added", "New environment variables added",
         List.length added_env, [ H.code (key_values added_env) ]) ;
        ("env-changed", "Environment variables changed",
         List.length changed_env, [ H.code (key_value_changes changed_env) ]) ;
        ("pkgs-removed", "System packages removed",
         List.length removed_pkgs, [ H.code (key_values removed_pkgs) ]) ;
        ("pkgs-added", "New system packages added",
         List.length added_pkgs, [ H.code (key_values added_pkgs) ]) ;
        ("pkgs-changed", "System packages changed",
         List.length changed_pkgs, [ H.code (key_value_changes changed_pkgs) ]) ;
      ]
  in
  layout
    ~nav:(`Comparison ((job_left, build_left), (job_right, build_right)))

@ -953,22 +943,15 @@ let failed_builds ~start ~count builds =
    ]
  in
  layout ~title:"Failed builds"
    (match builds with
     | [] ->
       [
         H.h1 [H.txt "No failed builds to list"];
         H.p [H.txt "🥳"];
       ]
     | _ :: _ ->
       [
         H.h1 [H.txt "Failed builds"];
         H.ul (List.map build builds);
         H.p [ txtf "View the next %d failed builds " count;
               H.a ~a:H.[
                 a_href @@ Link.Failed_builds.make
                   ~count ~start:(start + count) () ]
                 [ H.txt "here"];
               H.txt ".";
             ]
       ])
    ([
      H.h1 [H.txt "Failed builds"];
      H.ul (List.map build builds);
      H.p [ txtf "View the next %d failed builds " count;
            H.a ~a:H.[
              a_href @@ Link.Failed_builds.make
                ~count ~start:(start + count) () ]
              [ H.txt "here"];
            H.txt ".";
          ]
    ])
@ -1,5 +1,7 @@
module Set = OpamPackage.Set

type package = OpamPackage.t

let packages (switch : OpamFile.SwitchExport.t) =
  assert (Set.cardinal switch.selections.sel_pinned = 0);
  assert (Set.cardinal switch.selections.sel_compiler = 0);

@ -37,11 +39,7 @@ let duniverse_dirs_data =
      in
      let* dir = string ~ctx:"directory" dir in
      Ok (url, dir, List.rev hashes)
    | { pelem = List { pelem = [ url ; dir ] ; _ } ; _ } ->
      let* url = string ~ctx:"url" url in
      let* dir = string ~ctx:"directory" dir in
      Ok (url, dir, [])
    | _ -> Error (`Msg "expected a list of URL, DIR, [HASHES]")
    | _ -> Error (`Msg "expected a string or identifier")
  in
  function
  | { pelem = List { pelem = lbody ; _ } ; _ } ->

@ -56,15 +54,15 @@ let duniverse (switch : OpamFile.SwitchExport.t) =
  let root = switch.OpamFile.SwitchExport.selections.OpamTypes.sel_roots in
  if OpamPackage.Set.cardinal root = 1 then
    let root = OpamPackage.Set.choose root in
    match OpamPackage.(Name.Map.find_opt root.name switch.OpamFile.SwitchExport.overlays) with
    | None -> Error (`Msg "opam switch export doesn't contain the main package")
    | Some opam ->
      match OpamFile.OPAM.extended opam duniverse_dir duniverse_dirs_data with
      | None -> Ok None
      | Some Error e -> Error e
      | Some Ok v -> Ok (Some v)
    Option.bind
      OpamPackage.(Name.Map.find_opt root.name switch.OpamFile.SwitchExport.overlays)
      (fun opam ->
         match OpamFile.OPAM.extended opam duniverse_dir duniverse_dirs_data with
         | None -> None
         | Some Error _ -> None
         | Some Ok v -> Some v)
  else
    Error (`Msg "not a single root package found in opam switch export")
    None

type duniverse_diff = {
  name : string ;

@ -91,19 +89,11 @@ let duniverse_diff l r =
  let keys_l_only = ref [] and keys_r_only = ref [] and diff = ref [] in
  let equal_hashes l r =
    (* l and r are lists of pairs, with the hash kind and its value *)
    (* for a git remote, the hashes are empty lists *)
    (match l with [] -> false | _ -> true) &&
    (match r with [] -> false | _ -> true) &&
    List.for_all (fun (h, v) ->
        match List.assoc_opt h r with
        | None -> false
        | None -> true
        | Some v' -> String.equal v v')
      l &&
    List.for_all (fun (h, v) ->
        match List.assoc_opt h l with
        | None -> false
        | Some v' -> String.equal v v')
      r
      l
  in
  let _ =
    M.merge (fun key l r ->

@ -112,7 +102,6 @@ let duniverse_diff l r =
        | Some _, None -> keys_l_only := key :: !keys_l_only; None
        | None, None -> None
        | Some (_, l), Some (_, r) when equal_hashes l r -> None
        | Some (url1, []), Some (url2, []) when String.equal url1 url2 -> None
        | Some l, Some r -> diff := (key, l, r) :: !diff; None)
      l r
  in

@ -269,9 +258,8 @@ let compare left right =
  and right_pkgs = diff packages_right packages_left
  in
  let opam_diff = detailed_opam_diffs left right opam_diff in
  let duniverse_ret =
    match duniverse left, duniverse right with
    | Ok l, Ok r -> Ok (duniverse_diff l r)
    | Error _ as e, _ | _, (Error _ as e) -> e
  let left_duniverse, right_duniverse, duniverse_diff =
    duniverse_diff (duniverse left) (duniverse right)
  in
  (opam_diff, version_diff, left_pkgs, right_pkgs, duniverse_ret)
  (opam_diff, version_diff, left_pkgs, right_pkgs,
   duniverse_diff, left_duniverse, right_duniverse)
@ -1,39 +0,0 @@
type opam_diff = {
  pkg : OpamPackage.t ;
  build : (OpamTypes.command list * OpamTypes.command list) option ;
  install : (OpamTypes.command list * OpamTypes.command list) option ;
  url : (OpamFile.URL.t option * OpamFile.URL.t option) option ;
  otherwise_equal : bool ;
}

type version_diff = {
  name : OpamPackage.Name.t;
  version_left : OpamPackage.Version.t;
  version_right : OpamPackage.Version.t;
}

type duniverse_diff = {
  name : string ;
  urls : string * string option ;
  hash : (OpamHash.kind * string option * string option) list ;
}

val pp_opampackage : Format.formatter -> OpamPackage.t -> unit

val pp_version_diff : Format.formatter -> version_diff -> unit

val pp_duniverse_diff : Format.formatter -> duniverse_diff -> unit

val pp_duniverse_dir : Format.formatter -> string * string -> unit

val pp_opam_diff : Format.formatter -> opam_diff -> unit

val commands_to_strings : OpamTypes.command list * OpamTypes.command list -> string list * string list

val opt_url_to_string : OpamFile.URL.t option * OpamFile.URL.t option -> string * string

val compare: OpamFile.SwitchExport.t ->
  OpamFile.SwitchExport.t ->
  opam_diff list * version_diff list * OpamPackage.Set.t * OpamPackage.Set.t * ((string * string) list * (string * string) list * duniverse_diff list, [> `Msg of string ]) result
@ -32,8 +32,6 @@ Options:
    Hex encoded SHA256 digest of the main binary.
  --job=STRING
    Job name that was built.
  --main-binary-filepath=STRING
    The file path of the main binary.
EOM
  exit 1
}

@ -41,7 +39,6 @@ EOM
BUILD_TIME=
SHA=
JOB=
FILEPATH=

while [ $# -gt 1 ]; do
  OPT="$1"

@ -56,9 +53,6 @@ while [ $# -gt 1 ]; do
    --job=*)
      JOB="${OPT##*=}"
      ;;
    --main-binary-filepath=*)
      FILEPATH="${OPT##*=}"
      ;;
    --*)
      warn "Ignoring unknown option: '${OPT}'"
      ;;

@ -73,14 +67,13 @@ done
[ -z "${BUILD_TIME}" ] && die "The --build-time option must be specified"
[ -z "${SHA}" ] && die "The --sha256 option must be specified"
[ -z "${JOB}" ] && die "The --job option must be specified"
[ -z "${FILEPATH}" ] && die "The --main-binary-filepath option must be specified"

FILENAME="${1}"

: "${REPO:="/usr/local/www/pkg"}"
: "${REPO_KEY:="/usr/local/etc/builder-web/repo.key"}"

if [ "$(basename "${FILEPATH}" .pkg)" = "$(basename "${FILEPATH}")" ]; then
if [ "$(basename "${FILENAME}" .pkg)" = "$(basename "${FILENAME}")" ]; then
  echo "Not a FreeBSD package"
  exit 0
fi

@ -131,7 +124,6 @@ PKG_DIR="${REPO_DIR}/All"
# and then move it before recreating the index
pkg create -r "${PKG_ROOT}" -m "${MANIFEST}" -o "${TMP}"
mkdir -p "${PKG_DIR}"
rm -f "${PKG_DIR}"/"${NAME}"-*.pkg
mv "${TMP}/${NAME}-${FULL_VERSION}.pkg" "${PKG_DIR}"

pkg repo "${REPO_DIR}" "${REPO_KEY}"
@ -2,7 +2,7 @@ name: builder-web
version: %%VERSION_NUM%%
origin: local/builder-web
comment: Builder web service
www: https://git.robur.coop/robur/builder-web
www: https://git.robur.io/robur/builder-web
maintainer: Robur <team@robur.coop>
prefix: /usr/local
licenselogic: single
@ -33,7 +33,7 @@ procname="/usr/local/libexec/builder-web"

builder_web_start () {
  echo "Starting ${name}."
  /usr/sbin/daemon -S -r -P "${pidfile}" -u "${builder_web_user}" \
  /usr/sbin/daemon -S -p "${pidfile}" -u "${builder_web_user}" \
    "${procname}" ${builder_web_flags}
}
@ -26,7 +26,7 @@ die()
usage()
{
  cat <<EOM 1>&2
usage: ${prog_NAME} [ OPTIONS ]
usage: ${prog_NAME} [ OPTIONS ]
Generates visualizations of all things
  --data-dir=STRING
    Path to the data directory.

@ -77,8 +77,6 @@ done
DB="${DATA_DIR}/builder.sqlite3"
[ ! -e "$DB" ] && die "The database doesn't exist: '$DB'"

# Let's be somewhat lenient with the database version.
# In visualizations.sh we can be more strict.
DB_VERSION="$(sqlite3 "$DB" "PRAGMA user_version;")"
[ -z "$DB_VERSION" ] && die "Couldn't read database version from '$DB'"
[ "$DB_VERSION" -lt 16 ] && die "The database version should be >= 16. It is '$DB_VERSION'"

@ -87,7 +85,7 @@ APP_ID="$(sqlite3 "$DB" "PRAGMA application_id;")"
[ -z "$APP_ID" ] && die "Couldn't read application-id from '$DB'"
[ "$APP_ID" -ne 1234839235 ] && die "The application-id should be = 1234839235. It is '$APP_ID'"

echo
echo
echo "-----------------------------------------------------------------------------"
info "Starting batch creation of visualizations: $(date)"

@ -129,22 +127,8 @@ fi
ATTEMPTED_VIZS=0
FAILED_VIZS=0

distinct-input () {
  {
    sqlite3 "${DATA_DIR}/builder.sqlite3" "SELECT b.uuid
      FROM build b
      JOIN build_artifact opam ON opam.build = b.id
      WHERE opam.filepath = 'opam-switch' AND b.main_binary NOT NULL
      GROUP BY opam.sha256;"
    sqlite3 "${DATA_DIR}/builder.sqlite3" "SELECT b.uuid
      FROM build b
      JOIN build_artifact debug ON debug.build = b.id
      WHERE debug.filepath LIKE '%.debug' AND b.main_binary NOT NULL
      GROUP BY debug.sha256;"
  } | sort -u
}

for UUID in $(distinct-input); do
for i in $(find "${DATA_DIR}" -type f -path \*output/bin\*); do
  UUID=$(echo "${i}" | rev | cut -d '/' -f 4 | rev)
  if ! "$VISUALIZATIONS_CMD" \
    --data-dir="${DATA_DIR}" \
    --cache-dir="${CACHE_DIR}" \
@ -15,14 +15,11 @@ freebsd_sanitize_version () {
    exit 1;
  fi
  if [ $version_with_commit -eq 0 ]; then
    v="${v}.0.g0000000.${post}"
  else
    v="${v}.${post}"
    v="${v}.0.g0000000"
  fi
  echo $v
}

echo "using FreeBSD pkg to compare versions now:"
while read version_a version_b; do
  version_a=$(freebsd_sanitize_version $version_a)
  version_b=$(freebsd_sanitize_version $version_b)

@ -30,28 +27,7 @@ while read version_a version_b; do
  printf "%s %s %s\n" "$version_a" "$result" "$version_b"
done < versions.txt

debian_sanitize_version () {
  post=$(echo $1 | rev | cut -d '-' -f 1-2 | rev)
  v=$(echo $1 | rev | cut -d '-' -f 3- | rev)
  version_good=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$')
  version_with_commit=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\-[0-9]\+\-g[0-9a-fA-f]\+$')
  if [ $version_good -eq 0 -a $version_with_commit -eq 0 ]; then
    echo "invalid version $v";
    exit 1;
  fi
  if [ $version_with_commit -eq 0 ]; then
    v="${v}-0-g0000000-${post}"
  else
    v="${v}-${post}"
  fi
  echo $v
}

echo ""
echo "using Debian dpkg to compare versions now:"
while read version_a version_b; do
  version_a=$(debian_sanitize_version $version_a)
  version_b=$(debian_sanitize_version $version_b)
  if dpkg --compare-versions "$version_a" lt "$version_b"; then
    echo "$version_a < $version_b"
  else
@ -4,9 +4,9 @@ Section: unknown
Priority: optional
Maintainer: Robur Team <team@robur.coop>
Standards-Version: 4.4.1
Homepage: https://git.robur.coop/robur/builder-web
Vcs-Browser: https://git.robur.coop/robur/builder-web
Vcs-Git: https://git.robur.coop/robur/builder-web.git
Homepage: https://git.robur.io/robur/builder-web
Vcs-Browser: https://git.robur.io/robur/builder-web
Vcs-Git: https://git.robur.io/robur/builder-web.git
Architecture: all
Depends: libgmp10, libsqlite3-0, libev4, opam-graph, modulectomy
Description: Web service for storing and presenting builds.
@ -1,7 +1,7 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: builder-web
Upstream-Contact: Robur Team <team@robur.coop>
Source: https://git.robur.coop/robur/builder-web
Source: https://git.robur.io/robur/builder-web

Files: *
Copyright: "Reynir Björnsson <reynir@reynir.dk>" "Hannes Mehnert <hannes@mehnert.org>"
@ -36,8 +36,6 @@ Options:
    Job name that was built.
  --platform=STRING
    Platform name on which the build was performed.
  --main-binary-filepath=STRING
    The file path of the main binary.
EOM
  exit 1
}

@ -46,7 +44,6 @@ BUILD_TIME=
SHA=
JOB=
PLATFORM=
FILEPATH=

while [ $# -gt 1 ]; do
  OPT="$1"

@ -64,9 +61,6 @@ while [ $# -gt 1 ]; do
    --platform=*)
      PLATFORM="${OPT##*=}"
      ;;
    --main-binary-filepath=*)
      FILEPATH="${OPT##*=}"
      ;;
    --*)
      warn "Ignoring unknown option: '${OPT}'"
      ;;

@ -82,11 +76,10 @@ done
[ -z "${SHA}" ] && die "The --sha256 option must be specified"
[ -z "${JOB}" ] && die "The --job option must be specified"
[ -z "${PLATFORM}" ] && die "The --platform option must be specified"
[ -z "${FILEPATH}" ] && die "The --main-binary-filepath option must be specified"

FILENAME="${1}"

if [ $(basename "${FILEPATH}" .deb) = $(basename "${FILEPATH}") ]; then
if [ $(basename "${FILENAME}" .deb) = $(basename "${FILENAME}") ]; then
  echo "Not a Debian package"
  exit 0
fi

@ -111,16 +104,6 @@ mkdir "${PKG_ROOT}"
dpkg-deb -R "${FILENAME}" "${PKG_ROOT}"

VERSION=$(dpkg-deb -f "${FILENAME}" Version)
# if we've a tagged version (1.5.0), append the number of commits and a dummy hash
VERSION_GOOD=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$') || true
VERSION_WITH_COMMIT=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\-[0-9]\+\-g[0-9a-fA-f]\+$') || true
if [ $VERSION_GOOD -eq 0 -a $VERSION_WITH_COMMIT -eq 0 ]; then
  die "version does not conform to (MAJOR.MINOR.PATCH[-#NUM_COMMITS-g<HASH>])"
fi
if [ $VERSION_WITH_COMMIT -eq 0 ]; then
  VERSION="${VERSION}-0-g0000000"
fi

NEW_VERSION="${VERSION}"-"${BUILD_TIME}"-"${SHA}"

sed -i "" -e "s/Version:.*/Version: ${NEW_VERSION}/g" "${PKG_ROOT}/DEBIAN/control"

@ -131,8 +114,6 @@ if ! aptly repo show "${PLATFORM}" > /dev/null 2>&1; then
  aptly repo create --distribution="${PLATFORM}" "${PLATFORM}"
fi

PACKAGE=$(dpkg-deb -f "${FILENAME}" Package)
aptly repo remove "${PLATFORM}" "${PACKAGE}"
aptly repo add "${PLATFORM}" "${TMP}"

: "${REPO_KEYID:="D5E2DC92617877EDF7D4FD4345EA05FB7E26053D"}"
@ -6,4 +6,3 @@
3.0.0-20230101-abcd 3.0.1-20230204-bdbd
1.5.0-20220516-a0d5a2 1.5.0-3-g26b5a59-20220527-0bc180
1.5.0-3-g26b5a59-20220527-0bc180 1.5.1-20220527-0bc180
0.1.0-20221120104301-f9e456637274844d45d9758ec661a136d0cda7966b075e4426b69fe6da00427b 0.1.0-237-g62965d4-20230527202149-6118c39221f318154e234098b5cffd4dc1d80f19cf2200cc6b1eb768dbf6decb
@ -70,59 +70,46 @@ done
[ -z "${CACHE_DIR}" ] && die "The --cache-dir option must be specified"
[ -z "${DATA_DIR}" ] && die "The --data-dir option must be specified"

info "processing UUID '${UUID}'"
info "processing UUID '$UUID'"

DB="${DATA_DIR}/builder.sqlite3"

# A new visualizations.sh script may be installed during an upgrade while the
# old builder-web binary is running. In that case things can get out of sync.
DB_VERSION="$(sqlite3 "$DB" "PRAGMA user_version;")"
[ -z "$DB_VERSION" ] && die "Couldn't read database version from '$DB'"
[ "$DB_VERSION" -ne 18 ] && die "The database version should be 18. It is '$DB_VERSION'"

APP_ID="$(sqlite3 "$DB" "PRAGMA application_id;")"
[ -z "$APP_ID" ] && die "Couldn't read application-id from '$DB'"
[ "$APP_ID" -ne 1234839235 ] && die "The application-id should be 1234839235. It is '$APP_ID'"

get_main_binary () {
  sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
    FROM build AS b
  sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
    JOIN build_artifact AS ba ON ba.build = b.id AND b.main_binary = ba.id
    WHERE uuid = '${UUID}';"
    WHERE uuid = '$UUID';"
}

BIN="${DATA_DIR}/$(get_main_binary)" || die "Failed to get main binary from database"
[ -z "${BIN}" ] && die "No main-binary found in db '${DB}' for build '${UUID}'"
[ -z "${BIN}" ] && die "No main-binary found in db '$DB' for build '$UUID'"

get_debug_binary () {
  sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
    FROM build AS b
  sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
    JOIN build_artifact AS ba ON ba.build = b.id
    WHERE
    uuid = '${UUID}'
    AND ba.filepath LIKE '%.debug';"
    uuid = '$UUID'
    AND ba.localpath LIKE '%.debug';"
}

DEBUG_BIN_RELATIVE="$(get_debug_binary)" || die "Failed to get debug binary from database"

get_opam_switch () {
  sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
    FROM build AS b
  sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
    JOIN build_artifact AS ba ON ba.build = b.id
    WHERE
    uuid = '${UUID}'
    uuid = '$UUID'
    AND ba.filepath = 'opam-switch';"
}

OPAM_SWITCH="$(get_opam_switch)" || die "Failed to get opam switch from database"
[ -z "${OPAM_SWITCH}" ] && die "No 'opam-switch' found in db '${DB}' for build '${UUID}'"
[ -z "${OPAM_SWITCH}" ] && die "No 'opam-switch' found in db '$DB' for build '$UUID'"
OPAM_SWITCH="${DATA_DIR}/${OPAM_SWITCH}"

OPAM_GRAPH="opam-graph"
MODULECTOMY="modulectomy"

LATEST_TREEMAPVIZ_VERSION="$(${MODULECTOMY} --version)" || die "Failed to get modulectomy version"
LATEST_DEPENDENCIESVIZ_VERSION="$(${OPAM_GRAPH} --version)" || die "Failed to get opam-graph version"
LATEST_TREEMAPVIZ_VERSION="$($MODULECTOMY --version)" || die "Failed to get modulectomy version"
LATEST_DEPENDENCIESVIZ_VERSION="$($OPAM_GRAPH --version)" || die "Failed to get opam-graph version"

TREEMAP_CACHE_DIR="${CACHE_DIR}/treemap_${LATEST_TREEMAPVIZ_VERSION}"
DEPENDENCIES_CACHE_DIR="${CACHE_DIR}/dependencies_${LATEST_DEPENDENCIESVIZ_VERSION}"

@ -149,7 +136,7 @@ trap cleanup EXIT
# /// Dependencies viz

if [ ! -d "${DEPENDENCIES_CACHE_DIR}" ]; then
  mkdir "${DEPENDENCIES_CACHE_DIR}" || die "Failed to create directory '${DEPENDENCIES_CACHE_DIR}'"
  mkdir "${DEPENDENCIES_CACHE_DIR}" || die "Failed to create directory '$DEPENDENCIES_CACHE_DIR'"
fi

OPAM_SWITCH_FILEPATH='opam-switch'

@ -157,8 +144,8 @@ OPAM_SWITCH_FILEPATH='opam-switch'
get_opam_switch_hash () {
  sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
    JOIN build_artifact AS ba ON ba.build = b.id
    WHERE uuid = '${UUID}'
    AND ba.filepath = '${OPAM_SWITCH_FILEPATH}';"
    WHERE uuid = '$UUID'
    AND ba.filepath = '$OPAM_SWITCH_FILEPATH';"
}

DEPENDENCIES_INPUT_HASH="$(get_opam_switch_hash)" || die "Failed to get opam-switch hash from database"

@ -168,8 +155,7 @@ if [ -e "${DEPENDENCIES_VIZ_FILENAME}" ]; then
  info "Dependency visualization already exists: '${DEPENDENCIES_VIZ_FILENAME}'"
else
  if ${OPAM_GRAPH} --output-format=html "${OPAM_SWITCH}" > "${TMPDEPENDENCIES}"; then
    cp "${TMPDEPENDENCIES}" "${DEPENDENCIES_VIZ_FILENAME}"
    rm "${TMPDEPENDENCIES}"
    mv "${TMPDEPENDENCIES}" "${DEPENDENCIES_VIZ_FILENAME}"
  else
    die "opam-graph failed to generate visualization"
  fi

@ -187,16 +173,16 @@ stat_aux () {
  fi
}

SIZE="$(stat_aux "${BIN}")"
SIZE="$(stat_aux "$BIN")"

if [ ! -d "${TREEMAP_CACHE_DIR}" ]; then
  mkdir "${TREEMAP_CACHE_DIR}" || die "Failed to create directory '${TREEMAP_CACHE_DIR}'"
  mkdir "${TREEMAP_CACHE_DIR}" || die "Failed to create directory '$TREEMAP_CACHE_DIR'"
fi

get_debug_bin_hash () {
  sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
    JOIN build_artifact AS ba ON ba.build = b.id
    WHERE uuid = '${UUID}'
    WHERE uuid = '$UUID'
    AND ba.filepath LIKE '%.debug';"
}

@ -215,8 +201,7 @@ if [ -n "${DEBUG_BIN_RELATIVE}" ]; then
    "${DEBUG_BIN}" \
    > "${TMPTREE}"
  then
    cp "${TMPTREE}" "${TREEMAP_VIZ_FILENAME}"
    rm "${TMPTREE}"
    mv "${TMPTREE}" "${TREEMAP_VIZ_FILENAME}"
  else
    die "modulectomy failed to generate visualization"
  fi
10 test/dune

@ -1,16 +1,20 @@
(test
 (name test_builder_db)
 (modules test_builder_db)
 (libraries ptime.clock.os builder_db caqti.blocking alcotest mirage-crypto-rng.unix ohex))
 (libraries builder_db caqti.blocking alcotest mirage-crypto-rng.unix))

(test
 (name markdown_to_html)
 (modules markdown_to_html)
 (libraries builder_web cmarkit alcotest))
 (libraries builder_web alcotest))

(test
 (name router)
 (modules router)
 (libraries builder_web fmt dream yojson alcotest)
 (preprocess
  (pps ppx_deriving.std ppx_deriving_yojson)))
  (pps
   ppx_deriving.std
   ppx_deriving_yojson
   ))
 )
@ -1,14 +1,14 @@
let markdown_to_html = Builder_web__Utils.md_to_html
let markdown_to_html = Builder_web__Utils.Omd.html_of_string

let test_simple () =
  let markdown = {|# Hello world|} in
  let html = markdown_to_html markdown in
  Alcotest.(check string "simple html" "<h1 id=\"hello-world\"><a class=\"anchor\" aria-hidden=\"true\" href=\"#hello-world\"></a>Hello world</h1>\n" html)
  Alcotest.(check string "simple html" "<h1>Hello world</h1>\n" html)

let test_html_script () =
  let markdown = {|# <script>Hello world</script>|} in
  let html = markdown_to_html markdown in
  Alcotest.(check string "html script header" "<h1 id=\"hello-world\"><a class=\"anchor\" aria-hidden=\"true\" href=\"#hello-world\"></a><!-- CommonMark raw HTML omitted -->Hello world<!-- CommonMark raw HTML omitted --></h1>\n" html)
  Alcotest.(check string "html script header" "<h1>Hello world</h1>\n" html)

let test_preserve_span_content () =
  let markdown = {|* <span id="myref">My ref</span>

@ -16,8 +16,10 @@ let test_preserve_span_content () =
  let html = markdown_to_html markdown in
  Alcotest.(check string "html span content preserved"
    {|<ul>
<li><!-- CommonMark raw HTML omitted -->My ref<!-- CommonMark raw HTML omitted --></li>
<li><a href="#myref">See my ref</a> for more information</li>
<li>My ref
</li>
<li>See my ref for more information
</li>
</ul>
|}
    html)

@ -25,21 +27,20 @@ let test_preserve_span_content () =
let test_remove_script () =
  let markdown = {|<script>alert(1);</script>|} in
  let html = markdown_to_html markdown in
  Alcotest.(check string "html script removed" "<!-- CommonMark HTML block omitted -->\n" html)
  Alcotest.(check string "html script removed" "" html)

let test_list_with_html_block_and_markdown () =
  let markdown = "* <div> Hello, World!</div> *this is not html*" in
  let html = markdown_to_html markdown in
  Alcotest.(check string "list with html block and markdown"
    (*"<ul>\n<li><em>this is not html</em>\n</li>\n</ul>\n"*)
    "<ul>\n<li>\n<!-- CommonMark HTML block omitted -->\n</li>\n</ul>\n"
    (*"<ul>\n<li><em>this is not html</em>\n</li>\n</ul>\n"*) ""
    html)

let test_list_with_inline_html_and_markdown () =
  let markdown = "* <span> Hello, World!</span> *this is not html*" in
  let html = markdown_to_html markdown in
  Alcotest.(check string "list with html block and markdown"
    "<ul>\n<li><!-- CommonMark raw HTML omitted --> Hello, World!<!-- CommonMark raw HTML omitted --> <em>this is not html</em></li>\n</ul>\n"
    "<ul>\n<li> Hello, World! <em>this is not html</em>\n</li>\n</ul>\n"
    html)

let test_absolute_link () =

@ -50,131 +51,35 @@ let test_absolute_link () =
let test_relative_link () =
  let markdown = "[foo](../foo.jpg)" in
  let html = markdown_to_html markdown in
  Alcotest.(check string "relative link" "<p><a href=\"../foo.jpg\">foo</a></p>\n" html)
  Alcotest.(check string "relative link" "<p>foo</p>\n" html)

let test_absolute_image () =
  let markdown = "![alttext](https://foo.com/bar.jpg)" in
  let html = markdown_to_html markdown in
  Alcotest.(check string "absolute image"
    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"alttext\" ></p>\n" html)
    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"alttext\" /></p>\n" html)

let test_absolute_image_no_alt () =
  let markdown = "![](https://foo.com/bar.jpg)" in
  let html = markdown_to_html markdown in
  Alcotest.(check string "absolute image"
    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" ></p>\n" html)
    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" /></p>\n" html)

let test_relative_image () =
  let markdown = "![](/bar.jpg)" in
  let html = markdown_to_html markdown in
  Alcotest.(check string "relative image" "<p><img src=\"/bar.jpg\" alt=\"\" ></p>\n" html)
  Alcotest.(check string "relative image" "" html)

let test_absolute_image_script_alt () =
  let markdown = "![<script src=\"bla.js\"></script>](https://foo.com/bar.jpg)" in
  let html = markdown_to_html markdown in
  Alcotest.(check string "absolute image with script alt text"
    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" ></p>\n" html)
    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" /></p>\n" html)

let test_fragment_link () =
  let markdown = "[fragment](#fragment)" in
  let html = markdown_to_html markdown in
  Alcotest.(check string "fragment link" "<p><a href=\"#fragment\">fragment</a></p>\n" html)

let test_heading_adjustment () =
  let markdown = {|# foo
## bar
# baz
## bazbar
### bazbarbar
#### bazbarbarbar
##### bazbarbarbarbar
###### bazbarbarbarbarbar
|}
  in
  let html = markdown_to_html ~adjust_heading:2 markdown in
  (* NB: the maximum heading is 6 in cmarkit, thus we reduce the structure *)
  let exp = {|<h3 id="foo"><a class="anchor" aria-hidden="true" href="#foo"></a>foo</h3>
<h4 id="bar"><a class="anchor" aria-hidden="true" href="#bar"></a>bar</h4>
|
||||
<h3 id="baz"><a class="anchor" aria-hidden="true" href="#baz"></a>baz</h3>
|
||||
<h4 id="bazbar"><a class="anchor" aria-hidden="true" href="#bazbar"></a>bazbar</h4>
|
||||
<h5 id="bazbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbar"></a>bazbarbar</h5>
|
||||
<h6 id="bazbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbar"></a>bazbarbarbar</h6>
|
||||
<h6 id="bazbarbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbarbar"></a>bazbarbarbarbar</h6>
|
||||
<h6 id="bazbarbarbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbarbarbar"></a>bazbarbarbarbarbar</h6>
|
||||
|} in
|
||||
Alcotest.(check string "header adjustment works fine" exp html)
|
||||
|
||||
let test_table () =
|
||||
let markdown = {__|| a | | b | c | d | e |
|
||||
| --------------------- |-| -------------- | -------------- | --------------- | ------ |
|
||||
| entry | | **bla.file** | **other.file** | | |
|
||||
| _another entry_ | | **another.file** | **another.other** | | |
|
||||
|__}
|
||||
in
|
||||
let html = markdown_to_html ~adjust_heading:2 markdown in
|
||||
let exp = {|<div role="region"><table>
|
||||
<tr>
|
||||
<th>a</th>
|
||||
<th></th>
|
||||
<th>b</th>
|
||||
<th>c</th>
|
||||
<th>d</th>
|
||||
<th>e</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>entry</td>
|
||||
<td></td>
|
||||
<td><strong>bla.file</strong></td>
|
||||
<td><strong>other.file</strong></td>
|
||||
<td></td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><em>another entry</em></td>
|
||||
<td></td>
|
||||
<td><strong>another.file</strong></td>
|
||||
<td><strong>another.other</strong></td>
|
||||
<td></td>
|
||||
<td></td>
|
||||
</tr>
|
||||
</table></div>|} in
|
||||
Alcotest.(check string "table is rendered as html" exp html)
|
||||
|
||||
let test_table2 () =
|
||||
let markdown = {__|| a | | b | c | d | e |
|
||||
| --------------------- |-| -------------- | -------------- | --------------- | ------ |
|
||||
| entry | | | | **bla.file** | **other.file** |
|
||||
| _another entry_ | | | **another.file** | **another.other** | |
|
||||
|__}
|
||||
in
|
||||
let html = markdown_to_html ~adjust_heading:2 markdown in
|
||||
let exp = {|<div role="region"><table>
|
||||
<tr>
|
||||
<th>a</th>
|
||||
<th></th>
|
||||
<th>b</th>
|
||||
<th>c</th>
|
||||
<th>d</th>
|
||||
<th>e</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>entry</td>
|
||||
<td></td>
|
||||
<td></td>
|
||||
<td></td>
|
||||
<td><strong>bla.file</strong></td>
|
||||
<td><strong>other.file</strong></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><em>another entry</em></td>
|
||||
<td></td>
|
||||
<td></td>
|
||||
<td><strong>another.file</strong></td>
|
||||
<td><strong>another.other</strong></td>
|
||||
<td></td>
|
||||
</tr>
|
||||
</table></div>|} in
|
||||
Alcotest.(check string "table is rendered as html" exp html)
|
||||
Alcotest.(check string "fragment link" "<p>fragment</p>\n" html)
|
||||
|
||||
let markdown_tests = [
|
||||
Alcotest.test_case "Simple" `Quick test_simple;
|
||||
|
@ -190,9 +95,6 @@ let markdown_tests = [
|
|||
Alcotest.test_case "relative image" `Quick test_relative_image;
|
||||
Alcotest.test_case "absolute image with script alt" `Quick test_absolute_image_script_alt;
|
||||
Alcotest.test_case "fragment link" `Quick test_fragment_link;
|
||||
Alcotest.test_case "heading adjustment" `Quick test_heading_adjustment;
|
||||
Alcotest.test_case "table" `Quick test_table;
|
||||
Alcotest.test_case "table2" `Quick test_table2;
|
||||
]
|
||||
|
||||
let () =
|
||||
|
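A quick sketch of what the revised expectations above encode: rendering now goes through omd rather than cmarkit, so headings carry no id/anchor markup and raw HTML is dropped rather than replaced by `<!-- CommonMark ... omitted -->` comments. This is a minimal sketch assuming omd's classic `of_string`/`to_html` API; the `Builder_web__Utils.Omd.html_of_string` wrapper may add its own sanitization on top:

```ocaml
(* Minimal sketch, not the project's wrapper: omd's two-step API.
   Assumes omd provides of_string : string -> doc and to_html : doc -> string. *)
let () =
  let html = Omd.to_html (Omd.of_string "# Hello world") in
  (* the test above expects "<h1>Hello world</h1>\n" from the wrapper;
     note there is no id="hello-world" anchor as in the cmarkit output *)
  print_string html
```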
test/router.ml (104 changed lines)
@@ -2,7 +2,7 @@
 module Param_verification = struct
 
   (*> None is 'verified'*)
-  type t = wrong_type option
+  type t = wrong_type option
   [@@deriving yojson,show,eq]
 
   and wrong_type = {

@@ -14,30 +14,40 @@ module Param_verification = struct
 
   module P = struct
 
-    let is_string : (string * string) -> _ option =
-      Fun.const None
-
-    let is_uuid (param, value) =
-      match Uuidm.of_string value with
-      | Some _ when String.length value = 36 -> None
-      | _ -> Some {
-          param;
-          expected = "Uuidm.t"
-        }
+    let is_string : (string * string) option -> _ option = function
+      | Some _ -> None
+      | None -> None
+
+    let is_uuid = function
+      | Some (param, value) ->
+        begin match Uuidm.of_string value with
+        | Some _ when String.length value = 36 -> None
+        | _ -> Some {
+            param;
+            expected = "Uuidm.t"
+          }
+        end
+      | None -> None
 
   end
 
-  let verify parameters req =
-    let verified_params =
-      List.fold_left (fun acc p ->
-          match acc with
-          | None ->
-            if String.starts_with ~prefix:"build" p then
-              P.is_uuid (p, Dream.param req p)
-            else
-              P.is_string (p, Dream.param req p)
-          | Some _ as x -> x)
-        None parameters
+  let param req tag =
+    match Dream.param req tag with
+    | param -> Some (tag, param)
+    | exception _ -> None
+
+  let ( &&& ) v v' =
+    match v with
+    | None -> v'
+    | Some _ as some -> some
+
+  let verify req =
+    let verified_params =
+      P.is_string (param req "job")
+      &&& P.is_uuid (param req "build")
+      &&& P.is_uuid (param req "build_left")
+      &&& P.is_uuid (param req "build_right")
+      &&& P.is_string (param req "platform")
     in
     let response_json =
       verified_params |> to_yojson |> Yojson.Safe.to_string

@@ -45,23 +55,15 @@ module Param_verification = struct
     Dream.respond response_json
 
 end
 
-let find_parameters path =
-  List.filter_map (fun s ->
-      if String.length s > 0 && String.get s 0 = ':' then
-        Some (String.sub s 1 (String.length s - 1))
-      else
-        None)
-    (String.split_on_char '/' path)
-
 
 let router =
   (* XXX: this relies on [Builder_web.routes] only using {data,cache,config}dir
    * in the handlers which are never called here. The path /nonexistant is
    * assumed to not exist. *)
   let nodir = Fpath.v "/nonexistant" in
-  Builder_web.routes ~datadir:nodir ~cachedir:nodir ~configdir:nodir ~expired_jobs:0
+  Builder_web.routes ~datadir:nodir ~cachedir:nodir ~configdir:nodir
   |> List.map (fun (meth, route, _handler) ->
-      meth, route, Param_verification.verify (find_parameters route))
+      meth, route, Param_verification.verify)
   |> Builder_web.to_dream_routes
   |> Dream.router
   (* XXX: we test without remove_trailing_url_slash to ensure we don't produce

@@ -83,7 +85,7 @@ let test_link method_ target () =
   Alcotest.(check' (result Param_verification.alcotyp string) ~msg:"param-verification"
     ~actual:body ~expected:(Ok None))
 
-let test_link_artifact artifact =
+let test_link_artifact artifact =
   let job_name = "test" in
   let build = Uuidm.v `V4 in
   test_link `GET @@

@@ -147,7 +149,7 @@ let () =
     end;
     test_case "Link.Failed_builds.make" `Quick begin
       test_link `GET @@
-      Builder_web.Link.Failed_builds.make ~count:2 ~start:1 ()
+      Builder_web.Link.Failed_builds.make ~count:2 ~start:1 ()
     end;
   ];
   (* this doesn't actually test the redirects, unfortunately *)

@@ -157,35 +159,5 @@ let () =
       "/f/bin/unikernel.hvt";
       "/";
       "";
     ];
-    "Albatross hardcoded links",
-    [
-      (*> Note: these links can be found in
-          albatross/command-line/albatross_client_update.ml
-          .. to find them I follewed the trails of 'Albatross_cli.http_host'
-      *)
-      begin
-        let sha_str =
-          Digestif.SHA256.(to_raw_string (digest_string "foo"))
-          |> Ohex.encode
-        in
-        Fmt.str "/hash?sha256=%s" sha_str
-      end;
-      begin
-        let jobname = "foo" in
-        "/job/" ^ jobname ^ "/build/latest"
-      end;
-      begin
-        let job = "foo" in
-        let build = Uuidm.(v `V4 |> to_string) in
-        "/job/" ^ job ^ "/build/" ^ build ^ "/main-binary"
-      end;
-      begin
-        let old_uuid = Uuidm.(v `V4 |> to_string) in
-        let new_uuid = Uuidm.(v `V4 |> to_string) in
-        Fmt.str "/compare/%s/%s" old_uuid new_uuid
-      end;
-    ]
-    |> List.map Alcotest.(fun p ->
-        test_case ("…" ^ p) `Quick (test_link `GET p))
   ]
 ]
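The rewritten router test replaces the fold over extracted path parameters with explicit per-parameter checks chained by `( &&& )`. Its first-failure-wins semantics are small enough to state as a self-contained sketch (the combinator is copied from the diff above; `None` means "verified", `Some err` carries the first failure):

```ocaml
(* First-failure-wins chaining, as defined in the diff above. *)
let ( &&& ) v v' =
  match v with
  | None -> v'             (* verified so far: keep checking *)
  | Some _ as some -> some (* already failed: short-circuit *)

let () =
  assert ((None &&& None) = None);
  assert ((None &&& Some "build: expected Uuidm.t") = Some "build: expected Uuidm.t");
  (* the first failure wins, later ones are ignored *)
  assert ((Some "first" &&& Some "second") = Some "first")
```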
test/test_builder_db.ml

@@ -3,7 +3,7 @@ let ( >>| ) x f = Result.map f x
 
 module type CONN = Caqti_blocking.CONNECTION
 
-let () = Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna)
+let () = Mirage_crypto_rng_unix.initialize ()
 
 let iter f xs = List.fold_left (fun r x -> r >>= fun () -> f x) (Ok ()) xs
 let get_opt message = function

@@ -25,8 +25,8 @@ module Testable = struct
       x.restricted = y.restricted &&
       match x.password_hash, y.password_hash with
       | `Scrypt (hash, salt, params), `Scrypt (hash', salt', params') ->
-        String.equal hash hash' &&
-        String.equal salt salt' &&
+        Cstruct.equal hash hash' &&
+        Cstruct.equal salt salt' &&
         params = params'
     in
     let pp ppf { Builder_web_auth.username; password_hash; restricted } =

@@ -34,7 +34,7 @@ module Testable = struct
       | `Scrypt (hash, salt, { Builder_web_auth.scrypt_n; scrypt_r; scrypt_p }) ->
         Format.fprintf ppf "user:%s;(%d,%d,%d);%B;%a;%a" username
           scrypt_n scrypt_r scrypt_p restricted
-          Ohex.pp hash Ohex.pp salt
+          Cstruct.hexdump_pp hash Cstruct.hexdump_pp salt
     in
     Alcotest.testable
       pp

@@ -43,15 +43,18 @@ module Testable = struct
   let file =
     let equal (x : Builder_db.Rep.file) (y : Builder_db.Rep.file) =
       Fpath.equal x.filepath y.filepath &&
-      String.equal x.sha256 y.sha256 &&
+      Fpath.equal x.localpath y.localpath &&
+      Cstruct.equal x.sha256 y.sha256 &&
       x.size = y.size
     in
-    let pp ppf { Builder_db.Rep.filepath; sha256; size } =
+    let pp ppf { Builder_db.Rep.filepath; localpath; sha256; size } =
       Format.fprintf ppf "{@[<v 1>@;<1 0>Builder_db.Rep.filepath = %a;@;<1 0>\
+                          localpath = %a;@;<1 0>\
                           sha256 = %a;@;<1 0>\
                           size = %d;@;<1 0>\
                           @]@,}"
-        Fpath.pp filepath Ohex.pp sha256 size
+        Fpath.pp filepath Fpath.pp localpath
+        Cstruct.hexdump_pp sha256 size
     in
     Alcotest.testable pp equal
 

@@ -130,13 +133,14 @@ let finish = Option.get (Ptime.of_float_s 1.)
 let result = Builder.Exited 0
 let main_binary =
   let filepath = Result.get_ok (Fpath.of_string "bin/hello.sh") in
+  let localpath = Result.get_ok (Fpath.of_string "/dev/null") in
   let data = "#!/bin/sh\necho Hello, World\n" in
-  let sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
+  let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
   let size = String.length data in
-  { Builder_db.Rep.filepath; sha256; size }
+  { Builder_db.Rep.filepath; localpath; sha256; size }
 let main_binary2 =
   let data = "#!/bin/sh\necho Hello, World 2\n" in
-  let sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
+  let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
   let size = String.length data in
   { main_binary with sha256 ; size }
 let platform = "exotic-os"

@@ -145,17 +149,21 @@ let fail_if_none a =
   Option.to_result ~none:(`Msg "Failed to retrieve") a
 
 let add_test_build user_id (module Db : CONN) =
-  let open Builder_db in
-  Db.start () >>= fun () ->
-  Db.exec Job.try_add job_name >>= fun () ->
-  Db.find_opt Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
-  Db.exec Build.add { Build.uuid; start; finish; result; console; script; platform;
-                      main_binary = None; input_id = None; user_id; job_id } >>= fun () ->
-  Db.find last_insert_rowid () >>= fun id ->
-  Db.exec Build_artifact.add (main_binary, id) >>= fun () ->
-  Db.find last_insert_rowid () >>= fun main_binary_id ->
-  Db.exec Build.set_main_binary (id, main_binary_id) >>= fun () ->
-  Db.commit ()
+  let r =
+    let open Builder_db in
+    Db.start () >>= fun () ->
+    Db.exec Job.try_add job_name >>= fun () ->
+    Db.find_opt Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
+    Db.exec Build.add { Build.uuid; start; finish; result; console; script; platform;
+                        main_binary = None; input_id = None; user_id; job_id } >>= fun () ->
+    Db.find last_insert_rowid () >>= fun id ->
+    Db.exec Build_artifact.add (main_binary, id) >>= fun () ->
+    Db.find last_insert_rowid () >>= fun main_binary_id ->
+    Db.exec Build.set_main_binary (id, main_binary_id) >>= fun () ->
+    Db.commit ()
+  in
+  Result.fold ~ok:Result.ok ~error:(fun _ -> Db.rollback ())
+    r
 
 let with_build_db f () =
   or_fail

@@ -219,7 +227,7 @@ let test_build_get_latest (module Db : CONN) =
   Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
   Db.find_opt Builder_db.Build.get_latest_successful_with_binary (job_id, platform)
   >>| get_opt "no latest build" >>| fun (_id, meta, main_binary') ->
-  Alcotest.(check Testable.file) "same main binary" main_binary2 main_binary';
+  Alcotest.(check (option Testable.file)) "same main binary" main_binary' (Some main_binary2);
   Alcotest.(check Testable.uuid) "same uuid" meta.uuid uuid'
 
 let test_build_get_previous (module Db : CONN) =

@@ -261,14 +269,6 @@ let test_artifact_get_by_build_uuid (module Db : CONN) =
   get_opt "no build" >>| fun (_id, file) ->
   Alcotest.(check Testable.file) "same file" file main_binary
 
-let test_artifact_exists_true (module Db : CONN) =
-  Db.find Builder_db.Build_artifact.exists main_binary.sha256 >>| fun exists ->
-  Alcotest.(check bool) "main binary exists" true exists
-
-let test_artifact_exists_false (module Db : CONN) =
-  Db.find Builder_db.Build_artifact.exists main_binary2.sha256 >>| fun exists ->
-  Alcotest.(check bool) "main binary2 doesn't exists" false exists
-
 (* XXX: This test should fail because main_binary on the corresponding build
  * references its main_binary. This is not the case now due to foreign key. *)
 let test_artifact_remove_by_build (module Db : CONN) =

@@ -276,39 +276,6 @@ let test_artifact_remove_by_build (module Db : CONN) =
   get_opt "no build" >>= fun (id, _build) ->
   Db.exec Builder_db.Build_artifact.remove_by_build id
 
-let test_get_builds_older_than (module Db : CONN) =
-  add_second_build (module Db) >>= fun () ->
-  let date = Option.get (Ptime.of_float_s (3600. /. 2.)) in
-  Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
-  Db.collect_list Builder_db.Build.get_builds_older_than (job_id, None, date) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "last build" builds [ uuid ];
-  Db.collect_list Builder_db.Build.get_builds_older_than (job_id, None, Ptime_clock.now ()) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  (* NOTE(dinosaure): from the most recent to the older. *)
-  Alcotest.(check (list Testable.uuid)) "last builds" builds [ uuid'; uuid ];
-  Ok ()
-
-let test_builds_excluding_latest_n (module Db : CONN) =
-  add_second_build (module Db) >>= fun () ->
-  Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
-  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 1) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "keep recent build" builds [ uuid ];
-  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 2) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "keep 2 builds" builds [];
-  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 3) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "last more builds than we have" builds [];
-  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 0) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "delete all builds" builds [ uuid'; uuid ];
-  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, -1) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "test an incomprehensible argument (-1)" builds [ uuid'; uuid ];
-  Ok ()
-
 let () =
   let open Alcotest in
   Alcotest.run "Builder_db" [

@@ -339,12 +306,6 @@ let () =
     "build-artifact", [
      test_case "Get all by build" `Quick (with_build_db test_artifact_get_all_by_build);
      test_case "Get by build uuid" `Quick (with_build_db test_artifact_get_by_build_uuid);
-     test_case "Artifact exists" `Quick (with_build_db test_artifact_exists_true);
-     test_case "Other artifact doesn't exists" `Quick (with_build_db test_artifact_exists_false);
      test_case "Remove by build" `Quick (with_build_db test_artifact_remove_by_build);
     ];
-    "vacuum", [
-     test_case "Get builds older than now" `Quick (with_build_db test_get_builds_older_than);
-     test_case "Get older builds and keep a fixed number of then" `Quick (with_build_db test_builds_excluding_latest_n);
-    ]
   ]
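The test suite's hashing flips between digestif's string-based API and the Cstruct-based `Mirage_crypto.Hash` module (shipped by mirage-crypto before 1.0). Both compute the same SHA-256 bytes; a minimal sketch under those version assumptions:

```ocaml
(* Sketch only: assumes digestif, cstruct, and a pre-1.0 mirage-crypto
   that still ships the Mirage_crypto.Hash module. *)
let data = "#!/bin/sh\necho Hello, World\n"

(* removed side: digestif, plain strings in and out *)
let digestif_sha256 : string =
  Digestif.SHA256.(to_raw_string (digest_string data))

(* added side: mirage-crypto, Cstruct.t in and out *)
let mirage_sha256 : Cstruct.t =
  Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data)

(* the raw 32-byte digests agree *)
let () = assert (String.equal digestif_sha256 (Cstruct.to_string mirage_sha256))
```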