Compare commits
2 commits: main ... size-time-

Author | SHA1 | Date
---|---|---
 | de5a4f3f87 |
 | 4b976e9d04 |

59 changed files with 813 additions and 1579 deletions
CHANGES.md (26 changes)
@@ -1,27 +1,3 @@
-## v0.2.0 (2024-09-05)
-
-A whole slew of changes. Internally, we made a lot of incremental changes and improvements without doing a release. Thus this release is rather big. There are a lot of database migrations to apply, and unfortunately they have to be applied one at a time.
-
-* Add a /failed-builds/ endpoint that lists the most recent failed builds.
-* By default, don't display failed builds on the front page.
-* Times are printed with the 'Z' time zone offset indicator.
-* Links to comparisons of builds take into account whether the "input", among others the list of dependencies, is different.
-* New subcommand `builder-db extract-build` takes a build UUID and extracts the builder "full" file.
-* Add a /job/<job>/build/<build>/all.tar.gz endpoint with a gzip-compressed tar archive of all build artifacts.
-* Visual overhaul.
-* Add (optional) visualizations displaying package dependencies ("opam-graph") and, for unikernels, a "modulectomy" view of how much each OCaml module contributes to the final binary size. The visualizations are read from a cache on disk and can be generated from a script.
-* A script hook is added on file upload. It may be used to generate visualizations or publish system packages to a repository.
-* The 404 file-not-found page tries to be more informative.
-* The build page for a unikernel build displays the solo5 device manifest, e.g. `with block devices "storage", and net devices "service"`.
-* URLs with a trailing slash redirect to the URL without the trailing slash.
-* Builder-web will try to be more helpful if its database doesn't exist or the database version is wrong.
-* The opam diff works for mirage 4 unikernels, taking into account the opam-monorepo/duniverse packages.
-* Markdown rendering is now done using cmarkit instead of omd.
-* Builder-web doesn't display jobs older than 30 days (customizable with the `--expired-jobs` command line argument) on the front page.
-* Build artifacts are stored by their content, and artifacts are automatically deduplicated. This makes builder-web much more space efficient on deployments that don't use deduplication at the filesystem level.
-* New subcommands `builder-db vacuum *` to remove older builds. Can be called from a cron job to keep disk usage bounded.
-* Lots of other improvements and bug fixes.
-
-## v0.1.0 (2021-11-12)
+# v0.1.0 (2021-11-12)
 
 * Initial public release
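The artifact-deduplication entry above works by storing each artifact under its SHA-256 digest. A minimal sketch of that path computation, mirroring the `artifact_path` function visible in the builder-db hunks further down (`Digestif` and `Ohex` as used on the main side; the function name `path_for_content` is made up for illustration):

```ocaml
(* sketch: content-addressed location of an artifact, assuming the
   two-character fan-out scheme of main's artifact_path *)
let path_for_content data =
  let raw = Digestif.SHA256.(to_raw_string (digest_string data)) in
  let hex = Ohex.encode raw in
  (* the prefix keeps per-directory file counts low *)
  Fpath.(v "_artifacts" / String.sub hex 0 2 / hex)
```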
README.md

@@ -1,13 +1,13 @@
 # Builder-web - a web frontend for reproducible builds
 
-Builder-web takes in submissions of builds, typically from [builder](https://github.com/robur-coop/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
+Builder-web takes in submissions of builds, typically from [builder](https://github.com/roburio/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
 Produced binaries can be downloaded and executed.
 [builds.robur.coop](https://builds.robur.coop/) itself runs builder-web.
 
 ## Overview
 
 Builder-web is a single binary web server using a sqlite3 database with versioned schemas.
-Finished builds from [builder](https://github.com/robur-coop/builder/) are uploaded to builder-web, stored and indexed in the database and presented in the web interface to the user.
+Finished builds from [builder](https://github.com/roburio/builder/) are uploaded to builder-web, stored and indexed in the database and presented in the web interface to the user.
 Users can:
 
 * Get an overview of *jobs* - a job is typically a script or opam package that is run and builds an artifact,
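The overview above describes one sqlite3 database consulted by every tool. As a hedged sketch of how a client opens it, modeled on the `connect` function in the builder-db hunks below (the `with_db` helper name is invented here; the URI shape and `tweaks_version` follow that hunk):

```ocaml
(* sketch: open the versioned builder.sqlite3 without creating it *)
let with_db datadir f =
  let path = datadir ^ "/builder.sqlite3" in
  let uri = Uri.make ~scheme:"sqlite3" ~path ~query:["create", ["false"]] () in
  Result.bind (Caqti_blocking.connect ~tweaks_version:(1,8) uri) f
```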
@@ -12,9 +12,9 @@ let scrypt_params ?(scrypt_n = 16384) ?(scrypt_r = 8) ?(scrypt_p = 1) () =
   { scrypt_n; scrypt_r; scrypt_p }
 
 type pbkdf2_sha256 =
-  [ `Pbkdf2_sha256 of string * string * pbkdf2_sha256_params ]
+  [ `Pbkdf2_sha256 of Cstruct.t * Cstruct.t * pbkdf2_sha256_params ]
 
-type scrypt = [ `Scrypt of string * string * scrypt_params ]
+type scrypt = [ `Scrypt of Cstruct.t * Cstruct.t * scrypt_params ]
 
 type password_hash = [ pbkdf2_sha256 | scrypt ]
 
@@ -25,10 +25,10 @@ type 'a user_info = {
 }
 
 let pbkdf2_sha256 ~params:{ pbkdf2_sha256_iter = count } ~salt ~password =
-  Pbkdf.pbkdf2 ~prf:`SHA256 ~count ~dk_len:32l ~salt ~password
+  Pbkdf.pbkdf2 ~prf:`SHA256 ~count ~dk_len:32l ~salt ~password:(Cstruct.of_string password)
 
 let scrypt ~params:{ scrypt_n = n; scrypt_r = r; scrypt_p = p } ~salt ~password =
-  Scrypt.scrypt ~n ~r ~p ~dk_len:32l ~salt ~password
+  Scrypt_kdf.scrypt_kdf ~n ~r ~p ~dk_len:32l ~salt ~password:(Cstruct.of_string password)
 
 let hash ?(scrypt_params=scrypt_params ())
     ~username ~password ~restricted () =
@@ -43,10 +43,10 @@ let hash ?(scrypt_params=scrypt_params ())
 let verify_password password user_info =
   match user_info.password_hash with
   | `Pbkdf2_sha256 (password_hash, salt, params) ->
-    String.equal
+    Cstruct.equal
      (pbkdf2_sha256 ~params ~salt ~password)
      password_hash
   | `Scrypt (password_hash, salt, params) ->
-    String.equal
+    Cstruct.equal
      (scrypt ~params ~salt ~password)
      password_hash
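Between the two sides only the byte container changes (OCaml `string` on main, `Cstruct.t` on the branch); the call pattern stays the same. A minimal usage sketch of the API on main's string-based side, assuming `hash` returns the `user_info` record that `verify_password` consumes (the username and password literals are illustrative):

```ocaml
let () =
  (* the RNG must be seeded before hashing, as main's builder-db does *)
  Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna);
  let user =
    Builder_web_auth.hash ~username:"alice" ~password:"correct horse" ~restricted:true ()
  in
  (* verify_password recomputes the KDF and compares the digests *)
  assert (Builder_web_auth.verify_password "correct horse" user)
```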
@@ -1,3 +1,3 @@
 (library
  (name builder_web_auth)
- (libraries kdf.pbkdf kdf.scrypt mirage-crypto-rng))
+ (libraries pbkdf scrypt-kdf mirage-crypto-rng))
@@ -16,13 +16,6 @@ let defer_foreign_keys =
   Caqti_type.unit ->. Caqti_type.unit @@
   "PRAGMA defer_foreign_keys = ON"
 
-let build_artifacts_to_orphan =
-  Builder_db.Rep.id `build ->* Caqti_type.octets @@
-  {| SELECT a.sha256 FROM build_artifact a
-     WHERE a.build = ? AND
-           (SELECT COUNT(*) FROM build_artifact a2
-            WHERE a2.sha256 = a.sha256 AND a2.build <> a.build) = 0 |}
-
 let connect uri =
   let* (module Db : Caqti_blocking.CONNECTION) = Caqti_blocking.connect ~tweaks_version:(1,8) uri in
   let* () = Db.exec defer_foreign_keys () in
@@ -43,16 +36,6 @@ let do_migrate dbpath =
 let migrate () dbpath =
   or_die 1 (do_migrate dbpath)
 
-let artifacts_dir datadir = Fpath.(datadir / "_artifacts")
-
-let artifact_path sha256 =
-  let sha256 = Ohex.encode sha256 in
-  (* NOTE: [sha256] is 64 characters when it's a hex sha256 checksum *)
-  (* NOTE: We add the prefix to reduce the number of files in a directory - a
-     workaround for inferior filesystems. We can easily revert this by changing
-     this function and adding a migration. *)
-  let prefix = String.sub sha256 0 2 in
-  Fpath.(v "_artifacts" / prefix / sha256)
-
 let user_mod action dbpath scrypt_n scrypt_r scrypt_p username unrestricted =
   let scrypt_params = Builder_web_auth.scrypt_params ?scrypt_n ?scrypt_r ?scrypt_p () in
   let r =
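For illustration, an artifact's absolute location under the scheme removed above is the data directory joined with `artifact_path`, as the orphan-deletion code later in this file does. A hypothetical existence check (not part of the diff):

```ocaml
(* sketch: is the deduplicated artifact present on disk? *)
let artifact_on_disk datadir sha256 =
  Bos.OS.File.exists Fpath.(v datadir // artifact_path sha256)
```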
@@ -108,7 +91,7 @@ let user_disable () dbpath username =
     match user with
     | None -> Error (`Msg "user not found")
     | Some (_, user_info) ->
-      let password_hash = `Scrypt ("", "", Builder_web_auth.scrypt_params ()) in
+      let password_hash = `Scrypt (Cstruct.empty, Cstruct.empty, Builder_web_auth.scrypt_params ()) in
       let user_info = { user_info with password_hash ; restricted = true } in
       Db.exec Builder_db.User.update_user user_info
   in
@@ -148,26 +131,6 @@ let access_remove () dbpath username jobname =
   in
   or_die 1 r
 
-let delete_build datadir (module Db : Caqti_blocking.CONNECTION) jobname id uuid =
-  let dir = Fpath.(v datadir / jobname / Uuidm.to_string uuid) in
-  (match Bos.OS.Dir.delete ~recurse:true dir with
-   | Ok _ -> ()
-   | Error `Msg e -> Logs.warn (fun m -> m "failed to remove build directory %a: %s" Fpath.pp dir e));
-  let* () =
-    Db.iter_s build_artifacts_to_orphan
-      (fun sha256 ->
-         let p = Fpath.(v datadir // artifact_path sha256) in
-         match Bos.OS.Path.delete p with
-         | Ok () -> Ok ()
-         | Error `Msg e ->
-           Logs.warn (fun m -> m "failed to remove orphan artifact %a: %s"
-                        Fpath.pp p e);
-           Ok ())
-      id
-  in
-  let* () = Db.exec Builder_db.Build_artifact.remove_by_build id in
-  Db.exec Builder_db.Build.remove id
-
 let job_remove () datadir jobname =
   let dbpath = datadir ^ "/builder.sqlite3" in
   let r =
@@ -187,7 +150,12 @@ let job_remove () datadir jobname =
       let* () =
         List.fold_left (fun r (build_id, build) ->
            let* () = r in
-           delete_build datadir (module Db) jobname build_id build.Builder_db.Build.uuid)
+           let dir = Fpath.(v datadir / jobname / Uuidm.to_string build.Builder_db.Build.uuid) in
+           (match Bos.OS.Dir.delete ~recurse:true dir with
+            | Ok _ -> ()
+            | Error `Msg e -> Logs.warn (fun m -> m "failed to remove build directory %a: %s" Fpath.pp dir e));
+           let* () = Db.exec Builder_db.Build_artifact.remove_by_build build_id in
+           Db.exec Builder_db.Build.remove build_id)
          (Ok ())
          builds
       in
@@ -205,103 +173,13 @@ let job_remove () datadir jobname =
   in
   or_die 1 r
 
-let vacuum datadir (module Db : Caqti_blocking.CONNECTION) platform_opt job_id predicate =
-  let* jobname = Db.find Builder_db.Job.get job_id in
-  let* builds =
-    match predicate with
-    | `Date older_than ->
-      Db.collect_list Builder_db.Build.get_builds_older_than (job_id, platform_opt, older_than)
-    | `Latest latest_n ->
-      Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, platform_opt, latest_n)
-    | `Latest_successful latest_n ->
-      let* latest_n =
-        Db.find_opt Builder_db.Build.get_nth_latest_successful
-          (job_id, platform_opt, latest_n)
-      in
-      match latest_n with
-      | None ->
-        Ok []
-      | Some (id, latest_n) ->
-        let+ builds =
-          Db.collect_list Builder_db.Build.get_builds_older_than
-            (job_id, platform_opt, latest_n.finish)
-        in
-        (* Unfortunately, get_builds_older_than is non-strict comparison;
-           so we need to filter out [latest_n]. *)
-        List.filter (fun (id', _) -> id <> id') builds
-  in
-  let pp_reason ppf = function
-    | `Date older_than ->
-      Format.fprintf ppf "has no builds older than %a" (Ptime.pp_rfc3339 ()) older_than
-    | `Latest n ->
-      Format.fprintf ppf "has fewer than %d builds" n
-    | `Latest_successful n ->
-      Format.fprintf ppf "has fewer than %d successful builds" n
-  in
-  if builds = [] then
-    (* NOTE: this function may be called on *all* jobs, and in that case maybe
-       this is too verbose? *)
-    Logs.info (fun m -> m "Job %s %a; not removing any builds"
-                  jobname pp_reason predicate);
-  List.fold_left (fun r (build_id, build) ->
-      let* () = r in
-      let* () = Db.start () in
-      let* () = Db.exec defer_foreign_keys () in
-      match
-        delete_build datadir (module Db) jobname build_id
-          build.Builder_db.Build.uuid
-      with
-      | Ok () -> Db.commit ()
-      | Error _ as e ->
-        let* () = Db.rollback () in
-        e)
-    (Ok ())
-    builds
-
-let vacuum () datadir platform_opt jobnames predicate =
-  let dbpath = datadir ^ "/builder.sqlite3" in
-  let r =
-    let* (module Db : Caqti_blocking.CONNECTION) =
-      connect (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
-    in
-    let* jobs =
-      match jobnames with
-      | [] ->
-        (* We default to all jobs if no jobnames were specified *)
-        let* jobs = Db.collect_list Builder_db.Job.get_all_with_section_synopsis () in
-        Ok (List.map (fun (job_id, _, _, _) -> job_id) jobs)
-      | _ :: _ ->
-        let* (jobs, unknown_jobnames) =
-          List.fold_left
-            (fun r jobname ->
-               let* (jobs, unknown_jobnames) = r in
-               let* job_id_opt = Db.find_opt Builder_db.Job.get_id_by_name jobname in
-               match job_id_opt with
-               | Some job_id -> Ok (job_id :: jobs, unknown_jobnames)
-               | None -> Ok (jobs, jobname :: unknown_jobnames))
-            (Ok ([], []))
-            jobnames
-        in
-        match unknown_jobnames with
-        | [] -> Ok jobs
-        | _ :: _ ->
-          Error (`Msg ("Unknown job(s): " ^ String.concat ", " unknown_jobnames))
-    in
-    List.fold_left (fun r jobid ->
-        let* () = r in
-        vacuum datadir (module Db) platform_opt jobid predicate)
-      (Ok ())
-      jobs
-  in
-  or_die 1 r
-
 let input_ids =
-  Caqti_type.unit ->* Caqti_type.octets @@
+  Caqti_type.unit ->* Builder_db.Rep.cstruct @@
   "SELECT DISTINCT input_id FROM build WHERE input_id IS NOT NULL"
 
 let main_artifact_hash =
-  Caqti_type.octets ->*
-  Caqti_type.t3 Caqti_type.octets Builder_db.Rep.uuid Caqti_type.string @@
+  Builder_db.Rep.cstruct ->*
+  Caqti_type.tup3 Builder_db.Rep.cstruct Builder_db.Rep.uuid Caqti_type.string @@
  {|
    SELECT a.sha256, b.uuid, j.name FROM build_artifact a, build b, job j
    WHERE b.input_id = ? AND a.id = b.main_binary AND b.job = j.id
@@ -319,12 +197,12 @@ let verify_input_id () dbpath =
       match hashes with
       | (h, uuid, jobname) :: tl ->
         List.iter (fun (h', uuid', _) ->
-            if String.equal h h' then
+            if Cstruct.equal h h' then
              ()
            else
              Logs.warn (fun m -> m "job %s input id %a with two different hashes (%a, %a), build %a and %a"
-                          jobname Ohex.pp input_id
-                          Ohex.pp h Ohex.pp h'
+                          jobname Cstruct.hexdump_pp input_id
+                          Cstruct.hexdump_pp h Cstruct.hexdump_pp h'
                           Uuidm.pp uuid Uuidm.pp uuid'))
          tl
      | [] -> ())
@@ -336,17 +214,18 @@ let num_build_artifacts =
   Caqti_type.unit ->! Caqti_type.int @@
   "SELECT count(*) FROM build_artifact"
 
-let build_artifacts : (unit, string * Uuidm.t * Fpath.t * string * int64, [ `One | `Zero | `Many ]) Caqti_request.t =
+let build_artifacts : (unit, string * Uuidm.t * (Fpath.t * Fpath.t * Cstruct.t * int64), [ `One | `Zero | `Many ]) Caqti_request.t =
   Caqti_type.unit ->*
-  Caqti_type.(t5 string Builder_db.Rep.uuid Builder_db.Rep.fpath octets int64)
+  Caqti_type.(tup3 string Builder_db.Rep.uuid
+                (tup4 Builder_db.Rep.fpath Builder_db.Rep.fpath Builder_db.Rep.cstruct int64))
   @@
-  {| SELECT job.name, b.uuid, a.filepath, a.sha256, a.size
+  {| SELECT job.name, b.uuid, a.filepath, a.localpath, a.sha256, a.size
      FROM build_artifact a, build b, job
      WHERE a.build = b.id AND b.job = job.id |}
 
 let script_and_console : (unit, _, [`One | `Zero | `Many ]) Caqti_request.t =
   Caqti_type.unit ->*
-  Caqti_type.(t4 string Builder_db.Rep.uuid Builder_db.Rep.fpath Builder_db.Rep.fpath)
+  Caqti_type.(tup4 string Builder_db.Rep.uuid Builder_db.Rep.fpath Builder_db.Rep.fpath)
   @@
   {| SELECT job.name, b.uuid, b.console, b.script
      FROM build b, job
@@ -378,32 +257,35 @@ let verify_data_dir () datadir =
     let idx = ref 0 in
     fun () -> incr idx; if !idx mod 100 = 0 then Logs.info (fun m -> m "%d" !idx);
   in
-  let verify_job_and_uuid job uuid path =
+  let verify_job_and_uuid ?fpath job uuid path =
     match Fpath.segs path with
-    | job' :: uuid' :: _tl ->
+    | job' :: uuid' :: tl ->
       if String.equal job job' then () else Logs.warn (fun m -> m "job names do not match: %s vs %s" job job');
       if String.equal (Uuidm.to_string uuid) uuid' then () else Logs.warn (fun m -> m "uuid does not match: %s vs %s" (Uuidm.to_string uuid) uuid');
+      (match fpath, tl with
+       | None, _ -> ()
+       | Some f, "output" :: tl ->
+         if Fpath.equal (Fpath.v (String.concat "/" tl)) f then
+           ()
+         else
+           Logs.err (fun m -> m "path (%a) and fpath (%a) do not match" Fpath.pp path Fpath.pp f)
+       | Some _, _ ->
+         Logs.err (fun m -> m "path is not of form <job>/<uuid>/output/<filename>: %a" Fpath.pp path))
     | _ -> Logs.err (fun m -> m "path is not of form <job>/<uuid>/...: %a" Fpath.pp path)
   in
   let* () =
-    Db.iter_s build_artifacts (fun (_job, _uuid, _fpath, sha256, size) ->
+    Db.iter_s build_artifacts (fun (job, uuid, (fpath, lpath, sha, size)) ->
        progress ();
-        if not (FpathSet.mem (artifact_path sha256) !files_tracked) then
-          let abs_path = Fpath.(v datadir // artifact_path sha256) in
-          (match Bos.OS.File.read abs_path with
-           | Error (`Msg msg) -> Logs.err (fun m -> m "file %a not present: %s" Fpath.pp abs_path msg)
-           | Ok data ->
-             files_tracked := FpathSet.add (artifact_path sha256) !files_tracked;
-             let s = Int64.of_int (String.length data) in
-             if s <> size then Logs.err (fun m -> m "File %a has different size (in DB %Lu on disk %Lu)" Fpath.pp abs_path size s);
-             let sha256' = Digestif.SHA256.(to_raw_string (digest_string data)) in
-             if not (String.equal sha256 sha256') then
-               Logs.err (fun m -> m "File %a has different hash (in DB %a on disk %a)"
-                            Fpath.pp abs_path
-                            Ohex.pp sha256
-                            Ohex.pp sha256')) ;
-          Ok ()
-        else
-          Ok ()
+        verify_job_and_uuid ~fpath job uuid lpath;
+        let abs_path = Fpath.(v datadir // lpath) in
+        (match Bos.OS.File.read abs_path with
+         | Error (`Msg msg) -> Logs.err (fun m -> m "file %a not present: %s" Fpath.pp abs_path msg)
+         | Ok data ->
+           files_tracked := FpathSet.add lpath !files_tracked;
+           let s = Int64.of_int (String.length data) in
+           if s <> size then Logs.err (fun m -> m "File %a has different size (in DB %Lu on disk %Lu)" Fpath.pp abs_path size s);
+           let sh = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
+           if not (Cstruct.equal sha sh) then Logs.err (fun m -> m "File %a has different hash (in DB %a on disk %a" Fpath.pp abs_path Cstruct.hexdump_pp sha Cstruct.hexdump_pp sh)) ;
+        Ok ()
       ) ()
   in
@@ -544,8 +426,8 @@ module Verify_cache_dir = struct
   type t = {
     uuid : Uuidm.t;
    job_name : string;
-    hash_opam_switch : string option;
-    hash_debug_bin : string option;
+    hash_opam_switch : Cstruct.t option;
+    hash_debug_bin : Cstruct.t option;
   }
 
   let repr =
@@ -556,11 +438,11 @@ module Verify_cache_dir = struct
     in
     Caqti_type.custom ~encode ~decode
       Caqti_type.(
-        t4
+        tup4
           Builder_db.Rep.uuid
           string
-          (option octets)
-          (option octets))
+          (option Builder_db.Rep.cstruct)
+          (option Builder_db.Rep.cstruct))
 
 end
@@ -572,13 +454,12 @@ module Verify_cache_dir = struct
          ba_opam_switch.sha256 hash_opam_switch,
          ba_debug_bin.sha256 hash_debug_bin
        FROM build AS b
-       WHERE b.main_binary IS NOT NULL
        LEFT JOIN build_artifact AS ba_opam_switch ON
          ba_opam_switch.build = b.id
          AND ba_opam_switch.filepath = 'opam-switch'
        LEFT JOIN build_artifact AS ba_debug_bin ON
          ba_debug_bin.build = b.id
-         AND ba_debug_bin.filepath LIKE '%.debug'
+         AND ba_debug_bin.localpath LIKE '%.debug'
    |}
 
   let check_viz_nonempty ~cachedir ~viz_typ ~hash =
@@ -586,7 +467,7 @@ module Verify_cache_dir = struct
     let* latest_version =
       Viz_aux.get_viz_version_from_dirs ~cachedir ~viz_typ
     in
-    let viz_input_hash = Ohex.encode hash in
+    let `Hex viz_input_hash = Hex.of_cstruct hash in
     let* viz_path =
       Viz_aux.choose_versioned_viz_path
         ~cachedir
@@ -666,7 +547,7 @@ module Verify_cache_dir = struct
       match extract_hash ~viz_typ build with
       | None -> ()
      | Some input_hash ->
-        let input_hash = Ohex.encode input_hash in
+        let `Hex input_hash = Hex.of_cstruct input_hash in
        let viz_path = Viz_aux.viz_path
            ~cachedir
            ~viz_typ
@@ -740,8 +621,8 @@ end
 module Asn = struct
   let decode_strict codec cs =
     match Asn.decode codec cs with
-    | Ok (a, rest) ->
-      if String.length rest = 0
+    | Ok (a, cs) ->
+      if Cstruct.length cs = 0
      then Ok a
      else Error "trailing bytes"
    | Error (`Parse msg) -> Error ("parse error: " ^ msg)
@@ -799,16 +680,59 @@ let extract_full () datadir dest uuid =
     let out = console_of_string console in
     let* artifacts = Db.collect_list Builder_db.Build_artifact.get_all_by_build build_id in
     let* data =
-      List.fold_left (fun acc (_, { Builder_db.filepath; sha256; _ }) ->
+      List.fold_left (fun acc (_, { Builder_db.filepath; localpath; _ }) ->
          let* acc = acc in
-          let* data = Bos.OS.File.read Fpath.(v datadir // artifact_path sha256) in
+          let* data = Bos.OS.File.read Fpath.(v datadir // localpath) in
          Ok ((filepath, data) :: acc))
        (Ok [])
        artifacts
    in
    let exec = (job, uuid, out, start, finish, result, data) in
-    let data = Builder.Asn.exec_to_str exec in
-    Bos.OS.File.write (Fpath.v dest) data
+    let cs = Builder.Asn.exec_to_cs exec in
+    Bos.OS.File.write (Fpath.v dest) (Cstruct.to_string cs)
+  in
+  or_die 1 r
+
+let time_size_graph () datadir jobname =
+  let dbpath = datadir ^ "/builder.sqlite3" in
+  let r =
+    let* (module Db : Caqti_blocking.CONNECTION) =
+      connect (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
+    in
+    let* job_id =
+      Result.bind (Db.find_opt Builder_db.Job.get_id_by_name jobname)
+        (Option.to_result ~none:(`Msg "job not found"))
+    in
+    let* shas =
+      Db.collect_list Builder_db.Build.get_all_artifact_sha (job_id, None)
+    in
+    let* builds =
+      List.fold_left (fun acc hash ->
+          match acc with
+          | Error _ as e -> e
+          | Ok builds ->
+            let* b =
+              Db.find Builder_db.Build.get_with_main_binary_by_hash hash
+            in
+            Ok (b :: builds))
+        (Ok []) shas
+    in
+    Printf.printf "# build times and binary sizes for job %s\n" jobname;
+    Printf.printf "# build start (seconds since epoch) <TAB> build duration (seconds) <TAB> binary size (bytes) # UUID\n";
+    List.iter (fun (build, file) ->
+        match file with
+        | None ->
+          Printf.eprintf "no file for build %s\n" (Uuidm.to_string build.Builder_db.Build.uuid)
+        | Some f ->
+          Printf.printf "%u\t%u\t%u\t# %s\n"
+            (match Ptime.Span.to_int_s (Ptime.to_span build.start) with
+             | None -> assert false | Some s -> s)
+            (match Ptime.Span.to_int_s (Ptime.diff build.finish build.start) with
+             | None -> assert false | Some s -> s)
+            f.Builder_db.size
+            (Uuidm.to_string build.Builder_db.Build.uuid))
+      builds;
+    Ok ()
   in
   or_die 1 r
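The `time-size-graph` subcommand added above prints `#`-prefixed comment lines followed by tab-separated data rows, each ending in a `# UUID` trailer. A small sketch of consuming one such row (`parse_line` is a hypothetical helper, not part of this diff; comment lines yield `None`):

```ocaml
(* sketch: split a data line into (start-seconds, duration-seconds, size-bytes) *)
let parse_line line =
  match String.split_on_char '\t' line with
  | start :: duration :: size :: _uuid_comment ->
    Some (int_of_string start, int_of_string duration, int_of_string size)
  | _ -> None
```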
@@ -820,89 +744,81 @@ let help man_format cmds = function
   else `Error (true, "Unknown command: " ^ cmd)
 
 let dbpath =
-  let doc = "sqlite3 database path." in
+  let doc = "sqlite3 database path" in
   Cmdliner.Arg.(value &
                opt non_dir_file (Builder_system.default_datadir ^ "/builder.sqlite3") &
                info ~doc ["dbpath"])
 
 let dbpath_new =
-  let doc = "sqlite3 database path." in
+  let doc = "sqlite3 database path" in
   Cmdliner.Arg.(value &
                opt string (Builder_system.default_datadir ^ "/builder.sqlite3") &
                info ~doc ["dbpath"])
 
 let datadir =
-  let doc = "Data directory." in
-  let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
+  let doc = "data directory" in
   Cmdliner.Arg.(value &
                opt dir Builder_system.default_datadir &
-                info ~doc ~env ["datadir"; "d"])
+                info ~doc ["datadir"; "d"])
 
 let cachedir =
-  let doc = "Cache directory." in
-  let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_CACHEDIR" in
+  let doc = "cache directory" in
   Cmdliner.Arg.(value &
                opt (some dir) None &
-                info ~doc ~env ["cachedir"])
+                info ~doc ["cachedir"])
 
 let jobname =
-  let doc = "Jobname." in
+  let doc = "jobname" in
   Cmdliner.Arg.(required &
                pos 0 (some string) None &
                info ~doc ~docv:"JOBNAME" [])
 
 let username =
-  let doc = "Username." in
+  let doc = "username" in
   Cmdliner.Arg.(required &
                pos 0 (some string) None &
                info ~doc ~docv:"USERNAME" [])
 
 let password_iter =
-  let doc = "Password hash count." in
+  let doc = "password hash count" in
   Cmdliner.Arg.(value &
                opt (some int) None &
                info ~doc ["hash-count"])
 
 let scrypt_n =
-  let doc = "scrypt n parameter." in
+  let doc = "scrypt n parameter" in
   Cmdliner.Arg.(value &
                opt (some int) None &
                info ~doc ["scrypt-n"])
 
 let scrypt_r =
-  let doc = "scrypt r parameter." in
+  let doc = "scrypt r parameter" in
   Cmdliner.Arg.(value &
                opt (some int) None &
                info ~doc ["scrypt-r"])
 
 let scrypt_p =
-  let doc = "scrypt p parameter." in
+  let doc = "scrypt p parameter" in
   Cmdliner.Arg.(value &
                opt (some int) None &
                info ~doc ["scrypt-p"])
 
 let unrestricted =
-  let doc = "Unrestricted user." in
+  let doc = "unrestricted user" in
   Cmdliner.Arg.(value & flag & info ~doc [ "unrestricted" ])
 
 let job =
-  let doc = "Job." in
+  let doc = "job" in
   Cmdliner.Arg.(required &
                pos 1 (some string) None &
                info ~doc ~docv:"JOB" [])
 
 let build =
-  let doc = "Build uuid." in
+  let doc = "build uuid" in
   Cmdliner.Arg.(required &
                pos 0 (some string) None &
                info ~doc ~docv:"BUILD" [])
 
-let platform =
-  let doc = "Platform." in
-  Cmdliner.Arg.(value &
-                opt (some string) None &
-                info ~doc ~docv:"PLATFORM" ["platform"])
-
 let full_dest =
   let doc = "path to write build file" in
   Cmdliner.Arg.(value & opt string "full" &
@@ -976,102 +892,6 @@ let job_remove_cmd =
   let info = Cmd.info ~doc "job-remove" in
   Cmd.v info term
 
-let vacuum_cmd =
-  let jobs =
-    Arg.(value & opt_all string [] & info ~doc:"Job(s). Can be passed multiple times." ~docv:"JOB" ["job"])
-  in
-  let ptime_conv =
-    let parse s =
-      match Ptime.of_rfc3339 s with
-      | Ok (ptime, (None | Some 0), _) ->
-        Ok (`Date ptime)
-      | Ok _ -> Error (`Msg "only UTC timezone is allowed")
-      | Error `RFC3339 (_range, e) ->
-        Error (`Msg (Format.asprintf "bad RFC3339 date-time: %a" Ptime.pp_rfc3339_error e))
-    and pp ppf (`Date ptime) =
-      Ptime.pp_rfc3339 () ppf ptime
-    in
-    Arg.conv (parse, pp)
-  in
-  let older_than =
-    let doc = "cut-off date-time" in
-    Arg.(required & pos 0 (some ptime_conv) None & info ~doc ~docv:"OLDER-THAN" [])
-  in
-  (* TODO(reynir): for now we disallow 0 so as to avoid ending up with jobs
-     without builds. I'm unsure how well builder-web works with empty jobs.
-     Then again we don't do this check for older-than... *)
-  let latest_n =
-    let doc = "latest N" in
-    let latest_n =
-      let parse s =
-        match Arg.(conv_parser int) s with
-        | Ok n when n > 0 -> Ok (`Latest n)
-        | Ok _ -> Error (`Msg "must be positive integer")
-        | Error _ as e -> e
-      and pp ppf (`Latest n) =
-        Arg.(conv_printer int) ppf n
-      in
-      Arg.conv (parse, pp)
-    in
-    Arg.(required & pos 0 (some latest_n) None & info ~doc ~docv:"LATEST-N" [])
-  in
-  let latest_n_succesful =
-    let doc = "latest N successful" in
-    let latest_n =
-      let parse s =
-        match Arg.(conv_parser int) s with
-        | Ok n when n > 0 -> Ok (`Latest_successful n)
-        | Ok _ -> Error (`Msg "must be positive integer")
-        | Error _ as e -> e
-      and pp ppf (`Latest_successful n) =
-        Arg.(conv_printer int) ppf n
-      in
-      Arg.conv (parse, pp)
-    in
-    Arg.(required & pos 0 (some latest_n) None & info ~doc ~docv:"LATEST-N" [])
-  in
-  let job_default_txt =
-    "By default all jobs are vacuumed, unless any jobs are specified using --job."
-  in
-  let vacuum_older_than =
-    let doc =
-      Printf.sprintf "Remove builds older than a date. %s" job_default_txt
-    in
-    let info = Cmd.info ~doc "older-than" in
-    let term =
-      Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ older_than)
-    in
-    Cmd.v info term
-  in
-  let vacuum_except_latest_n =
-    let doc =
-      Printf.sprintf "Remove all builds except for the latest N builds (successful or not). %s"
-        job_default_txt
-    in
-    let info = Cmd.info ~doc "except-latest" in
-    let term =
-      Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ latest_n)
-    in
-    Cmd.v info term
-  in
-  let vacuum_except_latest_n_successful =
-    let doc =
-      Printf.sprintf "Remove all builds except for builds newer than the Nth latest successful build. %s"
-        job_default_txt
-    in
-    let info = Cmd.info ~doc "except-latest-successful" in
-    let term =
-      Term.(const vacuum $ setup_log $ datadir $ platform $ jobs $ latest_n_succesful)
-    in
-    Cmd.v info term
-  in
-  let doc = "Remove old builds" in
-  Cmd.group (Cmd.info ~doc "vacuum") [
-    vacuum_older_than;
-    vacuum_except_latest_n;
-    vacuum_except_latest_n_successful;
-  ]
-
 let extract_full_cmd =
   let doc = "extract a build from the database" in
   let term = Term.(
@@ -1099,6 +919,12 @@ let verify_cache_dir_cmd =
   let info = Cmd.info ~doc "verify-cache-dir" in
   Cmd.v info term
 
+let time_size_graph_cmd =
+  let doc = "output the build times and binary sizes of a job" in
+  let term = Term.(const time_size_graph $ setup_log $ datadir $ jobname) in
+  let info = Cmd.info ~doc "time-size-graph" in
+  Cmd.v info term
+
 let help_cmd =
   let topic =
     let doc = "Command to get help on" in
@@ -1115,7 +941,7 @@ let default_cmd, default_info =
   Cmd.info ~doc "builder-db"
 
 let () =
-  Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna);
+  Mirage_crypto_rng_unix.initialize ();
   Cmdliner.Cmd.group
     ~default:default_cmd default_info
     [ help_cmd; migrate_cmd;
@@ -1126,6 +952,6 @@ let () =
       verify_data_dir_cmd;
       verify_cache_dir_cmd;
       extract_full_cmd;
-      vacuum_cmd ]
+      time_size_graph_cmd ]
  |> Cmdliner.Cmd.eval
  |> exit
@@ -30,7 +30,7 @@ let write_raw s buf =
       safe_close s >|= fun () ->
       Error `Exception)
   in
-  (* Logs.debug (fun m -> m "writing %a" (Ohex.pp_hexdump ()) (Bytes.unsafe_to_string buf)) ; *)
+  (* Logs.debug (fun m -> m "writing %a" Cstruct.hexdump_pp (Cstruct.of_bytes buf)) ; *)
   w 0 (Bytes.length buf)
 
 let process =
@@ -113,14 +113,13 @@ let run_batch_viz ~cachedir ~datadir ~configdir =
         m "Error while starting batch-viz.sh: %a"
           Rresult.R.pp_msg err)
 
-let setup_app level influx port host datadir cachedir configdir run_batch_viz_flag expired_jobs =
+let setup_app level influx port host datadir cachedir configdir run_batch_viz_flag =
   let dbpath = Printf.sprintf "%s/builder.sqlite3" datadir in
   let datadir = Fpath.v datadir in
   let cachedir =
     cachedir |> Option.fold ~none:Fpath.(datadir / "_cache") ~some:Fpath.v
   in
   let configdir = Fpath.v configdir in
-  let () = Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna) in
   let () = init_influx "builder-web" influx in
   let () =
     if run_batch_viz_flag then
|
@ -159,7 +158,7 @@ let setup_app level influx port host datadir cachedir configdir run_batch_viz_fl
|
||||||
let error_handler = Dream.error_template Builder_web.error_template in
|
let error_handler = Dream.error_template Builder_web.error_template in
|
||||||
Dream.initialize_log ?level ();
|
Dream.initialize_log ?level ();
|
||||||
let dream_routes = Builder_web.(
|
let dream_routes = Builder_web.(
|
||||||
routes ~datadir ~cachedir ~configdir ~expired_jobs
|
routes ~datadir ~cachedir ~configdir
|
||||||
|> to_dream_routes
|
|> to_dream_routes
|
||||||
)
|
)
|
||||||
in
|
in
|
||||||
|
@@ -196,11 +195,10 @@ let ip_port : (Ipaddr.V4.t * int) Arg.conv =
 let datadir =
   let doc = "data directory" in
   let docv = "DATA_DIR" in
-  let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
   Arg.(
     value &
     opt dir Builder_system.default_datadir &
-    info ~env [ "d"; "datadir" ] ~doc ~docv
+    info [ "d"; "datadir" ] ~doc ~docv
   )
 
 let cachedir =
|
||||||
log is written to CACHE_DIR/batch-viz.log" in
|
log is written to CACHE_DIR/batch-viz.log" in
|
||||||
Arg.(value & flag & info [ "run-batch-viz" ] ~doc)
|
Arg.(value & flag & info [ "run-batch-viz" ] ~doc)
|
||||||
|
|
||||||
let expired_jobs =
|
|
||||||
let doc = "Amount of days after which a job is considered to be inactive if \
|
|
||||||
no successful build has been achieved (use 0 for infinite)" in
|
|
||||||
Arg.(value & opt int 30 & info [ "expired-jobs" ] ~doc)
|
|
||||||
|
|
||||||
let () =
|
let () =
|
||||||
let term =
|
let term =
|
||||||
Term.(const setup_app $ Logs_cli.level () $ influx $ port $ host $ datadir $
|
Term.(const setup_app $ Logs_cli.level () $ influx $ port $ host $ datadir $
|
||||||
cachedir $ configdir $ run_batch_viz $ expired_jobs)
|
cachedir $ configdir $ run_batch_viz)
|
||||||
in
|
in
|
||||||
let info = Cmd.info "Builder web" ~doc:"Builder web" ~man:[] in
|
let info = Cmd.info "Builder web" ~doc:"Builder web" ~man:[] in
|
||||||
Cmd.v info term
|
Cmd.v info term
|
||||||
|
|
bin/dune (7 changes)
@@ -7,13 +7,10 @@
  (public_name builder-web)
  (name builder_web_app)
  (modules builder_web_app)
- (libraries builder_web builder_system mirage-crypto-rng.unix cmdliner
-  logs.cli metrics metrics-lwt metrics-influx metrics-rusage ipaddr
-  ipaddr.unix http_status_metrics))
+ (libraries builder_web builder_system mirage-crypto-rng.unix cmdliner logs.cli metrics metrics-lwt metrics-influx metrics-rusage ipaddr ipaddr.unix http_status_metrics))
 
 (executable
  (public_name builder-db)
  (name builder_db_app)
  (modules builder_db_app)
- (libraries builder_web builder_db builder_system caqti.blocking uri bos fmt
-  logs logs.cli logs.fmt cmdliner mirage-crypto-rng.unix builder))
+ (libraries builder_web builder_db builder_system caqti.blocking uri bos fmt logs logs.cli logs.fmt cmdliner mirage-crypto-rng.unix builder))
@@ -61,10 +61,9 @@ let help man_format migrations = function
 
 let datadir =
   let doc = "data directory containing builder.sqlite3 and data files" in
-  let env = Cmdliner.Cmd.Env.info "BUILDER_WEB_DATADIR" in
   Cmdliner.Arg.(value &
                opt dir Builder_system.default_datadir &
-                info ~env ~doc ["datadir"; "d"])
+                info ~doc ["datadir"; "d"])
 
 let setup_log =
   let setup_log level =
@@ -180,8 +179,6 @@ let () =
       [ f20210910 ];
       actions (module M20211105);
       actions (module M20220509);
-      actions (module M20230911);
-      actions (module M20230914);
     ])
   |> Cmd.eval
   |> exit
@@ -1,5 +1,4 @@
 (executable
  (public_name builder-migrations)
  (name builder_migrations)
- (libraries builder_system builder_db caqti caqti-driver-sqlite3
-  caqti.blocking cmdliner logs logs.cli logs.fmt opam-format bos duration))
+ (libraries builder_system builder_db caqti caqti-driver-sqlite3 caqti.blocking cmdliner logs logs.cli logs.fmt opam-format bos duration))
@@ -18,11 +18,11 @@ let all_builds =
   "SELECT id FROM build"
 
 let bin_artifact =
-  Caqti_type.int64 ->* Caqti_type.(t2 int64 string) @@
+  Caqti_type.int64 ->* Caqti_type.(tup2 int64 string) @@
   "SELECT id, filepath FROM build_artifact WHERE build = ? AND filepath LIKE 'bin/%'"
 
 let set_main_binary =
-  Caqti_type.(t2 int64 (option string)) ->. Caqti_type.unit @@
+  Caqti_type.(tup2 int64 (option string)) ->. Caqti_type.unit @@
   "UPDATE build SET main_binary = $2 WHERE id = $1"
 
 let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
@@ -37,21 +37,21 @@ let new_build_file =
 |}
 
 let collect_build_artifact =
-  Caqti_type.unit ->* Caqti_type.(t3 int64 (t3 string string octets) int64) @@
+  Caqti_type.unit ->* Caqti_type.(tup3 int64 (tup3 string string octets) int64) @@
   "SELECT id, filepath, localpath, sha256, build FROM build_artifact"
 
 let collect_build_file =
-  Caqti_type.unit ->* Caqti_type.(t3 int64 (t3 string string octets) int64) @@
+  Caqti_type.unit ->* Caqti_type.(tup3 int64 (tup3 string string octets) int64) @@
   "SELECT id, filepath, localpath, sha256, build FROM build_file"
 
 let insert_new_build_artifact =
-  Caqti_type.(t3 int64 (t4 string string octets int64) int64) ->. Caqti_type.unit @@
+  Caqti_type.(tup3 int64 (tup4 string string octets int64) int64) ->. Caqti_type.unit @@
   {| INSERT INTO new_build_artifact (id, filepath, localpath, sha256, size, build)
      VALUES (?, ?, ?, ?, ?, ?)
   |}
 
 let insert_new_build_file =
-  Caqti_type.(t3 int64 (t4 string string octets int64) int64) ->. Caqti_type.unit @@
+  Caqti_type.(tup3 int64 (tup4 string string octets int64) int64) ->. Caqti_type.unit @@
   {| INSERT INTO new_build_file (id, filepath, localpath, sha256, size, build)
     VALUES (?, ?, ?, ?, ?, ?)
  |}
@@ -3,7 +3,7 @@ module Rep = Builder_db.Rep
 open Grej.Infix
 
 let broken_builds =
-  Caqti_type.unit ->* Caqti_type.t3 (Rep.id `build) Rep.uuid Caqti_type.string @@
+  Caqti_type.unit ->* Caqti_type.tup3 (Rep.id `build) Rep.uuid Caqti_type.string @@
  {| SELECT b.id, b.uuid, job.name FROM build b, job
     WHERE result_kind = 0 AND result_code = 0 AND main_binary IS NOT NULL AND job.id = b.job AND
           (SELECT COUNT( * ) FROM build_artifact a
@@ -7,11 +7,11 @@ let rollback_doc = "add datadir prefix to build_artifact.localpath"
 open Grej.Infix
 
 let build_artifacts =
-  Caqti_type.unit ->* Caqti_type.t2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath @@
+  Caqti_type.unit ->* Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath @@
   "SELECT id, localpath FROM build_artifact"
 
 let build_artifact_update_localpath =
-  Caqti_type.t2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath ->. Caqti_type.unit @@
+  Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.fpath ->. Caqti_type.unit @@
   "UPDATE build_artifact SET localpath = $2 WHERE id = $1"
 
 (* We are not migrating build_file because it is unused *)
@@ -54,20 +54,20 @@ let old_build =
 
 let collect_old_build =
   Caqti_type.unit ->*
-  Caqti_type.(t3 Builder_db.Rep.untyped_id
-                (t3 (t4 string int64 int64 int64)
-                   (t4 int64 int (option int) (option string))
-                   (t3 octets string (option string)))
-                Builder_db.Rep.untyped_id) @@
+  Caqti_type.(tup3 Builder_db.Rep.untyped_id
+                (tup3 (tup4 string int64 int64 int64)
+                   (tup4 int64 int (option int) (option string))
+                   (tup3 octets string (option string)))
+                Builder_db.Rep.untyped_id) @@
   {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg,
        console, script, main_binary, job
      FROM build |}
 
 let insert_new_build =
-  Caqti_type.(t3 Builder_db.Rep.untyped_id
-                (t3 (t4 string int64 int64 int64)
-                   (t4 int64 int (option int) (option string))
-                   (t3 octets string (option Builder_db.Rep.untyped_id)))
-                Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
+  Caqti_type.(tup3 Builder_db.Rep.untyped_id
+                (tup3 (tup4 string int64 int64 int64)
+                   (tup4 int64 int (option int) (option string))
+                   (tup3 octets string (option Builder_db.Rep.untyped_id)))
+                Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
   {| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind,
       result_code, result_msg, console, script, main_binary, job)
@@ -82,7 +82,7 @@ let rename_build =
   "ALTER TABLE new_build RENAME TO build"
 
 let find_main_artifact_id =
-  Caqti_type.(t2 Builder_db.Rep.untyped_id string) ->! Builder_db.Rep.untyped_id @@
+  Caqti_type.(tup2 Builder_db.Rep.untyped_id string) ->! Builder_db.Rep.untyped_id @@
   "SELECT id FROM build_artifact WHERE build = $1 AND filepath = $2"
 
 let find_main_artifact_filepath =
@@ -91,20 +91,20 @@ let find_main_artifact_filepath =
 
 let collect_new_build =
   Caqti_type.unit ->*
-  Caqti_type.(t3 Builder_db.Rep.untyped_id
-                (t3 (t4 string int64 int64 int64)
-                   (t4 int64 int (option int) (option string))
-                   (t3 octets string (option Builder_db.Rep.untyped_id)))
-                Builder_db.Rep.untyped_id) @@
+  Caqti_type.(tup3 Builder_db.Rep.untyped_id
+                (tup3 (tup4 string int64 int64 int64)
+                   (tup4 int64 int (option int) (option string))
+                   (tup3 octets string (option Builder_db.Rep.untyped_id)))
+                Builder_db.Rep.untyped_id) @@
   {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind, result_code, result_msg,
       console, script, main_binary, job
     FROM build |}
 
 let insert_old_build =
-  Caqti_type.(t3 Builder_db.Rep.untyped_id
-                (t3 (t4 string int64 int64 int64)
-                   (t4 int64 int (option int) (option string))
-                   (t3 octets string (option string)))
-                Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
+  Caqti_type.(tup3 Builder_db.Rep.untyped_id
+                (tup3 (tup4 string int64 int64 int64)
+                   (tup4 int64 int (option int) (option string))
+                   (tup3 octets string (option string)))
+                Builder_db.Rep.untyped_id) ->. Caqti_type.unit @@
   {| INSERT INTO new_build (id, uuid, start_d, start_ps, finish_d, finish_ps, result_kind,
      result_code, result_msg, console, script, main_binary, job)
@@ -34,21 +34,21 @@ let old_user =
 
 let collect_old_user =
   Caqti_type.unit ->*
-  Caqti_type.(t4 int64 string (t2 octets octets) (t3 int64 int64 int64)) @@
+  Caqti_type.(tup4 int64 string (tup2 octets octets) (tup3 int64 int64 int64)) @@
   "SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p FROM user"
 
 let collect_new_user =
   Caqti_type.unit ->*
-  Caqti_type.(t4 int64 string (t2 octets octets) (t4 int64 int64 int64 bool)) @@
+  Caqti_type.(tup4 int64 string (tup2 octets octets) (tup4 int64 int64 int64 bool)) @@
   "SELECT id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted FROM user"
 
 let insert_new_user =
-  Caqti_type.(t4 int64 string (t2 octets octets) (t4 int64 int64 int64 bool)) ->.
+  Caqti_type.(tup4 int64 string (tup2 octets octets) (tup4 int64 int64 int64 bool)) ->.
   Caqti_type.unit @@
   "INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"
 
 let insert_old_user =
-  Caqti_type.(t4 int64 string (t2 octets octets) (t3 int64 int64 int64)) ->.
+  Caqti_type.(tup4 int64 string (tup2 octets octets) (tup3 int64 int64 int64)) ->.
   Caqti_type.unit @@
   "INSERT INTO new_user (id, username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p) VALUES (?, ?, ?, ?, ?, ?, ?)"
@@ -42,7 +42,7 @@ let latest_successful_build =

 let build_artifacts =
   Builder_db.Rep.untyped_id ->*
-  Caqti_type.t2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
+  Caqti_type.tup2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
   {| SELECT a.filepath, a.localpath
      FROM build_artifact a
      WHERE a.build = ?
@@ -106,7 +106,7 @@ let insert_tag =
   "INSERT INTO tag (tag) VALUES (?)"

 let insert_job_tag =
-  Caqti_type.(t3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
+  Caqti_type.(tup3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
   Caqti_type.unit @@
   "INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)"

@@ -20,7 +20,7 @@ let latest_successful_build =

 let build_artifacts =
   Builder_db.Rep.untyped_id ->*
-  Caqti_type.t2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
+  Caqti_type.tup2 Builder_db.Rep.fpath Builder_db.Rep.fpath @@
   {| SELECT a.filepath, a.localpath
      FROM build_artifact a
      WHERE a.build = ?
@@ -31,7 +31,7 @@ let insert_tag =
   "INSERT INTO tag (tag) VALUES (?)"

 let insert_job_tag =
-  Caqti_type.(t3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
+  Caqti_type.(tup3 Builder_db.Rep.untyped_id string Builder_db.Rep.untyped_id) ->.
   Caqti_type.unit @@
   "INSERT INTO job_tag (tag, value, job) VALUES (?, ?, ?)"

@@ -55,11 +55,11 @@ let drop_input_id_from_build =

 let builds =
   Caqti_type.unit ->*
-  Caqti_type.t4
+  Caqti_type.tup4
     Builder_db.Rep.untyped_id
-    Caqti_type.octets
-    Caqti_type.octets
-    Caqti_type.octets @@
+    Builder_db.Rep.cstruct
+    Builder_db.Rep.cstruct
+    Builder_db.Rep.cstruct @@
   {| SELECT b.id, opam.sha256, env.sha256, system.sha256
      FROM build b, build_artifact opam, build_artifact env, build_artifact system
      WHERE opam.filepath = 'opam-switch' AND env.filepath = 'build-environment'
@@ -68,7 +68,7 @@ let builds =
   |}

 let set_input_id =
-  Caqti_type.t2 Builder_db.Rep.untyped_id Caqti_type.octets ->. Caqti_type.unit @@
+  Caqti_type.tup2 Builder_db.Rep.untyped_id Builder_db.Rep.cstruct ->. Caqti_type.unit @@
   "UPDATE build SET input_id = $2 WHERE id = $1"

 let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
@@ -76,7 +76,7 @@ let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
   Db.exec add_input_id_to_build () >>= fun () ->
   Db.collect_list builds () >>= fun builds ->
   Grej.list_iter_result (fun (id, opam_sha, env_sha, pkg_sha) ->
-      let input_id = Digestif.SHA256.(to_raw_string (digestv_string [ opam_sha ; env_sha ; pkg_sha ])) in
+      let input_id = Mirage_crypto.Hash.SHA256.digest (Cstruct.concat [ opam_sha ; env_sha ; pkg_sha ]) in
       Db.exec set_input_id (id, input_id))
     builds >>= fun () ->
   Db.exec (Grej.set_version new_version) ()
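The hunk above also swaps the hash implementation behind `input_id`, the SHA-256 over the opam-switch, build-environment and system-packages hashes. Both variants side by side, taken from the migration itself; only the hash API and byte representation differ:

```ocaml
(* "-" side: digestif over plain strings *)
let input_id_main (opam_sha : string) env_sha pkg_sha =
  Digestif.SHA256.(to_raw_string (digestv_string [ opam_sha ; env_sha ; pkg_sha ]))

(* "+" side: mirage-crypto (pre-1.0 Cstruct.t API) *)
let input_id_branch (opam_sha : Cstruct.t) env_sha pkg_sha =
  Mirage_crypto.Hash.SHA256.digest (Cstruct.concat [ opam_sha ; env_sha ; pkg_sha ])
```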
@@ -2,7 +2,7 @@ open Grej.Infix

 let orb_left_in_builds =
   Caqti_type.unit ->*
-  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
+  Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
   {| SELECT id, localpath FROM build_artifact
      WHERE filepath = 'orb.deb' OR filepath = 'orb.txz'
   |}

@@ -2,7 +2,7 @@ open Grej.Infix

 let deb_debug_left_in_builds =
   Caqti_type.unit ->*
-  Caqti_type.t4 (Builder_db.Rep.id `build_artifact) (Builder_db.Rep.id `build)
+  Caqti_type.tup4 (Builder_db.Rep.id `build_artifact) (Builder_db.Rep.id `build)
     Builder_db.Rep.fpath Builder_db.Rep.fpath @@
   {| SELECT id, build, localpath, filepath FROM build_artifact
      WHERE filepath LIKE '%.deb.debug'
@@ -17,7 +17,7 @@ let get_localpath =
   "SELECT localpath FROM build_artifact WHERE id = ?"

 let update_paths =
-  Caqti_type.t3 (Builder_db.Rep.id `build_artifact)
+  Caqti_type.tup3 (Builder_db.Rep.id `build_artifact)
     Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
   Caqti_type.unit @@
   "UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1"

@@ -2,7 +2,7 @@ open Grej.Infix

 let all_builds_with_binary : (unit, [`build] Builder_db.Rep.id * [`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
   Caqti_type.unit ->*
-  Caqti_type.t4 (Builder_db.Rep.id `build) (Builder_db.Rep.id `build_artifact)
+  Caqti_type.tup4 (Builder_db.Rep.id `build) (Builder_db.Rep.id `build_artifact)
     Builder_db.Rep.fpath Builder_db.Rep.fpath @@
   "SELECT b.id, b.main_binary, a.filepath, a.localpath FROM build b, build_artifact a WHERE b.main_binary = a.id AND b.main_binary IS NOT NULL"

@@ -11,14 +11,14 @@ let build_not_stripped : ([`build] Builder_db.Rep.id, [`build_artifact] Builder_
   "SELECT id FROM build_artifact WHERE build = ? AND filepath LIKE '%.debug'"

 let update_paths : ([`build_artifact] Builder_db.Rep.id * Fpath.t * Fpath.t, unit, [ `Zero ]) Caqti_request.t =
-  Caqti_type.t3 (Builder_db.Rep.id `build_artifact)
+  Caqti_type.tup3 (Builder_db.Rep.id `build_artifact)
     Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
   Caqti_type.unit @@
   "UPDATE build_artifact SET localpath = $2, filepath = $3 WHERE id = $1"

-let add_artifact : ((Fpath.t * Fpath.t * string) * (int64 * [`build] Builder_db.Rep.id), unit, [ `Zero]) Caqti_request.t =
-  Caqti_type.(t2 (t3 Builder_db.Rep.fpath Builder_db.Rep.fpath Caqti_type.octets)
-                (t2 Caqti_type.int64 (Builder_db.Rep.id `build))) ->.
+let add_artifact : ((Fpath.t * Fpath.t * Cstruct.t) * (int64 * [`build] Builder_db.Rep.id), unit, [ `Zero]) Caqti_request.t =
+  Caqti_type.(tup2 (tup3 Builder_db.Rep.fpath Builder_db.Rep.fpath Builder_db.Rep.cstruct)
+                (tup2 Caqti_type.int64 (Builder_db.Rep.id `build))) ->.
   Caqti_type.unit @@
   "INSERT INTO build_artifact (filepath, localpath, sha256, size, build) VALUES (?, ?, ?, ?, ?)"

@@ -48,8 +48,7 @@ let fixup datadir (module Db : Caqti_blocking.CONNECTION) =
   in
   assert (r = 0);
   Bos.OS.File.read (Fpath.append datadir artifact_lpath) >>= fun data ->
-  let size = Int64.of_int (String.length data)
-  and sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
+  let size = Int64.of_int (String.length data) and sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
   Db.exec update_paths (artifact_id, new_artifact_lpath, path artifact_fpath) >>= fun () ->
   Db.exec add_artifact ((artifact_fpath, artifact_lpath, sha256), (size, build_id)) >>= fun () ->
   Db.find Builder_db.last_insert_rowid () >>= fun new_build_artifact_id ->

@@ -2,11 +2,11 @@ open Grej.Infix

 let all_build_artifacts_with_dot_slash : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
   Caqti_type.unit ->*
-  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
+  Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
   "SELECT id, filepath FROM build_artifact WHERE filepath LIKE './%'"

 let update_path : ([`build_artifact] Builder_db.Rep.id * Fpath.t, unit, [< `Zero | `One | `Many > `Zero ]) Caqti_request.t =
-  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath ->.
+  Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath ->.
   Caqti_type.unit @@
   "UPDATE build_artifact SET filepath = $2 WHERE id = $1"

@@ -40,11 +40,11 @@ let copy_old_build =

 let old_build_execution_result =
   Caqti_type.unit ->*
-  Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) @@
+  Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) @@
   "SELECT id, result_kind, result_code FROM build"

 let update_new_build_execution_result =
-  Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int) ->. Caqti_type.unit @@
+  Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int) ->. Caqti_type.unit @@
   "UPDATE new_build SET result_code = $2 WHERE id = $1"

 let old_build =
@@ -83,11 +83,11 @@ let copy_new_build =

 let new_build_execution_result =
   Caqti_type.unit ->*
-  Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int) @@
+  Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int) @@
   "SELECT id, result_code FROM build"

 let update_old_build_execution_result =
-  Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) ->.
+  Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) int (option int)) ->.
   Caqti_type.unit @@
   "UPDATE new_build SET result_kind = $2, result_code = $3 WHERE id = $1"

@@ -2,12 +2,12 @@ open Grej.Infix

 let all_build_artifacts_like_hashes : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
   Caqti_type.unit ->*
-  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
+  Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
   "SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%.build-hashes'"

 let all_build_artifacts_like_readme : (unit, [`build_artifact] Builder_db.Rep.id * Fpath.t, [ `Zero | `One | `Many ]) Caqti_request.t =
   Caqti_type.unit ->*
-  Caqti_type.t2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
+  Caqti_type.tup2 (Builder_db.Rep.id `build_artifact) Builder_db.Rep.fpath @@
   "SELECT id, localpath FROM build_artifact WHERE filepath LIKE '%README.md'"

 let fixup datadir (module Db : Caqti_blocking.CONNECTION) =

@@ -8,8 +8,8 @@ open Grej.Infix
 module Asn = struct
   let decode_strict codec cs =
     match Asn.decode codec cs with
-    | Ok (a, rest) ->
-      if String.length rest = 0
+    | Ok (a, cs) ->
+      if Cstruct.length cs = 0
       then Ok a
       else Error "trailing bytes"
     | Error (`Parse msg) -> Error ("parse error: " ^ msg)
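The `decode_strict` change tracks the asn1-combinators input type: with asn1-combinators >= 0.3.0, `Asn.decode` consumes and returns `string` (hence `String.length rest`), while older releases worked on `Cstruct.t`. A hedged usage sketch of the same pattern, assuming the string-based API and a DER codec for a boolean (the codec choice is illustrative, not from this repository):

```ocaml
let bool_codec = Asn.codec Asn.der Asn.S.bool

(* accept a value only if the whole buffer is consumed *)
let parse_exact (buf : string) =
  match Asn.decode bool_codec buf with
  | Ok (v, rest) when String.length rest = 0 -> Ok v
  | Ok _ -> Error "trailing bytes"
  | Error (`Parse msg) -> Error ("parse error: " ^ msg)
```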
@@ -95,24 +95,24 @@ let copy_from_new_build =

 let old_build_console_script =
   Caqti_type.unit ->*
-  Caqti_type.(t4 (Builder_db.Rep.id (`build : [ `build ]))
-                (t2 string Builder_db.Rep.uuid) octets string) @@
+  Caqti_type.(tup4 (Builder_db.Rep.id (`build : [ `build ]))
+                (tup2 string Builder_db.Rep.uuid) Builder_db.Rep.cstruct string) @@
   "SELECT b.id, job.name, b.uuid, b.console, b.script FROM build b, job WHERE b.job = job.id"

 let update_new_build_console_script =
-  Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ]))
+  Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ]))
     Builder_db.Rep.fpath Builder_db.Rep.fpath) ->.
   Caqti_type.unit @@
   "UPDATE new_build SET console = $2, script = $3 WHERE id = $1"

 let new_build_console_script =
   Caqti_type.unit ->*
-  Caqti_type.t3 (Builder_db.Rep.id (`build : [ `build ]))
+  Caqti_type.tup3 (Builder_db.Rep.id (`build : [ `build ]))
     Builder_db.Rep.fpath Builder_db.Rep.fpath @@
   "SELECT id, console, script FROM build"

 let update_old_build_console_script =
-  Caqti_type.(t3 (Builder_db.Rep.id (`build : [ `build ])) octets string) ->.
+  Caqti_type.(tup3 (Builder_db.Rep.id (`build : [ `build ])) Builder_db.Rep.cstruct string) ->.
   Caqti_type.unit @@
   "UPDATE new_build SET console = $2, script = $3 WHERE id = $1"

@@ -2,13 +2,13 @@ open Grej.Infix

 let mixups =
   Caqti_type.unit ->*
-  Caqti_type.t3 (Builder_db.Rep.id (`build : [`build]))
+  Caqti_type.tup3 (Builder_db.Rep.id (`build : [`build]))
     Builder_db.Rep.fpath Builder_db.Rep.fpath @@
   "SELECT id, console, script FROM build \
    WHERE console LIKE '%/script.txt' AND script LIKE '%/console.txt'"

 let fixup =
-  Caqti_type.t3 (Builder_db.Rep.id (`build : [`build]))
+  Caqti_type.tup3 (Builder_db.Rep.id (`build : [`build]))
     Builder_db.Rep.fpath Builder_db.Rep.fpath ->.
   Caqti_type.unit @@
   "UPDATE build SET console = $2, script = $3 WHERE id = $1"

@@ -73,11 +73,11 @@ let copy_from_new_build =
   |}

 let build_id_and_user =
-  Caqti_type.unit ->* Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) int64) @@
+  Caqti_type.unit ->* Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) int64) @@
   "SELECT id, user FROM build"

 let update_new_build_platform =
-  Caqti_type.(t2 (Builder_db.Rep.id (`build : [ `build ])) string) ->. Caqti_type.unit @@
+  Caqti_type.(tup2 (Builder_db.Rep.id (`build : [ `build ])) string) ->. Caqti_type.unit @@
   "UPDATE new_build SET platform = $2 WHERE id = $1"

 let drop_build =
@@ -23,21 +23,21 @@ let new_uuid_rep =

 let uuids_byte_encoded_q =
   Caqti_type.unit ->*
-  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep @@
+  Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep @@
   "SELECT id, uuid FROM build"

 let uuids_hex_encoded_q =
   Caqti_type.unit ->*
-  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep @@
+  Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep @@
   "SELECT id, uuid FROM build"

 let migrate_q =
-  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep ->.
+  Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) new_uuid_rep ->.
   Caqti_type.unit @@
   "UPDATE build SET uuid = $2 WHERE id = $1"

 let rollback_q =
-  Caqti_type.t2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep ->.
+  Caqti_type.tup2 (Builder_db.Rep.id (`build : [`build])) old_uuid_rep ->.
   Caqti_type.unit @@
   "UPDATE build SET uuid = $2 WHERE id = $1"

@@ -1,32 +0,0 @@
-let new_version = 17L and old_version = 16L
-and identifier = "2023-09-11"
-and migrate_doc = "index failed builds on main binary is null"
-and rollback_doc = "index failed builds on exit code"
-
-open Grej.Syntax
-
-let drop_idx_build_failed =
-  Caqti_type.(unit ->. unit) @@
-  "DROP INDEX idx_build_failed"
-
-let migrate _datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:old_version (module Db) in
-  let* () = Db.exec drop_idx_build_failed () in
-  let* () =
-    Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
-             "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
-              WHERE main_binary IS NULL")
-      ()
-  in
-  Db.exec (Grej.set_version new_version) ()
-
-let rollback _datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:new_version (module Db) in
-  let* () = Db.exec drop_idx_build_failed () in
-  let* () =
-    Db.exec (Caqti_type.unit ->. Caqti_type.unit @@
-             "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
-              WHERE result_code <> 0")
-      ()
-  in
-  Db.exec (Grej.set_version old_version) ()
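The deleted migration above shows the standard SQLite pattern for changing a partial index: the predicate cannot be altered in place, so the index is dropped and recreated with the new `WHERE` clause. A condensed sketch, reusing the definitions from the file above:

```ocaml
(* Sketch only: redefine idx_build_failed via drop + create, as in the
   migrate/rollback pair above (assumes Caqti_request.Infix is open). *)
let recreate_idx_build_failed (module Db : Caqti_blocking.CONNECTION) =
  let open Grej.Syntax in
  let* () = Db.exec drop_idx_build_failed () in
  Db.exec
    (Caqti_type.unit ->. Caqti_type.unit @@
     "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) \
      WHERE main_binary IS NULL")
    ()
```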
@@ -1,162 +0,0 @@
-let new_version = 18L and old_version = 17L
-and identifier = "2023-09-14"
-and migrate_doc = "Artifacts are stored content-addressed in the filesystem"
-and rollback_doc = "Artifacts are stored under their build's job name and uuid"
-
-open Grej.Syntax
-
-let new_build_artifact =
-  Caqti_type.unit ->. Caqti_type.unit @@
-  {| CREATE TABLE new_build_artifact (
-       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
-       filepath TEXT NOT NULL,
-       sha256 BLOB NOT NULL,
-       size INTEGER NOT NULL,
-       build INTEGER NOT NULL,
-
-       FOREIGN KEY(build) REFERENCES build(id),
-       UNIQUE(build, filepath)
-     )
-  |}
-
-let old_build_artifact =
-  Caqti_type.unit ->. Caqti_type.unit @@
-  {| CREATE TABLE new_build_artifact (
-       id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
-       filepath TEXT NOT NULL, -- the path as in the build
-       localpath TEXT NOT NULL, -- local path to the file on disk
-       sha256 BLOB NOT NULL,
-       size INTEGER NOT NULL,
-       build INTEGER NOT NULL,
-
-       FOREIGN KEY(build) REFERENCES build(id),
-       UNIQUE(build, filepath)
-     )
-  |}
-
-let idx_build_artifact_sha256 =
-  Caqti_type.unit ->. Caqti_type.unit @@
-  "CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)"
-
-let idx_build_artifact_build =
-  Caqti_type.unit ->. Caqti_type.unit @@
-  "CREATE INDEX idx_build_artifact_build ON build_artifact(build)"
-
-let copy_new_build_artifact =
-  Caqti_type.(unit ->. unit) @@
-  {| INSERT INTO new_build_artifact(id, filepath, sha256, size, build)
-     SELECT id, filepath, sha256, size, build
-     FROM build_artifact
-  |}
-
-let copy_old_build_artifact =
-  Caqti_type.(unit ->. unit) @@
-  {| INSERT INTO new_build_artifact(id, filepath, localpath, sha256, size, build)
-     SELECT a.id, a.filepath,
-            j.name || '/' || b.uuid || '/output/' || a.filepath,
-            a.sha256, a.size, a.build
-     FROM build_artifact a, job j, build b
-     WHERE b.id = a.build AND j.id = b.job
-  |}
-
-let new_build_artifact_paths =
-  Caqti_type.unit ->* Caqti_type.(t2 string string) @@
-  {| SELECT localpath, '_artifacts/' || substr(lower(hex(sha256)), 1, 2) || '/' || lower(hex(sha256))
-     FROM build_artifact
-  |}
-
-let old_build_artifact_paths =
-  Caqti_type.unit ->* Caqti_type.(t2 string string) @@
-  {| SELECT '_artifacts/' || substr(lower(hex(a.sha256)), 1, 2) || '/' || lower(hex(a.sha256)),
-            j.name || '/' || b.uuid || '/output/' || a.filepath
-     FROM build_artifact a, job j, build b
-     WHERE b.id = a.build AND j.id = b.job
-  |}
-
-let drop_build_artifact =
-  Caqti_type.(unit ->. unit) @@
-  "DROP TABLE build_artifact"
-
-let rename_build_artifact =
-  Caqti_type.(unit ->. unit) @@
-  "ALTER TABLE new_build_artifact RENAME TO build_artifact"
-
-let move_paths ?force datadir (old_path, new_path) =
-  let old_path = Fpath.(datadir // v old_path) and new_path = Fpath.(datadir // v new_path) in
-  let* _created = Bos.OS.Dir.create (Fpath.parent new_path) in
-  Bos.OS.Path.move ?force old_path new_path
-
-let copy_paths datadir (old_path, new_path) =
-  let old_path = Fpath.(datadir // v old_path) and new_path = Fpath.(datadir // v new_path) in
-  let new_path_tmp = Fpath.(new_path + "tmp") in
-  let* _created = Bos.OS.Dir.create (Fpath.parent new_path) in
-  let cmd = Bos.Cmd.(v "cp" % p old_path % p new_path_tmp) in
-  let* () =
-    match Bos.OS.Cmd.run_status cmd with
-    | Ok (`Exited 0) ->
-      Ok ()
-    | Ok status ->
-      let _ = Bos.OS.Path.delete new_path_tmp in
-      Error (`Msg (Fmt.str "cp failed: %a" Bos.OS.Cmd.pp_status status))
-    | Error _ as e ->
-      let _ = Bos.OS.Path.delete new_path_tmp in
-      e
-  in
-  Bos.OS.Path.move ~force:true new_path_tmp new_path
-
-let migrate datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:old_version (module Db) in
-  let* () = Db.exec new_build_artifact () in
-  let* () = Db.exec copy_new_build_artifact () in
-  let* () = Db.iter_s new_build_artifact_paths (move_paths ~force:true datadir) () in
-  let* () = Db.exec drop_build_artifact () in
-  let* () = Db.exec rename_build_artifact () in
-  let* () = Db.exec idx_build_artifact_sha256 () in
-  let* () = Db.exec idx_build_artifact_build () in
-  Db.exec (Grej.set_version new_version) ()
-
-let rollback datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:new_version (module Db) in
-  let* () = Db.exec old_build_artifact () in
-  let* () = Db.exec copy_old_build_artifact () in
-  let* () = Db.iter_s old_build_artifact_paths (copy_paths datadir) () in
-  let* () =
-    Db.iter_s old_build_artifact_paths
-      (fun (old_path, _new_path) ->
-         Bos.OS.Path.delete Fpath.(datadir // v old_path))
-      ()
-  in
-  let* () = Db.exec drop_build_artifact () in
-  let* () = Db.exec rename_build_artifact () in
-  let* () = Db.exec idx_build_artifact_sha256 () in
-  Db.exec (Grej.set_version old_version) ()
-
-(* migration failed but managed to move *some* files *)
-let fixup_migrate datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:old_version (module Db) in
-  let* () =
-    Db.iter_s new_build_artifact_paths
-      (fun (old_path, new_path) ->
-         let* old_exists = Bos.OS.Path.exists Fpath.(datadir // v old_path) in
-         let* new_exists = Bos.OS.Path.exists Fpath.(datadir // v new_path) in
-         if new_exists && not old_exists then
-           copy_paths datadir (new_path, old_path)
-         else Ok ())
-      ()
-  in
-  Db.iter_s new_build_artifact_paths
-    (fun (_old_path, new_path) ->
-       Bos.OS.Path.delete Fpath.(datadir // v new_path))
-    ()
-
-(* rollback failed but some or all artifacts were copied *)
-let fixup_rollback datadir (module Db : Caqti_blocking.CONNECTION) =
-  let* () = Grej.check_version ~user_version:new_version (module Db) in
-  Db.iter_s old_build_artifact_paths
-    (fun (old_path, new_path) ->
-       let* old_exists = Bos.OS.Path.exists Fpath.(datadir // v old_path) in
-       if old_exists then
-         Bos.OS.Path.delete Fpath.(datadir // v new_path)
-       else
-         move_paths datadir (new_path, old_path))
-    ()
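For reference, the content-addressed layout introduced (and here rolled back) above derives an artifact's location purely from its SHA-256, sharded by the first two hex characters: `_artifacts/<hh>/<full hex digest>`. An illustrative OCaml equivalent of the SQL path expression (helper names are not from this repository):

```ocaml
(* hex-encode a raw digest, then shard on the first two hex characters *)
let to_hex (raw : string) =
  String.concat ""
    (List.map (fun c -> Printf.sprintf "%02x" (Char.code c))
       (List.init (String.length raw) (String.get raw)))

let artifact_path (sha256 : string) : Fpath.t =
  let hex = to_hex sha256 in
  Fpath.(v "_artifacts" / String.sub hex 0 2 / hex)
```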
@@ -1,9 +1,9 @@
 opam-version: "2.0"
 maintainer: "Reynir Björnsson <reynir@reynir.dk>"
 authors: ["Reynir Björnsson <reynir@reynir.dk>"]
-homepage: "https://github.com/robur-coop/builder-web"
-dev-repo: "git+https://github.com/robur-coop/builder-web.git"
-bug-reports: "https://github.com/robur-coop/builder-web/issues"
+homepage: "https://github.com/roburio/builder-web"
+dev-repo: "git+https://github.com/roburio/builder-web.git"
+bug-reports: "https://github.com/roburio/builder-web/issues"
 license: "ISC"

 build: [
@@ -17,16 +17,18 @@ build: [
 depends: [
   "ocaml" {>= "4.13.0"}
   "dune" {>= "2.7.0"}
-  "builder" {>= "0.4.0"}
-  "dream" {>= "1.0.0~alpha7"}
+  "builder" {>= "0.2.0"}
+  "dream" {= "1.0.0~alpha4"}
+  "cstruct" {>= "6.0.0"}
   "bos"
-  "ohex" {>= "0.2.0"}
-  "lwt" {>= "5.7.0"}
-  "caqti" {>= "2.1.2"}
+  "hex"
+  "lwt" {>= "5.6.0"}
+  "caqti" {>= "1.8.0"}
   "caqti-lwt"
   "caqti-driver-sqlite3"
-  "mirage-crypto-rng" {>= "0.11.0"}
-  "kdf"
+  "pbkdf"
+  "mirage-crypto-rng"
+  "scrypt-kdf"
   "opam-core"
   "opam-format" {>= "2.1.0"}
   "metrics" {>= "0.3.0"}
@@ -37,27 +39,25 @@ depends: [
   "tyxml" {>= "4.3.0"}
   "ptime"
   "duration"
-  "asn1-combinators" {>= "0.3.0"}
+  "mirage-crypto"
+  "asn1-combinators"
   "logs"
   "cmdliner" {>= "1.1.0"}
   "uri"
   "fmt" {>= "0.8.7"}
-  "cmarkit" {>= "0.3.0"}
-  "tar" {>= "3.0.0"}
-  "tar-unix" {>= "3.0.0"}
+  "omd"
+  "tar"
   "owee"
   "solo5-elftool" {>= "0.3.0"}
-  "decompress" {>= "1.5.0"}
-  "digestif" {>= "1.2.0"}
-  "alcotest" {>= "1.2.0" & with-test}
+  "decompress"
+  "alcotest" {with-test}
   "ppx_deriving" {with-test}
   "ppx_deriving_yojson" {with-test}
-  "yojson" {with-test}
 ]

 synopsis: "Web interface for builder"
 description: """
-Builder-web takes in submissions of builds, typically from [builder](https://github.com/robur-coop/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
+Builder-web takes in submissions of builds, typically from [builder](https://github.com/roburio/builder/), and displays the produced artifacts in a way that makes it easy to compare checksums and build status.
 Produced binaries can be downloaded and executed.
 [builds.robur.coop](https://builds.robur.coop/) itself runs builder-web.
 """
183 db/builder_db.ml
@@ -4,15 +4,15 @@ open Caqti_request.Infix

 let application_id = 1234839235l

-(* Please update this when making changes! And also update
-   packaging/batch-viz.sh and packaging/visualizations.sh. *)
-let current_version = 18L
+(* Please update this when making changes! *)
+let current_version = 16L

 type 'a id = 'a Rep.id

 type file = Rep.file = {
   filepath : Fpath.t;
-  sha256 : string;
+  localpath : Fpath.t;
+  sha256 : Cstruct.t;
   size : int;
 }

@@ -57,7 +57,7 @@ module Job = struct

   let get_all_with_section_synopsis =
     Caqti_type.unit ->*
-    Caqti_type.(t4 (id `job) string (option string) (option string)) @@
+    Caqti_type.(tup4 (id `job) string (option string) (option string)) @@
     {| SELECT j.id, j.name, section.value, synopsis.value
        FROM job j, tag section_tag, tag synopsis_tag
        LEFT JOIN job_tag section ON section.job = j.id AND section.tag = section_tag.id
@@ -117,15 +117,15 @@ module Job_tag = struct
     "DROP TABLE IF EXISTS job_tag"

   let add =
-    Caqti_type.(t3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
+    Caqti_type.(tup3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
     "INSERT INTO job_tag (tag, value, job) VALUES ($1, $2, $3)"

   let update =
-    Caqti_type.(t3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
+    Caqti_type.(tup3 (id `tag) string (id `job)) ->. Caqti_type.unit @@
     "UPDATE job_tag SET value = $2 WHERE tag = $1 AND job = $3"

   let get_value =
-    Caqti_type.(t2 (id `tag) (id `job)) ->? Caqti_type.string @@
+    Caqti_type.(tup2 (id `tag) (id `job)) ->? Caqti_type.string @@
     "SELECT value FROM job_tag WHERE tag = ? AND job = ?"

   let remove_by_job =
@@ -140,6 +140,7 @@ module Build_artifact = struct
     {| CREATE TABLE build_artifact (
          id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
          filepath TEXT NOT NULL, -- the path as in the build
+         localpath TEXT NOT NULL, -- local path to the file on disk
          sha256 BLOB NOT NULL,
          size INTEGER NOT NULL,
          build INTEGER NOT NULL,
@@ -155,30 +156,26 @@ module Build_artifact = struct

   let get =
     id `build_artifact ->! file @@
-    {| SELECT filepath, sha256, size
+    {| SELECT filepath, localpath, sha256, size
        FROM build_artifact WHERE id = ? |}

   let get_by_build_uuid =
-    Caqti_type.t2 uuid fpath ->? Caqti_type.t2 (id `build_artifact) file @@
+    Caqti_type.tup2 uuid fpath ->? Caqti_type.tup2 (id `build_artifact) file @@
     {| SELECT build_artifact.id, build_artifact.filepath,
-         build_artifact.sha256, build_artifact.size
+         build_artifact.localpath, build_artifact.sha256, build_artifact.size
        FROM build_artifact
        INNER JOIN build ON build.id = build_artifact.build
        WHERE build.uuid = ? AND build_artifact.filepath = ?
    |}

   let get_all_by_build =
-    id `build ->* Caqti_type.(t2 (id `build_artifact) file) @@
-    "SELECT id, filepath, sha256, size FROM build_artifact WHERE build = ?"
+    id `build ->* Caqti_type.(tup2 (id `build_artifact) file) @@
+    "SELECT id, filepath, localpath, sha256, size FROM build_artifact WHERE build = ?"

-  let exists =
-    Caqti_type.octets ->! Caqti_type.bool @@
-    "SELECT EXISTS(SELECT 1 FROM build_artifact WHERE sha256 = ?)"
-
   let add =
-    Caqti_type.(t2 file (id `build)) ->. Caqti_type.unit @@
-    "INSERT INTO build_artifact (filepath, sha256, size, build) \
-     VALUES (?, ?, ?, ?)"
+    Caqti_type.(tup2 file (id `build)) ->. Caqti_type.unit @@
+    "INSERT INTO build_artifact (filepath, localpath, sha256, size, build) \
+     VALUES (?, ?, ?, ?, ?)"

   let remove_by_build =
     id `build ->. Caqti_type.unit @@
@@ -199,55 +196,34 @@ module Build = struct
     script : Fpath.t;
     platform : string;
     main_binary : [`build_artifact] id option;
-    input_id : string option;
+    input_id : Cstruct.t option;
     user_id : [`user] id;
     job_id : [`job] id;
   }

-  let pp ppf t =
-    Fmt.pf ppf "@[<hov>{ uuid=@ %a;@ \
-                start=@ %a;@ \
-                finish=@ %a;@ \
-                result=@ @[<hov>%a@];@ \
-                console=@ %a;@ \
-                script=@ %a;@ \
-                platform=@ %S;@ \
-                main_binary=@ @[<hov>%a@];@ \
-                input_id=@ @[<hov>%a@];@ \
-                user_id=@ %Lx;@ \
-                job_id=@ %Lx;@ }@]"
-      Uuidm.pp t.uuid
-      Ptime.pp t.start
-      Ptime.pp t.finish
-      Builder.pp_execution_result t.result
-      Fpath.pp t.console
-      Fpath.pp t.script
-      t.platform
-      Fmt.(Dump.option int64) t.main_binary
-      Fmt.(Dump.option string) t.input_id
-      t.user_id
-      t.job_id
-
   let t =
     let rep =
-      Caqti_type.(t11
+      Caqti_type.(tup3
+        (tup4
           uuid
+          (tup2
             Rep.ptime
-            Rep.ptime
+            Rep.ptime)
+          (tup2
             execution_result
-            fpath
+            fpath)
+          (tup4
             fpath
             string
             (option (Rep.id `build_artifact))
-            (option octets)
+            (option Rep.cstruct)))
         (id `user)
         (id `job))
    in
    let encode { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id } =
-      Ok (uuid, start, finish, result, console, script, platform, main_binary, input_id, user_id, job_id)
+      Ok ((uuid, (start, finish), (result, console), (script, platform, main_binary, input_id)), user_id, job_id)
    in
-    let decode (uuid, start, finish, result, console, script, platform, main_binary, input_id, user_id, job_id) =
+    let decode ((uuid, (start, finish), (result, console), (script, platform, main_binary, input_id)), user_id, job_id) =
      Ok { uuid; start; finish; result; console; script; platform; main_binary; input_id; user_id; job_id }
    in
    Caqti_type.custom ~encode ~decode rep
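Both sides of the `Build.t` hunk keep the same `Caqti_type.custom` shape: a tuple representation plus `encode`/`decode` functions between the record and that tuple. Only the tuple changes, a flat `t11` in Caqti 2.x versus nested `tup2`..`tup4` products (which stop at four fields) in Caqti 1.x. A toy sketch of the pattern, with an illustrative record not from this repository:

```ocaml
type point = { x : int; y : int }

let point : point Caqti_type.t =
  let encode { x; y } = Ok (x, y) in
  let decode (x, y) = Ok { x; y } in
  Caqti_type.(custom ~encode ~decode (t2 int int))
```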
@@ -282,7 +258,7 @@ module Build = struct
    "DROP TABLE IF EXISTS build"

  let get_by_uuid =
-    Rep.uuid ->? Caqti_type.t2 (id `build) t @@
+    Rep.uuid ->? Caqti_type.tup2 (id `build) t @@
    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
         result_code, result_msg,
         console, script, platform, main_binary, input_id, user, job
@@ -291,7 +267,7 @@ module Build = struct
    |}

  let get_all =
-    id `job ->* Caqti_type.t2 (id `build) t @@
+    id `job ->* Caqti_type.tup2 (id `build) t @@
    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
         result_code, result_msg, console,
         script, platform, main_binary, input_id, user, job
@@ -301,20 +277,20 @@ module Build = struct
    |}

  let get_all_failed =
-    Caqti_type.(t3 int int (option string)) ->* Caqti_type.t2 Caqti_type.string t @@
+    Caqti_type.(tup3 int int (option string)) ->* Caqti_type.tup2 Caqti_type.string t @@
    {| SELECT job.name, b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_code, b.result_msg, b.console, b.script, b.platform,
         b.main_binary, b.input_id, b.user, b.job
       FROM build b
       INNER JOIN job ON job.id = b.job
-       WHERE b.main_binary IS NULL AND ($3 IS NULL OR b.platform = $3)
+       WHERE b.result_code <> 0 AND ($3 IS NULL OR b.platform = $3)
       ORDER BY start_d DESC, start_ps DESC
       LIMIT $2
       OFFSET $1
    |}

  let get_all_artifact_sha =
-    Caqti_type.(t2 (id `job) (option string)) ->* Caqti_type.octets @@
+    Caqti_type.(tup2 (id `job) (option string)) ->* Rep.cstruct @@
    {| SELECT DISTINCT a.sha256
       FROM build_artifact a, build b
       WHERE b.job = $1 AND b.main_binary = a.id
@@ -323,79 +299,40 @@ module Build = struct
    |}

  let get_failed_builds =
-    Caqti_type.(t2 (id `job) (option string)) ->* t @@
+    Caqti_type.(tup2 (id `job) (option string)) ->* t @@
    {| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
         result_code, result_msg, console, script,
         platform, main_binary, input_id, user, job
       FROM build
-       WHERE job = $1
-         AND main_binary IS NULL
+       WHERE job = $1 AND result_code <> 0
         AND ($2 IS NULL OR platform = $2)
       ORDER BY start_d DESC, start_ps DESC
    |}

  let get_latest_successful_with_binary =
-    Caqti_type.(t2 (id `job) string) ->? Caqti_type.t3 (id `build) t file @@
+    Caqti_type.(tup2 (id `job) string) ->? Caqti_type.tup3 (id `build) t file_opt @@
    {| SELECT b.id,
         b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_code, b.result_msg, b.console, b.script,
         b.platform, b.main_binary, b.input_id, b.user, b.job,
-         a.filepath, a.sha256, a.size
-       FROM build b, build_artifact a
-       WHERE b.main_binary = a.id AND b.job = $1 AND b.platform = $2
-         AND b.main_binary IS NOT NULL
+         a.filepath, a.localpath, a.sha256, a.size
+       FROM build b
+       LEFT JOIN build_artifact a ON
+         b.main_binary = a.id
+       WHERE b.job = $1 AND b.platform = $2 AND b.result_code = 0
       ORDER BY b.start_d DESC, b.start_ps DESC
       LIMIT 1
    |}

-  let get_builds_older_than =
-    Caqti_type.(t3 (id `job) (option string) Rep.ptime) ->* Caqti_type.t2 (id `build) t @@
-    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
-         result_code, result_msg, console, script,
-         platform, main_binary, input_id, user, job
-       FROM build
-       WHERE job = $1
-         AND ($2 IS NULL OR platform = $2)
-         AND (finish_d < $3 OR (finish_d = $3 AND finish_ps <= $4))
-       ORDER BY start_d DESC, start_ps DESC
-    |}
-
-  let get_builds_excluding_latest_n =
-    Caqti_type.(t3 (id `job) (option string) int) ->* Caqti_type.t2 (id `build) t @@
-    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
-         result_code, result_msg, console, script,
-         platform, main_binary, input_id, user, job
-       FROM build
-       WHERE job = $1
-         AND ($2 IS NULL OR platform = $2)
-       ORDER BY start_d DESC, start_ps DESC
-       LIMIT -1 OFFSET $3
-    |}
-  (* "LIMIT -1 OFFSET n" is all rows except the first n *)
-
-  let get_nth_latest_successful =
-    Caqti_type.(t3 (id `job) (option string) int) ->? Caqti_type.t2 (id `build) t @@
-    {| SELECT id, uuid, start_d, start_ps, finish_d, finish_ps,
-         result_code, result_msg, console, script,
-         platform, main_binary, input_id, user, job
-       FROM build
-       WHERE job = $1
-         AND ($2 IS NULL OR platform = $2)
-         AND main_binary IS NOT NULL
-       ORDER BY start_d DESC, start_ps DESC
-       LIMIT 1 OFFSET $3
-    |}
-
  let get_latest_successful =
-    Caqti_type.(t2 (id `job) (option string)) ->? t @@
+    Caqti_type.(tup2 (id `job) (option string)) ->? t @@
    {| SELECT
         b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
         b.result_code, b.result_msg, b.console, b.script,
         b.platform, b.main_binary, b.input_id, b.user, b.job
       FROM build b
-       WHERE b.job = $1
+       WHERE b.job = $1 AND b.result_code = 0
         AND ($2 IS NULL OR b.platform = $2)
-         AND b.main_binary IS NOT NULL
       ORDER BY b.start_d DESC, b.start_ps DESC
       LIMIT 1
    |}
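Note the semantic shift running through these queries: the `-` side defines a successful build as `main_binary IS NOT NULL` (and a failed one as `IS NULL`), while the `+` side keys on `result_code`. A hypothetical call site for `get_latest_successful` under Caqti_blocking (`job_id` assumed to be a `[`job] id`):

```ocaml
(* None = any platform; yields Ok (Some build) when one exists *)
let latest_successful (module Db : Caqti_blocking.CONNECTION) job_id =
  Db.find_opt Builder_db.Build.get_latest_successful (job_id, None)
```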
@ -409,7 +346,7 @@ module Build = struct
|
||||||
FROM build b, build b0, build_artifact a, build_artifact a0
|
FROM build b, build b0, build_artifact a, build_artifact a0
|
||||||
WHERE b0.id = ? AND b0.job = b.job AND
|
WHERE b0.id = ? AND b0.job = b.job AND
|
||||||
b.platform = b0.platform AND
|
b.platform = b0.platform AND
|
||||||
b.main_binary IS NOT NULL AND
|
b.result_code = 0 AND
|
||||||
a.id = b.main_binary AND a0.id = b0.main_binary AND
|
a.id = b.main_binary AND a0.id = b0.main_binary AND
|
||||||
a.sha256 <> a0.sha256 AND
|
a.sha256 <> a0.sha256 AND
|
||||||
(b0.start_d > b.start_d OR b0.start_d = b.start_d AND b0.start_ps > b.start_ps)
|
(b0.start_d > b.start_d OR b0.start_d = b.start_d AND b0.start_ps > b.start_ps)
|
||||||
|
@ -426,7 +363,7 @@ module Build = struct
|
||||||
FROM build b, build b0, build_artifact a, build_artifact a0
|
FROM build b, build b0, build_artifact a, build_artifact a0
|
||||||
WHERE b0.id = ? AND b0.job = b.job AND
|
WHERE b0.id = ? AND b0.job = b.job AND
|
||||||
b.platform = b0.platform AND
|
b.platform = b0.platform AND
|
||||||
b.main_binary IS NOT NULL AND
|
b.result_code = 0 AND
|
||||||
a.id = b.main_binary AND a0.id = b0.main_binary AND
|
a.id = b.main_binary AND a0.id = b0.main_binary AND
|
||||||
a.sha256 <> a0.sha256 AND
|
a.sha256 <> a0.sha256 AND
|
||||||
(b0.start_d < b.start_d OR b0.start_d = b.start_d AND b0.start_ps < b.start_ps)
|
(b0.start_d < b.start_d OR b0.start_d = b.start_d AND b0.start_ps < b.start_ps)
|
||||||
|
@ -446,7 +383,7 @@ module Build = struct
|
||||||
|}
|
|}
|
||||||
|
|
||||||
let get_same_input_different_output_hashes =
|
let get_same_input_different_output_hashes =
|
||||||
id `build ->* Caqti_type.octets @@
|
id `build ->* Rep.cstruct @@
|
||||||
{| SELECT DISTINCT a.sha256
|
{| SELECT DISTINCT a.sha256
|
||||||
FROM build b0, build_artifact a0, build b, build_artifact a
|
FROM build b0, build_artifact a0, build b, build_artifact a
|
||||||
WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 <> a.sha256
|
WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 <> a.sha256
|
||||||
|
@ -455,7 +392,7 @@ module Build = struct
|
||||||
|}
|
|}
|
||||||
|
|
||||||
let get_different_input_same_output_input_ids =
|
let get_different_input_same_output_input_ids =
|
||||||
id `build ->* Caqti_type.octets @@
|
id `build ->* Rep.cstruct @@
|
||||||
{| SELECT DISTINCT b.input_id
|
{| SELECT DISTINCT b.input_id
|
||||||
FROM build b0, build_artifact a0, build b, build_artifact a
|
FROM build b0, build_artifact a0, build b, build_artifact a
|
||||||
WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 = a.sha256
|
WHERE b0.id = ? AND a0.id = b0.main_binary AND a0.sha256 = a.sha256
|
||||||
|
@ -463,7 +400,7 @@ module Build = struct
|
||||||
|}
|
|}
|
||||||
|
|
||||||
let get_one_by_input_id =
|
let get_one_by_input_id =
|
||||||
Caqti_type.octets ->! t @@
|
Rep.cstruct ->! t @@
|
||||||
{| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
|
{| SELECT uuid, start_d, start_ps, finish_d, finish_ps,
|
||||||
result_code, result_msg, console, script,
|
result_code, result_msg, console, script,
|
||||||
platform, main_binary, input_id, user, job
|
platform, main_binary, input_id, user, job
|
||||||
|
@ -487,7 +424,7 @@ module Build = struct
|
||||||
|}
|
|}
|
||||||
|
|
||||||
let get_by_hash =
|
let get_by_hash =
|
||||||
Caqti_type.octets ->! t @@
|
Rep.cstruct ->! t @@
|
||||||
{| SELECT
|
{| SELECT
|
||||||
b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
|
b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
|
||||||
b.result_code, b.result_msg, b.console, b.script,
|
b.result_code, b.result_msg, b.console, b.script,
|
||||||
|
@ -500,11 +437,11 @@ module Build = struct
|
||||||
|}
|
|}
|
||||||
|
|
||||||
let get_with_main_binary_by_hash =
|
let get_with_main_binary_by_hash =
|
||||||
Caqti_type.octets ->! Caqti_type.t2 t file_opt @@
|
+    Rep.cstruct ->! Caqti_type.tup2 t file_opt @@
     {| SELECT b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
          b.result_code, b.result_msg, b.console, b.script,
          b.platform, b.main_binary, b.input_id, b.user, b.job,
-         a.filepath, a.sha256, a.size
+         a.filepath, a.localpath, a.sha256, a.size
        FROM build_artifact a
        INNER JOIN build b ON b.id = a.build
        WHERE a.sha256 = ?
@@ -513,7 +450,7 @@ module Build = struct
     |}

   let get_with_jobname_by_hash =
-    Caqti_type.octets ->? Caqti_type.t2 Caqti_type.string t @@
+    Rep.cstruct ->? Caqti_type.tup2 Caqti_type.string t @@
     {| SELECT job.name,
          b.uuid, b.start_d, b.start_ps, b.finish_d, b.finish_ps,
          b.result_code, b.result_msg,
@@ -527,7 +464,7 @@ module Build = struct
     |}

   let set_main_binary =
-    Caqti_type.t2 (id `build) (id `build_artifact) ->. Caqti_type.unit @@
+    Caqti_type.tup2 (id `build) (id `build_artifact) ->. Caqti_type.unit @@
     "UPDATE build SET main_binary = $2 WHERE id = $1"

   let remove =
@@ -555,7 +492,7 @@ module User = struct
     "DROP TABLE IF EXISTS user"

   let get_user =
-    Caqti_type.string ->? Caqti_type.t2 (id `user) user_info @@
+    Caqti_type.string ->? Caqti_type.tup2 (id `user) user_info @@
     {| SELECT id, username, password_hash, password_salt,
          scrypt_n, scrypt_r, scrypt_p, restricted
        FROM user
@@ -609,15 +546,15 @@ module Access_list = struct
     "DROP TABLE IF EXISTS access_list"

   let get =
-    Caqti_type.t2 (id `user) (id `job) ->! id `access_list @@
+    Caqti_type.tup2 (id `user) (id `job) ->! id `access_list @@
     "SELECT id FROM access_list WHERE user = ? AND job = ?"

   let add =
-    Caqti_type.t2 (id `user) (id `job) ->. Caqti_type.unit @@
+    Caqti_type.tup2 (id `user) (id `job) ->. Caqti_type.unit @@
     "INSERT INTO access_list (user, job) VALUES (?, ?)"

   let remove =
-    Caqti_type.t2 (id `user) (id `job) ->. Caqti_type.unit @@
+    Caqti_type.tup2 (id `user) (id `job) ->. Caqti_type.unit @@
     "DELETE FROM access_list WHERE user = ? AND job = ?"

   let remove_by_job =
@@ -648,15 +585,13 @@ let migrate = [
   Caqti_type.unit ->. Caqti_type.unit @@
   "CREATE INDEX idx_build_job_start ON build(job, start_d DESC, start_ps DESC)";
   Caqti_type.unit ->. Caqti_type.unit @@
-  "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) WHERE main_binary IS NULL";
+  "CREATE INDEX idx_build_failed ON build(job, start_d DESC, start_ps DESC) WHERE result_code <> 0";
   Caqti_type.unit ->. Caqti_type.unit @@
   "CREATE INDEX idx_build_input_id ON build(input_id)";
   Caqti_type.unit ->. Caqti_type.unit @@
   "CREATE INDEX idx_build_main_binary ON build(main_binary)";
   Caqti_type.unit ->. Caqti_type.unit @@
   "CREATE INDEX idx_build_artifact_sha256 ON build_artifact(sha256)";
-  Caqti_type.unit ->. Caqti_type.unit @@
-  "CREATE INDEX idx_build_artifact_build ON build_artifact(build)";
   set_current_version;
   set_application_id;
 ]
@@ -670,8 +605,6 @@ let rollback = [
   Build.rollback;
   Job.rollback;
   Caqti_type.unit ->. Caqti_type.unit @@
-  "DROP INDEX IF EXISTS idx_build_artifact_build";
-  Caqti_type.unit ->. Caqti_type.unit @@
   "DROP INDEX IF EXISTS idx_build_artifact_sha256";
   Caqti_type.unit ->. Caqti_type.unit @@
   "DROP INDEX IF EXISTS idx_build_failed";
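Aside: the `->?`/`->!`/`->.` arrows seen throughout this file come from `Caqti_request.Infix`, and the branch side packs parameters with `tup2` where main uses `t2`. A minimal sketch of how such a request is declared and then run against a connection; the request name, SQL, and table here are illustrative, not taken from this diff:

```ocaml
(* Illustrative only: a Caqti request built with the same Infix arrows.
   [->!] means "exactly one result row"; [@@] applies the arrow to the SQL. *)
open Caqti_request.Infix

let user_count =
  Caqti_type.unit ->! Caqti_type.int @@
  "SELECT count(*) FROM user"

(* Running it against an established connection: *)
let count_users (module Db : Caqti_lwt.CONNECTION) =
  Db.find user_count ()
```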
db/builder_db.mli
@@ -3,7 +3,8 @@ module Rep : sig
   type 'a id
   type file = {
     filepath : Fpath.t;
-    sha256 : string;
+    localpath : Fpath.t;
+    sha256 : Cstruct.t;
     size : int;
   }

@@ -13,6 +14,7 @@ module Rep : sig
   val uuid : Uuidm.t Caqti_type.t
   val ptime : Ptime.t Caqti_type.t
   val fpath : Fpath.t Caqti_type.t
+  val cstruct : Cstruct.t Caqti_type.t
   val file : file Caqti_type.t
   val execution_result : Builder.execution_result Caqti_type.t
   val console : (int * string) list Caqti_type.t
@@ -21,7 +23,8 @@ type 'a id = 'a Rep.id

 type file = Rep.file = {
   filepath : Fpath.t;
-  sha256 : string;
+  localpath : Fpath.t;
+  sha256 : Cstruct.t;
   size : int;
 }

@@ -84,7 +87,6 @@ module Build_artifact : sig
     Caqti_request.t
   val get_all_by_build :
     ([`build] id, [`build_artifact] id * file, [ `Many | `One | `Zero ]) Caqti_request.t
-  val exists : (string, bool, [ `One ]) Caqti_request.t
   val add :
     (file * [`build] id, unit, [ `Zero ]) Caqti_request.t
   val remove_by_build :
@@ -104,13 +106,11 @@ sig
     script : Fpath.t;
     platform : string;
     main_binary : [`build_artifact] id option;
-    input_id : string option;
+    input_id : Cstruct.t option;
     user_id : [`user] id;
     job_id : [`job] id;
   }

-  val pp : t Fmt.t

   val get_by_uuid :
     (Uuidm.t, [`build] id * t, [ `One | `Zero ])
     Caqti_request.t
@@ -119,21 +119,15 @@ sig
   val get_all_failed :
     (int * int * string option, string * t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_all_artifact_sha :
-    ([`job] id * string option, string, [ `Many | `One | `Zero ]) Caqti_request.t
+    ([`job] id * string option, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_latest_successful_with_binary :
-    ([`job] id * string, [`build] id * t * file, [ `One | `Zero ])
+    ([`job] id * string, [`build] id * t * file option, [ `One | `Zero ])
     Caqti_request.t
   val get_failed_builds :
     ([`job] id * string option, t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_latest_successful :
     ([`job] id * string option, t, [ `One | `Zero ])
     Caqti_request.t
-  val get_builds_older_than :
-    ([`job] id * string option * Ptime.t, [`build] id * t, [ `Many | `One | `Zero ]) Caqti_request.t
-  val get_builds_excluding_latest_n :
-    ([`job] id * string option * int, [`build] id * t, [ `Many | `One | `Zero ]) Caqti_request.t
-  val get_nth_latest_successful :
-    ([`job] id * string option * int, [`build] id * t, [ `One | `Zero ]) Caqti_request.t
   val get_previous_successful_different_output :
     ([`build] id, t, [ `One | `Zero ])
     Caqti_request.t
@@ -143,20 +137,20 @@ sig
   val get_same_input_same_output_builds :
     ([`build] id, t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_same_input_different_output_hashes :
-    ([`build] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
+    ([`build] id, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_different_input_same_output_input_ids :
-    ([`build] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
+    ([`build] id, Cstruct.t, [ `Many | `One | `Zero ]) Caqti_request.t
   val get_one_by_input_id :
-    (string, t, [ `One ]) Caqti_request.t
+    (Cstruct.t, t, [ `One ]) Caqti_request.t
   val get_platforms_for_job :
     ([`job] id, string, [ `Many | `One | `Zero ]) Caqti_request.t
   val add : (t, unit, [ `Zero ]) Caqti_request.t
   val get_by_hash :
-    (string, t, [ `One]) Caqti_request.t
+    (Cstruct.t, t, [ `One]) Caqti_request.t
   val get_with_main_binary_by_hash :
-    (string, t * file option, [ `One]) Caqti_request.t
+    (Cstruct.t, t * file option, [ `One]) Caqti_request.t
   val get_with_jobname_by_hash :
-    (string, string * t, [ `One | `Zero]) Caqti_request.t
+    (Cstruct.t, string * t, [ `One | `Zero]) Caqti_request.t
   val set_main_binary : ([`build] id * [`build_artifact] id, unit, [ `Zero ]) Caqti_request.t
   val remove : ([`build] id, unit, [ `Zero]) Caqti_request.t
 end
db/dune
2
db/dune
|
@ -1,3 +1,3 @@
|
||||||
(library
|
(library
|
||||||
(name builder_db)
|
(name builder_db)
|
||||||
(libraries builder caqti caqti-driver-sqlite3 asn1-combinators builder_web_auth))
|
(libraries builder caqti caqti-driver-sqlite3 asn1-combinators mirage-crypto builder_web_auth))
|
||||||
|
|
|
db/representation.ml
@@ -1,8 +1,8 @@
 module Asn = struct
   let decode_strict codec cs =
     match Asn.decode codec cs with
-    | Ok (a, rest) ->
-      if String.length rest = 0
+    | Ok (a, cs) ->
+      if Cstruct.length cs = 0
       then Ok a
       else Error "trailing bytes"
     | Error (`Parse msg) -> Error ("parse error: " ^ msg)
@@ -17,7 +17,7 @@ module Asn = struct
           (required ~label:"delta" int)
           (required ~label:"data" utf8_string)))

-  let console_of_str, console_to_str = projections_of console
+  let console_of_cs, console_to_cs = projections_of console
 end

 type untyped_id = int64
@@ -30,7 +30,8 @@ let id_to_int64 (id : 'a id) : int64 = id

 type file = {
   filepath : Fpath.t;
-  sha256 : string;
+  localpath : Fpath.t;
+  sha256 : Cstruct.t;
   size : int;
 }

@@ -47,7 +48,7 @@ let ptime =
   let encode t = Ok (Ptime.Span.to_d_ps (Ptime.to_span t)) in
   let decode (d, ps) = Ok (Ptime.v (d, ps))
   in
-  let rep = Caqti_type.(t2 int int64) in
+  let rep = Caqti_type.(tup2 int int64) in
   Caqti_type.custom ~encode ~decode rep

 let fpath =
@@ -56,25 +57,30 @@ let fpath =
     |> Result.map_error (fun (`Msg s) -> s) in
   Caqti_type.custom ~encode ~decode Caqti_type.string

+let cstruct =
+  let encode t = Ok (Cstruct.to_string t) in
+  let decode s = Ok (Cstruct.of_string s) in
+  Caqti_type.custom ~encode ~decode Caqti_type.octets
+
 let file =
-  let encode { filepath; sha256; size } =
-    Ok (filepath, sha256, size) in
-  let decode (filepath, sha256, size) =
-    Ok { filepath; sha256; size } in
-  Caqti_type.custom ~encode ~decode Caqti_type.(t3 fpath octets int)
+  let encode { filepath; localpath; sha256; size } =
+    Ok (filepath, localpath, sha256, size) in
+  let decode (filepath, localpath, sha256, size) =
+    Ok { filepath; localpath; sha256; size } in
+  Caqti_type.custom ~encode ~decode Caqti_type.(tup4 fpath fpath cstruct int)

 let file_opt =
-  let rep = Caqti_type.(t3 (option fpath) (option octets) (option int)) in
+  let rep = Caqti_type.(tup4 (option fpath) (option fpath) (option cstruct) (option int)) in
   let encode = function
-    | Some { filepath; sha256; size } ->
-      Ok (Some filepath, Some sha256, Some size)
+    | Some { filepath; localpath; sha256; size } ->
+      Ok (Some filepath, Some localpath, Some sha256, Some size)
     | None ->
-      Ok (None, None, None)
+      Ok (None, None, None, None)
   in
   let decode = function
-    | (Some filepath, Some sha256, Some size) ->
-      Ok (Some { filepath; sha256; size })
-    | (None, None, None) ->
+    | (Some filepath, Some localpath, Some sha256, Some size) ->
+      Ok (Some { filepath; localpath; sha256; size })
+    | (None, None, None, None) ->
       Ok None
     | _ ->
       (* This should not happen if the database is well-formed *)
@@ -103,25 +109,25 @@ let execution_result =
     else
       Error "bad encoding (unknown number)"
   in
-  let rep = Caqti_type.(t2 int (option string)) in
+  let rep = Caqti_type.(tup2 int (option string)) in
   Caqti_type.custom ~encode ~decode rep

 let console =
-  let encode console = Ok (Asn.console_to_str console) in
-  let decode data = Asn.console_of_str data in
-  Caqti_type.(custom ~encode ~decode octets)
+  let encode console = Ok (Asn.console_to_cs console) in
+  let decode data = Asn.console_of_cs data in
+  Caqti_type.custom ~encode ~decode cstruct

 let user_info =
-  let rep = Caqti_type.(t7 string octets octets int int int bool) in
+  let rep = Caqti_type.(tup4 string cstruct cstruct (tup4 int int int bool)) in
   let encode { Builder_web_auth.username;
                password_hash = `Scrypt (password_hash, password_salt, {
                  Builder_web_auth.scrypt_n; scrypt_r; scrypt_p
                });
                restricted; }
     =
-    Ok (username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted)
+    Ok (username, password_hash, password_salt, (scrypt_n, scrypt_r, scrypt_p, restricted))
   in
-  let decode (username, password_hash, password_salt, scrypt_n, scrypt_r, scrypt_p, restricted) =
+  let decode (username, password_hash, password_salt, (scrypt_n, scrypt_r, scrypt_p, restricted)) =
     Ok { Builder_web_auth.username;
          password_hash =
           `Scrypt (password_hash, password_salt,
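Why the nested `tup4` in `user_info`: the Caqti release this branch pins has no seven-column tuple, so the seven user columns travel as a 4-tuple whose last slot is itself a 4-tuple, and encode/decode flatten and unflatten the same shape (main's `t7` does this directly). A reduced sketch with illustrative names:

```ocaml
(* Seven columns packed for a Caqti that tops out at tup4: the last slot
   nests another tup4. encode/decode must mirror exactly this nesting. *)
let user_row =
  Caqti_type.(tup4 string octets octets (tup4 int int int bool))

let encode (name, hash, salt, n, r, p, restricted) =
  Ok (name, hash, salt, (n, r, p, restricted))

let decode (name, hash, salt, (n, r, p, restricted)) =
  Ok (name, hash, salt, n, r, p, restricted)
```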
dune-project
@@ -1,3 +1,2 @@
 (lang dune 2.7)
 (name builder-web)
-(formatting disabled)
lib/builder_web.ml
@@ -26,7 +26,7 @@ let init_datadir datadir =
 let init dbpath datadir =
   Result.bind (init_datadir datadir) @@ fun () ->
   Lwt_main.run (
-    Caqti_lwt_unix.connect
+    Caqti_lwt.connect
       (Uri.make ~scheme:"sqlite3" ~path:dbpath ~query:["create", ["false"]] ())
     >>= fun (module Db : Caqti_lwt.CONNECTION) ->
     Db.find Builder_db.get_application_id () >>= fun application_id ->
@@ -199,6 +199,10 @@ module Viz_aux = struct

   let hash_viz_input ~uuid typ db =
     let open Builder_db in
+    let hex cstruct =
+      let `Hex hex_str = Hex.of_cstruct cstruct in
+      hex_str
+    in
     main_binary_of_uuid uuid db >>= fun main_binary ->
     Model.build uuid db
     |> if_error "Error getting build" >>= fun (build_id, _build) ->
@@ -207,28 +211,28 @@ module Viz_aux = struct
     match typ with
     | `Treemap ->
       let debug_binary =
-        let bin = Fpath.base main_binary.filepath in
+        let bin = Fpath.base main_binary.localpath in
         List.find_opt
-          (fun p -> Fpath.(equal (bin + "debug") (base p.filepath)))
+          (fun p -> Fpath.(equal (bin + "debug") (base p.localpath)))
           artifacts
       in
       begin
         Model.not_found debug_binary
         |> not_found_error >>= fun debug_binary ->
         debug_binary.sha256
-        |> Ohex.encode
+        |> hex
         |> Lwt_result.return
       end
     | `Dependencies ->
       let opam_switch =
         List.find_opt
-          (fun p -> Fpath.(equal (v "opam-switch") (base p.filepath)))
+          (fun p -> Fpath.(equal (v "opam-switch") (base p.localpath)))
           artifacts
       in
       Model.not_found opam_switch
       |> not_found_error >>= fun opam_switch ->
       opam_switch.sha256
-      |> Ohex.encode
+      |> hex
       |> Lwt_result.return

   let try_load_cached_visualization ~cachedir ~uuid viz_typ db =
@@ -243,7 +247,7 @@ module Viz_aux = struct
       |> Lwt.return
       |> if_error "Error finding a version of the requested visualization")
     >>= fun viz_path ->
-    Lwt_result.catch (fun () ->
+    Lwt_result.catch (
       Lwt_io.with_file ~mode:Lwt_io.Input
         (Fpath.to_string viz_path)
         Lwt_io.read
@@ -254,17 +258,8 @@ module Viz_aux = struct
 end


-let routes ~datadir ~cachedir ~configdir ~expired_jobs =
-  let builds ~all ?(filter_builds_later_than = 0) req =
-    let than =
-      if filter_builds_later_than = 0 then
-        Ptime.epoch
-      else
-        let n = Ptime.Span.v (filter_builds_later_than, 0L) in
-        let now = Ptime_clock.now () in
-        Ptime.Span.sub (Ptime.to_span now) n |> Ptime.of_span |>
-        Option.fold ~none:Ptime.epoch ~some:Fun.id
-    in
+let routes ~datadir ~cachedir ~configdir =
+  let builds req =
     Dream.sql req Model.jobs_with_section_synopsis
     |> if_error "Error getting jobs"
       ~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e))
@@ -277,17 +272,11 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
           r >>= fun acc ->
           Dream.sql req (Model.build_with_main_binary job_id platform) >>= function
           | Some (build, artifact) ->
-            if Ptime.is_later ~than build.finish then
             Lwt_result.return ((platform, build, artifact) :: acc)
-            else
-              Lwt_result.return acc
           | None ->
             Log.warn (fun m -> m "Job without builds: %s" job_name);
             Lwt_result.return acc)
         ps (Lwt_result.return []) >>= fun platform_builds ->
-      if platform_builds = [] then
-        Lwt_result.return acc
-      else
       let v = (job_name, synopsis, platform_builds) in
       let section = Option.value ~default:"Uncategorized" section in
       Lwt_result.return (Utils.String_map.add_or_create section v acc))
@@ -296,7 +285,7 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
     |> if_error "Error getting jobs"
       ~log:(fun e -> Log.warn (fun m -> m "Error getting jobs: %a" pp_error e))
     >>= fun jobs ->
-    Views.Builds.make ~all jobs |> string_of_html |> Dream.html |> Lwt_result.ok
+    Views.Builds.make jobs |> string_of_html |> Dream.html |> Lwt_result.ok
   in

   let job req =
@@ -424,15 +413,15 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
     |> if_error ~status:`Not_Found "File not found" >>= fun filepath ->
     Dream.sql req (Model.build_artifact build filepath)
     |> if_error "Error getting build artifact" >>= fun file ->
-    let etag = Base64.encode_string file.Builder_db.sha256 in
+    let etag = Base64.encode_string (Cstruct.to_string file.Builder_db.sha256) in
     match if_none_match with
     | Some etag' when etag = etag' ->
       Dream.empty `Not_Modified |> Lwt_result.ok
     | _ ->
       Model.build_artifact_data datadir file
       |> if_error "Error getting build artifact"
-        ~log:(fun e -> Log.warn (fun m -> m "Error getting build artifact data for file %a: %a"
-          Fpath.pp file.Builder_db.filepath
+        ~log:(fun e -> Log.warn (fun m -> m "Error getting build artifact data for file %a in %a: %a"
+          Fpath.pp file.Builder_db.filepath Fpath.pp file.Builder_db.localpath
           pp_error e)) >>= fun data ->
       let headers = [
         "Content-Type", mime_lookup file.Builder_db.filepath;
@@ -482,19 +471,13 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
     |> Option.to_result ~none:(`Msg "bad finish time") |> Result.map Int64.of_int
     |> Lwt.return |> if_error "Internal server error" >>= fun finish ->
     Dream.stream ~headers:["Content-Type", "application/tar+gzip"]
-      (fun stream ->
-        let+ r = Dream_tar.targz_response datadir finish artifacts stream in
-        match r with
-        | Ok () -> ()
-        | Error _ ->
-          Log.warn (fun m -> m "error assembling gzipped tar archive");
-          ())
+      (Dream_tar.targz_response datadir finish artifacts)
     |> Lwt_result.ok
   in

   let upload req =
     let* body = Dream.body req in
-    Builder.Asn.exec_of_str body |> Lwt.return
+    Builder.Asn.exec_of_cs (Cstruct.of_string body) |> Lwt.return
     |> if_error ~status:`Bad_Request "Bad request"
       ~log:(fun e ->
         Log.warn (fun m -> m "Received bad builder ASN.1");
@@ -526,7 +509,7 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
     Dream.query req "sha256" |> Option.to_result ~none:(`Msg "Missing sha256 query parameter")
     |> Lwt.return
     |> if_error ~status:`Bad_Request "Bad request" >>= fun hash_hex ->
-    begin try Ohex.decode hash_hex |> Lwt_result.return
+    begin try Hex.to_cstruct (`Hex hash_hex) |> Lwt_result.return
       with Invalid_argument e -> Lwt_result.fail (`Msg ("Bad hex: " ^ e))
     end
     |> if_error ~status:`Bad_Request "Bad request" >>= fun hash ->
@@ -619,11 +602,27 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
     >>= fun () -> Dream.respond "" |> Lwt_result.ok
   in

+  let redirect_parent req =
+    let queries = Dream.all_queries req in
+    let parent_url =
+      let parent_path =
+        Dream.target req
+        |> Utils.Path.of_url
+        |> List.rev |> List.tl |> List.rev
+      in
+      Utils.Path.to_url ~path:parent_path ~queries
+    in
+    Dream.redirect ~status:`Temporary_Redirect req parent_url
+    |> Lwt_result.ok
+  in
+
   let w f req = or_error_response (f req) in

   [
-    `Get, "/", (w (builds ~all:false ~filter_builds_later_than:expired_jobs));
+    `Get, "/", (w builds);
+    `Get, "/job", (w redirect_parent);
     `Get, "/job/:job", (w job);
+    `Get, "/job/:job/build", (w redirect_parent);
     `Get, "/job/:job/failed", (w job_with_failed);
     `Get, "/job/:job/build/latest/**", (w redirect_latest);
     `Get, "/job/:job/build/latest", (w redirect_latest_no_slash);
@@ -634,9 +633,8 @@ let routes ~datadir ~cachedir ~configdir ~expired_jobs =
     `Get, "/job/:job/build/:build/vizdependencies", (w @@ job_build_viz `Dependencies);
     `Get, "/job/:job/build/:build/script", (w (job_build_static_file `Script));
     `Get, "/job/:job/build/:build/console", (w (job_build_static_file `Console));
-    `Get, "/job/:job/build/:build/all.tar.gz", (w job_build_targz);
     `Get, "/failed-builds", (w failed_builds);
-    `Get, "/all-builds", (w (builds ~all:true));
+    `Get, "/job/:job/build/:build/all.tar.gz", (w job_build_targz);
     `Get, "/hash", (w hash);
     `Get, "/compare/:build_left/:build_right", (w compare_builds);
     `Post, "/upload", (Authorization.authenticate (w upload));
@@ -672,7 +670,7 @@ module Middleware = struct
       let queries = Dream.all_queries req in
       let url = Utils.Path.to_url ~path ~queries in
       (*> Note: See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Location*)
-      Dream.redirect ~status:`Moved_Permanently req url
+      Dream.redirect ~status:`Permanent_Redirect req url
     | _ (* /... *) -> handler req

 end
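The new `redirect_parent` handler derives the parent URL by dropping the last path segment. The core list manipulation in isolation (`Utils.Path.of_url`/`to_url` are this repo's helpers and are not shown here):

```ocaml
(* Drop the final segment of a decoded path, as redirect_parent does.
   Note List.tl raises on an empty list, so only non-root paths such as
   "/job" or "/job/:job/build" should be routed through this. *)
let parent_path segments =
  List.rev (List.tl (List.rev segments))

let () =
  assert (parent_path ["job"; "myjob"; "build"] = ["job"; "myjob"])
```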
lib/dream_tar.ml (142 changes)
@@ -1,36 +1,47 @@
-module High : sig
-  type t
-  type 'a s = 'a Lwt.t
+open Lwt.Infix

-  external inj : 'a s -> ('a, t) Tar.io = "%identity"
-  external prj : ('a, t) Tar.io -> 'a s = "%identity"
-end = struct
-  type t
-  type 'a s = 'a Lwt.t
+module Writer = struct
+  type out_channel =
+    { mutable gz : Gz.Def.encoder
+    ; ic : Cstruct.t
+    ; oc : Cstruct.t
+    ; stream : Dream.stream }

-  external inj : 'a -> 'b = "%identity"
-  external prj : 'a -> 'b = "%identity"
+  type 'a t = 'a Lwt.t
+
+  let really_write ({ oc; stream; _ } as state) cs =
+    let rec until_await gz =
+      match Gz.Def.encode gz with
+      | `Await gz -> state.gz <- gz ; Lwt.return_unit
+      | `Flush gz ->
+        let max = Cstruct.length oc - Gz.Def.dst_rem gz in
+        let str = Cstruct.copy oc 0 max in
+        Dream.write stream str >>= fun () ->
+        let { Cstruct.buffer; off= cs_off; len= cs_len; } = oc in
+        until_await (Gz.Def.dst gz buffer cs_off cs_len)
+      | `End _gz -> assert false in
+    if Cstruct.length cs = 0
+    then Lwt.return_unit
+    else ( let { Cstruct.buffer; off; len; } = cs in
+           let gz = Gz.Def.src state.gz buffer off len in
+           until_await gz )
 end

-let value v = Tar.High (High.inj v)
+module HW = Tar.HeaderWriter(Lwt)(Writer)

-let ok_value v = value (Lwt_result.ok v)
-
-let run t stream =
-  let rec run : type a. (a, 'err, High.t) Tar.t -> (a, 'err) result Lwt.t =
-    function
-    | Tar.Write str ->
-      (* Can this not fail?!? Obviously, it can, but we never know?? *)
-      Lwt_result.ok (Dream.write stream str)
-    | Tar.Seek _ | Tar.Read _ | Tar.Really_read _ -> assert false
-    | Tar.Return value -> Lwt.return value
-    | Tar.High value -> High.prj value
-    | Tar.Bind (x, f) ->
-      let open Lwt_result.Syntax in
-      let* v = run x in
-      run (f v)
+let write_block (header : Tar.Header.t) lpath ({ Writer.ic= buf; _ } as state) =
+  HW.write ~level:Tar.Header.Ustar header state >>= fun () ->
+  Lwt_io.open_file ~mode:Lwt_io.Input (Fpath.to_string lpath) >>= fun ic ->
+  let rec loop () =
+    let { Cstruct.buffer; off; len; } = buf in
+    Lwt_io.read_into_bigstring ic buffer off len >>= function
+    | 0 -> Lwt.return ()
+    | len' ->
+      Writer.really_write state (Cstruct.sub buf 0 len') >>= fun () ->
+      loop ()
   in
-  run t
+  loop () >>= fun () ->
+  Writer.really_write state (Tar.Header.zero_padding header)

 let header_of_file mod_time (file : Builder_db.file) =
   let file_mode = if Fpath.is_prefix Fpath.(v "bin/") file.filepath then
@@ -40,53 +51,38 @@ let header_of_file mod_time (file : Builder_db.file) =
   in
   Tar.Header.make ~file_mode ~mod_time (Fpath.to_string file.filepath) (Int64.of_int file.size)

-let contents datadir file : unit -> (string option, _, _) Tar.t =
-  let state = ref `Initial in
-  let dispenser () =
-    let ( let* ) = Tar.( let* ) in
-    let src = Fpath.append datadir (Model.artifact_path file) in
-    let* state' =
-      match !state with
-      | `Initial ->
-        let* fd = ok_value (Lwt_io.open_file ~mode:Lwt_io.Input (Fpath.to_string src)) in
-        let s = `Active fd in
-        state := s; Tar.return (Ok s)
-      | `Active _ | `Closed as s -> Tar.return (Ok s)
+let targz_response datadir finish (files : Builder_db.file list) (stream : Dream.stream) =
+  let state =
+    let ic = Cstruct.create (4 * 4 * 1024) in
+    let oc = Cstruct.create 4096 in
+    let gz =
+      let w = De.Lz77.make_window ~bits:15 in
+      let q = De.Queue.create 0x1000 in
+      let mtime = Int32.of_float (Unix.gettimeofday ()) in
+      let gz = Gz.Def.encoder `Manual `Manual ~mtime Gz.Unix ~q ~w ~level:4 in
+      let { Cstruct.buffer; off; len; } = oc in
+      Gz.Def.dst gz buffer off len
     in
-    match state' with
-    | `Closed -> Tar.return (Ok None)
-    | `Active fd ->
-      let* data = ok_value (Lwt_io.read ~count:65536 fd) in
-      if String.length data = 0 then begin
-        state := `Closed;
-        let* () = ok_value (Lwt_io.close fd) in
-        Tar.return (Ok None)
-      end else
-        Tar.return (Ok (Some data))
+    { Writer.gz; ic; oc; stream; }
   in
-  dispenser
+  Lwt_list.iter_s (fun file ->

-let entries datadir finish files =
-  let files =
-    List.map (fun file ->
       let hdr = header_of_file finish file in
-      let level = Some Tar.Header.Posix in
-      (level, hdr, contents datadir file)
-    )
-    files
+      write_block hdr Fpath.(datadir // file.localpath) state)
+    files >>= fun () ->
+  Writer.really_write state Tar.Header.zero_block >>= fun () ->
+  Writer.really_write state Tar.Header.zero_block >>= fun () ->
+  (* assert (Gz.Def.encode gz = `Await) *)
+  let rec until_end gz = match Gz.Def.encode gz with
+    | `Await _gz -> assert false
+    | `Flush gz | `End gz as flush_or_end ->
+      let max = Cstruct.length state.oc - Gz.Def.dst_rem gz in
+      let str = Cstruct.copy state.oc 0 max in
+      Dream.write stream str >>= fun () -> match flush_or_end with
+      | `Flush gz ->
+        let { Cstruct.buffer; off= cs_off; len= cs_len; } = state.oc in
+        until_end (Gz.Def.dst gz buffer cs_off cs_len)
+      | `End _ -> Lwt.return_unit
   in
-  let files = ref files in
-  fun () -> match !files with
-  | [] -> Tar.return (Ok None)
-  | f :: fs -> files := fs; Tar.return (Ok (Some f))
+  until_end (Gz.Def.src state.gz De.bigstring_empty 0 0) >>= fun () ->
+  Dream.flush stream >>= fun () ->
+  Dream.close stream

-let targz_response datadir finish files stream =
-  let entries : (_, _) Tar.entries = entries datadir finish files in
-  let global_hdr =
-    Tar.Header.Extended.make
-      ~comment:"Tar file produced by builder-web.%%VERSION_NUM%%"
-      ()
-  in
-  let finish32 = Int64.to_int32 finish in
-  Logs.err (fun m -> m "finished at %ld (%Ld)" finish32 finish);
-  run (Tar_gz.out_gzipped ~level:9 ~mtime:finish32 Gz.Unix (Tar.out ~global_hdr entries)) stream
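For reference, the `` `Manual``/`` `Manual`` `Gz.Def` state machine driven by `Writer.really_write` and `until_end` above, reduced to a string-to-string gzip. Buffer sizes, `mtime:0l`, and the function name are arbitrary choices for this sketch; the `Gz.Def` calls themselves mirror the ones in the diff:

```ocaml
(* Gzip a whole string with decompress's manual-source/manual-sink encoder:
   feed input via Gz.Def.src, drain output on `Flush/`End, and signal
   end-of-input with a zero-length src. *)
let gzip_string ?(level = 4) input =
  let w = De.Lz77.make_window ~bits:15 in
  let q = De.Queue.create 0x1000 in
  let oc = De.bigstring_create 4096 in
  let out = Buffer.create (String.length input) in
  let i = Bigstringaf.of_string ~off:0 ~len:(String.length input) input in
  let gz = Gz.Def.encoder `Manual `Manual ~mtime:0l Gz.Unix ~q ~w ~level in
  let gz = Gz.Def.dst gz oc 0 (Bigstringaf.length oc) in
  let gz = Gz.Def.src gz i 0 (String.length input) in
  let drain gz =
    (* copy whatever the encoder produced into the result buffer *)
    let len = Bigstringaf.length oc - Gz.Def.dst_rem gz in
    Buffer.add_string out (Bigstringaf.substring oc ~off:0 ~len)
  in
  let rec go gz = match Gz.Def.encode gz with
    | `Await gz ->
      (* no more input: a zero-length src marks end-of-stream *)
      go (Gz.Def.src gz De.bigstring_empty 0 0)
    | `Flush gz ->
      drain gz;
      go (Gz.Def.dst gz oc 0 (Bigstringaf.length oc))
    | `End gz -> drain gz; Buffer.contents out
  in
  go gz
```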
lib/dune (22 changes)
@@ -1,5 +1,21 @@
 (library
  (name builder_web)
- (libraries builder builder_db dream tyxml bos duration ohex caqti-lwt
-  opamdiff ptime.clock.os cmarkit tar tar.gz tar-unix owee solo5-elftool decompress.de
-  decompress.gz uri digestif))
+ (libraries
+  builder
+  builder_db
+  dream
+  tyxml
+  bos
+  duration
+  hex
+  caqti-lwt
+  opamdiff
+  ptime.clock.os
+  omd
+  tar
+  owee
+  solo5-elftool
+  decompress.de
+  decompress.gz
+  uri
+  ))
lib/model.ml (133 changes)
@@ -19,14 +19,6 @@ let not_found = function
   | Some v -> Lwt_result.return v

 let staging datadir = Fpath.(datadir / "_staging")
-let artifact_path artifact =
-  let sha256 = Ohex.encode artifact.Builder_db.sha256 in
-  (* NOTE: [sha256] is 64 characters when it's a hex sha256 checksum *)
-  (* NOTE: We add the prefix to reduce the number of files in a directory - a
-     workaround for inferior filesystems. We can easily revert this by changing
-     this function and adding a migration. *)
-  let prefix = String.sub sha256 0 2 in
-  Fpath.(v "_artifacts" / prefix / sha256)

 let read_file datadir filepath =
   let filepath = Fpath.(datadir // filepath) in
@@ -42,7 +34,7 @@ let read_file datadir filepath =
       Log.warn (fun m -> m "Error reading local file %a: %s"
         Fpath.pp filepath (Unix.error_message e));
       Lwt.return_error (`File_error filepath)
-    | e -> Lwt.reraise e)
+    | e -> Lwt.fail e)

 let build_artifact build filepath (module Db : CONN) =
   Db.find_opt Builder_db.Build_artifact.get_by_build_uuid (build, filepath)
@@ -52,14 +44,14 @@ let build_artifact_by_id id (module Db : CONN) =
   Db.find Builder_db.Build_artifact.get id

 let build_artifact_data datadir file =
-  read_file datadir (artifact_path file)
+  read_file datadir file.Builder_db.localpath

 let build_artifacts build (module Db : CONN) =
   Db.collect_list Builder_db.Build_artifact.get_all_by_build build >|=
   List.map snd

 let solo5_manifest datadir file =
-  let buf = Owee_buf.map_binary Fpath.(to_string (datadir // artifact_path file)) in
+  let buf = Owee_buf.map_binary Fpath.(to_string (datadir // file.Builder_db.localpath)) in
   Solo5_elftool.query_manifest buf |> Result.to_option

 let platforms_of_job id (module Db : CONN) =
@@ -204,42 +196,46 @@ let cleanup_staging datadir (module Db : Caqti_lwt.CONNECTION) =
       (cleanup_staged staged))
     stageds

-let save path data =
+let save file data =
   let open Lwt.Infix in
   Lwt.catch
     (fun () ->
-      Lwt_io.open_file ~mode:Lwt_io.Output (Fpath.to_string path) >>= fun oc ->
+      Lwt_io.open_file ~mode:Lwt_io.Output (Fpath.to_string file) >>= fun oc ->
       Lwt_io.write oc data >>= fun () ->
       Lwt_io.close oc
       |> Lwt_result.ok)
     (function
      | Unix.Unix_error (e, _, _) ->
        Lwt_result.fail (`Msg (Unix.error_message e))
-     | e -> Lwt.reraise e)
+     | e -> Lwt.fail e)

-let save_artifacts staging artifacts =
-  List.fold_left
-    (fun r (file, data) ->
-      r >>= fun () ->
-      let sha256 = Ohex.encode file.Builder_db.sha256 in
-      let destpath = Fpath.(staging / sha256) in
-      save destpath data)
-    (Lwt_result.return ())
-    artifacts
+let save_file dir staging (filepath, data) =
+  let size = String.length data in
+  let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
+  let localpath = Fpath.append dir filepath in
+  let destpath = Fpath.append staging filepath in
+  Lwt_result.lift (Bos.OS.Dir.create (Fpath.parent destpath)) >>= fun _ ->
+  save destpath data >|= fun () ->
+  { Builder_db.filepath; localpath; sha256; size }

-let commit_files datadir staging_dir job_name uuid artifacts =
-  (* First we move the artifacts *)
+let save_files dir staging files =
   List.fold_left
-    (fun r artifact ->
-      r >>= fun () ->
-      let sha256 = Ohex.encode artifact.Builder_db.sha256 in
-      let src = Fpath.(staging_dir / sha256) in
-      let dest = Fpath.(datadir // artifact_path artifact) in
-      Lwt.return (Bos.OS.Dir.create (Fpath.parent dest)) >>= fun _created ->
-      Lwt.return (Bos.OS.Path.move ~force:true src dest))
-    (Lwt_result.return ())
-  artifacts >>= fun () ->
-  (* Now the staging dir only contains script & console *)
+    (fun r file ->
+      r >>= fun acc ->
+      save_file dir staging file >>= fun file ->
+      Lwt_result.return (file :: acc))
+    (Lwt_result.return [])
+    files
+
+let save_all staging_dir (job : Builder.script_job) uuid artifacts =
+  let build_dir = Fpath.(v job.Builder.name / Uuidm.to_string uuid) in
+  let output_dir = Fpath.(build_dir / "output")
+  and staging_output_dir = Fpath.(staging_dir / "output") in
+  Lwt.return (Bos.OS.Dir.create staging_output_dir) >>= fun _ ->
+  save_files output_dir staging_output_dir artifacts >>= fun artifacts ->
+  Lwt_result.return artifacts

+let commit_files datadir staging_dir job_name uuid =
   let job_dir = Fpath.(datadir / job_name) in
   let dest = Fpath.(job_dir / Uuidm.to_string uuid) in
   Lwt.return (Bos.OS.Dir.create job_dir) >>= fun _ ->
@@ -253,15 +249,8 @@ let infer_section_and_synopsis artifacts =
   in
   let infer_section switch root =
     let root_pkg = root.OpamPackage.name in
-    let is_unikernel =
-      (* since mirage 4.2.0, the x-mirage-opam-lock-location is emitted *)
-      Option.value ~default:false
-        (Option.map (fun opam ->
-          Option.is_some (OpamFile.OPAM.extended opam "x-mirage-opam-lock-location" Fun.id))
-          (OpamPackage.Name.Map.find_opt root_pkg switch.OpamFile.SwitchExport.overlays))
-    in
     let root_pkg_name = OpamPackage.Name.to_string root_pkg in
-    if is_unikernel || Astring.String.is_prefix ~affix:"mirage-unikernel-" root_pkg_name then
+    if Astring.String.is_prefix ~affix:"mirage-unikernel-" root_pkg_name then
       let metrics_influx =
         let influx = OpamPackage.Name.of_string "metrics-influx" in
         OpamPackage.Set.exists (fun p -> OpamPackage.Name.equal p.OpamPackage.name influx)
@@ -306,8 +295,7 @@ let compute_input_id artifacts =
     get_hash (Fpath.v "build-environment"),
     get_hash (Fpath.v "system-packages")
   with
-  | Some a, Some b, Some c ->
-    Some Digestif.SHA256.(to_raw_string (digestv_string [a;b;c]))
+  | Some a, Some b, Some c -> Some (Mirage_crypto.Hash.SHA256.digest (Cstruct.concat [a;b;c]))
   | _ -> None

 let save_console_and_script staging_dir job_name uuid console script =
@@ -329,25 +317,6 @@ let prepare_staging staging_dir =
   then Lwt_result.fail (`Msg "build directory already exists")
   else Lwt_result.return ()

-(* saving:
-   - for each artifact compute its sha256 checksum -- calling Lwt.pause in
-     between
-   - lookup artifact sha256 in the database and filter them out of the list: not_in_db
-   - mkdir -p _staging/uuid/
-   - save console & script to _staging/uuid/
-   - save each artifact in not_in_db as _staging/uuid/sha256
-   committing:
-   - for each artifact mv _staging/uuid/sha256 _artifacts/sha256
-     (or _artifacts/prefix(sha256)/sha256 where prefix(sha256) is the first two hex digits in sha256)
-   - now _staging/uuid only contains console & script so we mv _staging/uuid _staging/job/uuid
-   potential issues:
-   - race condition in uploading same artifact:
-     * if the artifact already exists in the database and thus filesystem then nothing is done
-     * if the artifact is added to the database and/or filesystem we atomically overwrite it
-   - input_id depends on a sort order?
-*)


 let add_build
   ~datadir
   ~cachedir
@@ -368,35 +337,16 @@ let add_build
         e)
     x
   in
+  let artifacts_to_preserve =
   let not_interesting p =
     String.equal (Fpath.basename p) "README.md" || String.equal (Fpath.get_ext p) ".build-hashes"
   in
-  begin
-    List.fold_left
-      (fun r (filepath, data) ->
-        r >>= fun acc ->
-        if not_interesting filepath then
-          Lwt_result.return acc
-        else
-          let sha256 = Digestif.SHA256.(to_raw_string (digest_string data))
-          and size = String.length data in
-          Lwt_result.ok (Lwt.pause ()) >|= fun () ->
-          ({ filepath; sha256; size }, data) :: acc)
-      (Lwt_result.return [])
-      raw_artifacts
-  end >>= fun artifacts ->
+    List.filter (fun (p, _) -> not (not_interesting p)) raw_artifacts
+  in
   or_cleanup (prepare_staging staging_dir) >>= fun () ->
   or_cleanup (save_console_and_script staging_dir job_name uuid console job.Builder.script)
   >>= fun (console, script) ->
-  List.fold_left
-    (fun r ((f, _) as artifact) ->
-      r >>= fun acc ->
-      Db.find Builder_db.Build_artifact.exists f.sha256 >|= fun exists ->
-      if exists then acc else artifact :: acc)
-    (Lwt_result.return [])
-    artifacts >>= fun artifacts_to_save ->
-  or_cleanup (save_artifacts staging_dir artifacts_to_save) >>= fun () ->
-  let artifacts = List.map fst artifacts in
+  or_cleanup (save_all staging_dir job uuid artifacts_to_preserve) >>= fun artifacts ->
   let r =
     Db.start () >>= fun () ->
     Db.exec Job.try_add job_name >>= fun () ->
@@ -464,8 +414,8 @@ let add_build
       Db.exec Build_artifact.add (file, id))
     (Lwt_result.return ())
     remaining_artifacts_to_add >>= fun () ->
-  commit_files datadir staging_dir job_name uuid (List.map fst artifacts_to_save) >>= fun () ->
-  Db.commit () >|= fun () ->
+  Db.commit () >>= fun () ->
+  commit_files datadir staging_dir job_name uuid >|= fun () ->
   main_binary
   in
   Lwt_result.bind_lwt_error (or_cleanup r)
@@ -484,7 +434,7 @@ let add_build
   and uuid = Uuidm.to_string uuid
   and job = job.name
   and platform = job.platform
-  and sha256 = Ohex.encode main_binary.sha256
+  and `Hex sha256 = Hex.of_cstruct main_binary.sha256
   in
   let fp_str p = Fpath.(to_string (datadir // p)) in
   let args =
@@ -494,8 +444,7 @@ let add_build
     "--uuid=" ^ uuid ; "--platform=" ^ platform ;
     "--cache-dir=" ^ Fpath.to_string cachedir ;
     "--data-dir=" ^ Fpath.to_string datadir ;
-    "--main-binary-filepath=" ^ Fpath.to_string main_binary.filepath ;
-    fp_str Fpath.(datadir // artifact_path main_binary) ])
+    fp_str main_binary.localpath ])
   in
   Log.debug (fun m -> m "executing hooks with %s" args);
   let dir = Fpath.(configdir / "upload-hooks") in
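Both `save_file` and `compute_input_id` above hash with the `Cstruct.t`-based mirage-crypto interface that this branch pins (modern releases work on strings instead). A small sketch of that hashing plus the `Hex` rendering the upload hooks receive; the function name is mine:

```ocaml
(* SHA256 over a string, as a Cstruct.t digest, then hex-encoded the same
   way add_build passes it to the upload hooks. *)
let sha256_hex data =
  let digest = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
  let `Hex hex = Hex.of_cstruct digest in
  hex

let () = print_endline (sha256_hex "builder-web")
```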
lib/model.mli
@@ -5,7 +5,6 @@ val pp_error : Format.formatter -> error -> unit
 val not_found : 'a option -> ('a, [> `Not_found ]) result Lwt.t

 val staging : Fpath.t -> Fpath.t
-val artifact_path : Builder_db.file -> Fpath.t

 val cleanup_staging : Fpath.t -> Caqti_lwt.connection ->
   (unit, [> `Msg of string ]) result Lwt.t
@@ -31,9 +30,9 @@ val build : Uuidm.t -> Caqti_lwt.connection ->
   ([`build] Builder_db.id * Builder_db.Build.t, [> error ]) result Lwt.t

 val build_with_main_binary : [`job] Builder_db.id -> string -> Caqti_lwt.connection ->
-  ((Builder_db.Build.t * Builder_db.file) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t
+  ((Builder_db.Build.t * Builder_db.file option) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

-val build_hash : string -> Caqti_lwt.connection ->
+val build_hash : Cstruct.t -> Caqti_lwt.connection ->
   ((string * Builder_db.Build.t) option, [> Caqti_error.call_or_retrieve ]) result Lwt.t

 val build_exists : Uuidm.t -> Caqti_lwt.connection ->
lib/utils.ml (97 changes)
@@ -45,30 +45,85 @@ let compare_pkgs p1 p2 =
   in
   diff_map (parse_pkgs p1) (parse_pkgs p2)

-let md_to_html ?adjust_heading ?(safe = true) data =
-  let open Cmarkit in
-  let doc = Doc.of_string ~strict:false ~heading_auto_ids:true data in
-  let doc =
-    Option.fold ~none:doc
-      ~some:(fun lvl ->
-        let block _m = function
-          | Block.Heading (h, meta) ->
-            let open Block.Heading in
-            let level = level h
-            and id = id h
-            and layout = layout h
-            and inline = inline h
+module Omd = struct
+  let make_safe omd =
+    let rec safe_block = function
+      | Omd.Paragraph (attr, inline) ->
+        safe_inline inline
+        |> Option.map (fun inline -> Omd.Paragraph (attr, inline))
+      | Omd.List (attr, typ, spacing, blocks) ->
+        let blocks = List.filter_map (fun b ->
+          let b = List.filter_map safe_block b in
+          if b = [] then None else Some b)
+          blocks
         in
-            let h' = make ?id ~layout ~level:(level + lvl) inline in
-            Mapper.ret (Block.Heading (h', meta))
-          | Block.Blocks _ -> Mapper.default
-          | x -> Mapper.ret x
+        if blocks = [] then None else
+          Some (Omd.List (attr, typ, spacing, blocks))
+      | Omd.Blockquote (attr, blocks) ->
+        let blocks = List.filter_map safe_block blocks in
+        if blocks = [] then None else
+          Some (Omd.Blockquote (attr, blocks))
+      | Omd.Heading (attr, level, inline) ->
+        safe_inline inline
+        |> Option.map (fun inline -> Omd.Heading (attr, level, inline))
+      | Omd.Html_block _ -> None
+      | Omd.Definition_list (attr, def_elts) ->
+        let def_elts = List.filter_map safe_def_elts def_elts in
+        if def_elts = [] then None else
+          Some (Omd.Definition_list (attr, def_elts))
+      | Omd.Code_block _
+      | Omd.Thematic_break _ as v -> Some v
+    and safe_def_elts { term ; defs } =
+      let defs = List.filter_map safe_inline defs in
+      safe_inline term
+      |> Option.map (fun term -> { Omd.term ; defs })
+    and safe_inline = function
+      | Concat (attr, inline) ->
+        Some (Concat (attr, List.filter_map safe_inline inline))
+      | Emph (attr, inline) ->
+        safe_inline inline
+        |> Option.map (fun inline -> Omd.Emph (attr, inline))
+      | Strong (attr, inline) ->
+        safe_inline inline
+        |> Option.map (fun inline -> Omd.Strong (attr, inline))
+      | Link (attr, link) ->
+        begin match safe_link link with
+        | `No_label | `Relative -> safe_inline link.Omd.label
+        | `Link l -> Some (Omd.Link (attr, l))
+        end
+      | Image (attr, link) ->
+        begin match safe_link link with
+        | `No_label | `Relative -> None
+        | `Link l -> Some (Omd.Image (attr, l))
+        end
+      | Html _ -> None
+      | Text _
+      | Code _
+      | Hard_break _
+      | Soft_break _ as v -> Some v
+    and safe_link ({ label ; destination ; _ } as l) =
+      let absolute_link =
+        String.(length destination >= 2 && equal (sub destination 0 2) "//") ||
+        String.(length destination >= 7 && equal (sub destination 0 7) "http://") ||
+        String.(length destination >= 8 && equal (sub destination 0 8) "https://")
       in
-    let mapper = Mapper.make ~block () in
-    Mapper.map_doc mapper doc)
-      adjust_heading
+      if absolute_link then
+        match safe_inline label with
+        | None -> `No_label
+        | Some label -> `Link { l with label }
+      else
+        `Relative
     in
-  Cmarkit_html.of_doc ~safe doc
+    List.filter_map safe_block omd
+
+  let html_of_string markdown =
+    markdown
+    |> Omd.of_string
+    |> make_safe
+    |> Omd.to_html
+
+end

 module Path = struct
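The `make_safe` filter above sanitizes by construction: raw HTML blocks and inlines are dropped, images behind relative or label-less links vanish, and relative links collapse to their label text. A hypothetical use through the new entry point (module path assumed):

```ocaml
(* Hypothetical demo of Utils.Omd.html_of_string: the inline HTML is
   removed, the absolute link survives, the relative link degrades to
   its label text. *)
let () =
  let md =
    "Hi <script>alert(1)</script> [abs](https://robur.coop) [rel](/job)" in
  print_string (Utils.Omd.html_of_string md)
```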
54
lib/views.ml
54
lib/views.ml
|
@ -188,7 +188,7 @@ let artifact
|
||||||
~basename
|
~basename
|
||||||
~job_name
|
~job_name
|
||||||
~build
|
~build
|
||||||
~file:{ Builder_db.filepath; sha256; size }
|
~file:{ Builder_db.filepath; localpath = _; sha256; size }
|
||||||
=
|
=
|
||||||
let artifact_link =
|
let artifact_link =
|
||||||
Link.Job_build_artifact.make
|
Link.Job_build_artifact.make
|
||||||
|
@ -202,7 +202,7 @@ let artifact
|
||||||
else txtf "%a" Fpath.pp filepath
|
else txtf "%a" Fpath.pp filepath
|
||||||
];
|
];
|
||||||
H.txt " ";
|
H.txt " ";
|
||||||
H.code [txtf "SHA256:%s" (Ohex.encode sha256)];
|
H.code [txtf "SHA256:%a" Hex.pp (Hex.of_cstruct sha256)];
|
||||||
txtf " (%a)" Fmt.byte_size size;
|
txtf " (%a)" Fmt.byte_size size;
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@@ -274,9 +274,9 @@ The filename suffix of the unikernel binary indicate the expected execution environment

 A persistent link to the latest successful build is available as
 `/job/*jobname*/build/latest/`. Each build can be reproduced with
-[orb](https://github.com/robur-coop/orb/). The builds are scheduled and executed
-daily by [builder](https://github.com/robur-coop/builder/). This web interface is
-[builder-web](https://git.robur.coop/robur/builder-web/). Read further information
+[orb](https://github.com/roburio/orb/). The builds are scheduled and executed
+daily by [builder](https://github.com/roburio/builder/). This web interface is
+[builder-web](https://git.robur.io/robur/builder-web/). Read further information
 [on our project page](https://robur.coop/Projects/Reproducible_builds). This
 work has been funded by the European Union under the
 [NGI Pointer](https://pointer.ngi.eu) program. Contact team ATrobur.coop if you
@@ -285,7 +285,7 @@ have questions or suggestions.

 let make_header =
   [
-    H.Unsafe.data (Utils.md_to_html data);
+    H.Unsafe.data (Utils.Omd.html_of_string data);
     H.form ~a:H.[a_action "/hash"; a_method `Get] [
       H.label [
         H.txt "Search artifact by SHA256";
@@ -319,13 +319,18 @@ have questions or suggestions.
           ~build:latest_build.Builder_db.Build.uuid ()]
         [txtf "%a" pp_ptime latest_build.Builder_db.Build.start];
       H.txt " ";

     ]
-  @ artifact
+  @ (match latest_artifact with
+     | Some main_binary ->
+       artifact
         ~basename:true
         ~job_name
         ~build:latest_build
-        ~file:latest_artifact
+        ~file:main_binary
+     | None ->
+       [ txtf "Build failure: %a" Builder.pp_execution_result
+           latest_build.Builder_db.Build.result ]
+    )
   @ [ H.br () ]

 let make_jobs jobs =
@@ -356,23 +361,14 @@ have questions or suggestions.
       H.txt "View the latest failed builds ";
       H.a ~a:H.[a_href "/failed-builds"]
         [H.txt "here"];
-      H.txt ".";
+      H.txt "."
     ]]

-let make_all_or_active all =
-  [ H.p [
-      H.txt (if all then "View active jobs " else "View all jobs ");
-      H.a ~a:H.[a_href (if all then "/" else "/all-builds")]
-        [H.txt "here"];
-      H.txt ".";
-    ]]
-
-let make ~all section_job_map =
+let make section_job_map =
   layout ~title:"Reproducible OPAM builds"
     (make_header
     @ make_body section_job_map
-    @ make_failed_builds
-    @ make_all_or_active all)
+    @ make_failed_builds)

 end
@@ -387,7 +383,7 @@ module Job = struct
       [
         H.h2 ~a:H.[a_id "readme"] [H.txt "README"];
         H.a ~a:H.[a_href "#builds"] [H.txt "Skip to builds"];
-        H.Unsafe.data (Utils.md_to_html ~adjust_heading:2 data)
+        H.Unsafe.data (Utils.Omd.html_of_string data)
       ]
     )
@@ -491,7 +487,7 @@ module Job_build = struct
         pp_devices block_devices pp_devices net_devices]
   in
   let aux (file:Builder_db.file) =
-    let sha256_hex = Ohex.encode file.sha256 in
+    let (`Hex sha256_hex) = Hex.of_cstruct file.sha256 in
     [
       H.dt [
         H.a ~a:H.[a_href @@ Link.Job_build_artifact.make
@@ -864,7 +860,7 @@ let compare_builds
     ~(build_right : Builder_db.Build.t)
     ~env_diff:(added_env, removed_env, changed_env)
     ~pkg_diff:(added_pkgs, removed_pkgs, changed_pkgs)
-    ~opam_diff:(opam_diff, version_diff, left, right, duniverse)
+    ~opam_diff:(opam_diff, version_diff, left, right, duniverse_content_diff, duniverse_left, duniverse_right)
   =
   let items, data =
     List.fold_left (fun (items, data) (id, txt, amount, code) ->
@@ -875,24 +871,18 @@ let compare_builds
         H.li [ H.a ~a:[H.a_href id_href] [txtf "%d %s" amount txt] ] :: items,
         data @ H.h3 ~a:[H.a_id id] [H.txt txt] :: code)
       ([], [])
-      ([ ("opam-packages-removed", "Opam packages removed",
+      [ ("opam-packages-removed", "Opam packages removed",
           OpamPackage.Set.cardinal left, [ H.code (packages left) ]) ;
         ("opam-packages-installede", "New opam packages installed",
           OpamPackage.Set.cardinal right, [ H.code (packages right) ]) ;
         ("opam-packages-version-diff", "Opam packages with version changes",
          List.length version_diff, [ H.code (package_diffs version_diff) ]) ;
-      ] @ (match duniverse with
-        | Ok (duniverse_left, duniverse_right, duniverse_content_diff) ->
-          [
         ("duniverse-dirs-removed", "Duniverse directories removed",
          List.length duniverse_left, [ H.code (duniverse_dirs duniverse_left) ]) ;
         ("duniverse-dirs-installed", "New duniverse directories installed",
          List.length duniverse_right, [ H.code (duniverse_dirs duniverse_right) ]) ;
         ("duniverse-dirs-content-diff", "Duniverse directories with content changes",
          List.length duniverse_content_diff, [ H.code (duniverse_diffs duniverse_content_diff) ]) ;
-          ]
-        | Error `Msg msg -> [ "duniverse-dirs-error", "Duniverse parsing error", 1, [ H.txt msg ] ]
-      ) @ [
         ("opam-packages-opam-diff", "Opam packages with changes in their opam file",
          List.length opam_diff, opam_diffs opam_diff) ;
         ("env-removed", "Environment variables removed",
|
||||||
List.length added_pkgs, [ H.code (key_values added_pkgs) ]) ;
|
List.length added_pkgs, [ H.code (key_values added_pkgs) ]) ;
|
||||||
("pkgs-changed", "System packages changed",
|
("pkgs-changed", "System packages changed",
|
||||||
List.length changed_pkgs, [ H.code (key_value_changes changed_pkgs) ]) ;
|
List.length changed_pkgs, [ H.code (key_value_changes changed_pkgs) ]) ;
|
||||||
])
|
]
|
||||||
in
|
in
|
||||||
layout
|
layout
|
||||||
~nav:(`Comparison ((job_left, build_left), (job_right, build_right)))
|
~nav:(`Comparison ((job_left, build_left), (job_right, build_right)))
|
||||||
|
|
|
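Aside: the fold in compare_builds above turns each (id, title, count, body) tuple into both a table-of-contents entry and a page section. A simplified sketch of the same shape, using plain strings instead of Tyxml nodes (illustrative only):

  let render rows =
    List.fold_left (fun (items, data) (id, title, count, body) ->
        (* one nav item per row, and the section itself appended to the page *)
        Printf.sprintf "<li><a href=\"#%s\">%d %s</a></li>" id count title :: items,
        data @ (Printf.sprintf "<h3 id=\"%s\">%s</h3>" id title :: body))
      ([], []) rows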
@@ -1,5 +1,7 @@
 module Set = OpamPackage.Set

+type package = OpamPackage.t
+
 let packages (switch : OpamFile.SwitchExport.t) =
   assert (Set.cardinal switch.selections.sel_pinned = 0);
   assert (Set.cardinal switch.selections.sel_compiler = 0);
@@ -37,11 +39,7 @@ let duniverse_dirs_data =
       in
       let* dir = string ~ctx:"directory" dir in
       Ok (url, dir, List.rev hashes)
-    | { pelem = List { pelem = [ url ; dir ] ; _ } ; _ } ->
-      let* url = string ~ctx:"url" url in
-      let* dir = string ~ctx:"directory" dir in
-      Ok (url, dir, [])
-    | _ -> Error (`Msg "expected a list of URL, DIR, [HASHES]")
+    | _ -> Error (`Msg "expected a string or identifier")
   in
   function
   | { pelem = List { pelem = lbody ; _ } ; _ } ->
@@ -56,15 +54,15 @@ let duniverse (switch : OpamFile.SwitchExport.t) =
   let root = switch.OpamFile.SwitchExport.selections.OpamTypes.sel_roots in
   if OpamPackage.Set.cardinal root = 1 then
     let root = OpamPackage.Set.choose root in
-    match OpamPackage.(Name.Map.find_opt root.name switch.OpamFile.SwitchExport.overlays) with
-    | None -> Error (`Msg "opam switch export doesn't contain the main package")
-    | Some opam ->
+    Option.bind
+      OpamPackage.(Name.Map.find_opt root.name switch.OpamFile.SwitchExport.overlays)
+      (fun opam ->
       match OpamFile.OPAM.extended opam duniverse_dir duniverse_dirs_data with
-      | None -> Ok None
-      | Some Error e -> Error e
-      | Some Ok v -> Ok (Some v)
+      | None -> None
+      | Some Error _ -> None
+      | Some Ok v -> Some v)
   else
-    Error (`Msg "not a single root package found in opam switch export")
+    None

 type duniverse_diff = {
   name : string ;
@@ -91,19 +89,11 @@ let duniverse_diff l r =
   let keys_l_only = ref [] and keys_r_only = ref [] and diff = ref [] in
   let equal_hashes l r =
     (* l and r are lists of pairs, with the hash kind and its value *)
-    (* for a git remote, the hashes are empty lists *)
-    (match l with [] -> false | _ -> true) &&
-    (match r with [] -> false | _ -> true) &&
     List.for_all (fun (h, v) ->
         match List.assoc_opt h r with
-        | None -> false
+        | None -> true
         | Some v' -> String.equal v v')
-      l &&
-    List.for_all (fun (h, v) ->
-        match List.assoc_opt h l with
-        | None -> false
-        | Some v' -> String.equal v v')
-      r
+      l
   in
   let _ =
     M.merge (fun key l r ->
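Aside: the removed variant of equal_hashes is the stricter of the two — both hash lists must be non-empty and each side must confirm every entry of the other. A standalone sketch of that semantics, with its behaviour spelled out as assertions (illustrative only):

  let equal_hashes l r =
    (* empty lists denote git remotes; those never compare equal here *)
    l <> [] && r <> [] &&
    List.for_all (fun (h, v) ->
        match List.assoc_opt h r with
        | None -> false
        | Some v' -> String.equal v v') l &&
    List.for_all (fun (h, v) ->
        match List.assoc_opt h l with
        | None -> false
        | Some v' -> String.equal v v') r

  let () =
    assert (equal_hashes [ (`SHA256, "abc") ] [ (`SHA256, "abc") ]);
    assert (not (equal_hashes [] []))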
@@ -112,7 +102,6 @@ let duniverse_diff l r =
       | Some _, None -> keys_l_only := key :: !keys_l_only; None
       | None, None -> None
       | Some (_, l), Some (_, r) when equal_hashes l r -> None
-      | Some (url1, []), Some (url2, []) when String.equal url1 url2 -> None
       | Some l, Some r -> diff := (key, l, r) :: !diff; None)
       l r
   in
@@ -269,9 +258,8 @@ let compare left right =
   and right_pkgs = diff packages_right packages_left
   in
   let opam_diff = detailed_opam_diffs left right opam_diff in
-  let duniverse_ret =
-    match duniverse left, duniverse right with
-    | Ok l, Ok r -> Ok (duniverse_diff l r)
-    | Error _ as e, _ | _, (Error _ as e) -> e
+  let left_duniverse, right_duniverse, duniverse_diff =
+    duniverse_diff (duniverse left) (duniverse right)
   in
-  (opam_diff, version_diff, left_pkgs, right_pkgs, duniverse_ret)
+  (opam_diff, version_diff, left_pkgs, right_pkgs,
+   duniverse_diff, left_duniverse, right_duniverse)
@@ -1,39 +0,0 @@
-type opam_diff = {
-  pkg : OpamPackage.t ;
-  build : (OpamTypes.command list * OpamTypes.command list) option ;
-  install : (OpamTypes.command list * OpamTypes.command list) option ;
-  url : (OpamFile.URL.t option * OpamFile.URL.t option) option ;
-  otherwise_equal : bool ;
-}
-
-type version_diff = {
-  name : OpamPackage.Name.t;
-  version_left : OpamPackage.Version.t;
-  version_right : OpamPackage.Version.t;
-}
-
-type duniverse_diff = {
-  name : string ;
-  urls : string * string option ;
-  hash : (OpamHash.kind * string option * string option) list ;
-}
-
-val pp_opampackage : Format.formatter -> OpamPackage.t -> unit
-
-val pp_version_diff : Format.formatter -> version_diff -> unit
-
-val pp_duniverse_diff : Format.formatter -> duniverse_diff -> unit
-
-val pp_duniverse_dir : Format.formatter -> string * string -> unit
-
-val pp_opam_diff : Format.formatter -> opam_diff -> unit
-
-val commands_to_strings : OpamTypes.command list * OpamTypes.command list -> string list * string list
-
-val opt_url_to_string : OpamFile.URL.t option * OpamFile.URL.t option -> string * string
-
-val compare: OpamFile.SwitchExport.t ->
-  OpamFile.SwitchExport.t ->
-  opam_diff list * version_diff list * OpamPackage.Set.t * OpamPackage.Set.t * ((string * string) list * (string * string) list * duniverse_diff list, [> `Msg of string ]) result
@@ -32,8 +32,6 @@ Options:
     Hex encoded SHA256 digest of the main binary.
   --job=STRING
     Job name that was built.
-  --main-binary-filepath=STRING
-    The file path of the main binary.
 EOM
   exit 1
 }

@@ -41,7 +39,6 @@ EOM
 BUILD_TIME=
 SHA=
 JOB=
-FILEPATH=

 while [ $# -gt 1 ]; do
   OPT="$1"

@@ -56,9 +53,6 @@ while [ $# -gt 1 ]; do
     --job=*)
       JOB="${OPT##*=}"
       ;;
-    --main-binary-filepath=*)
-      FILEPATH="${OPT##*=}"
-      ;;
     --*)
       warn "Ignoring unknown option: '${OPT}'"
       ;;
@@ -73,14 +67,13 @@ done
 [ -z "${BUILD_TIME}" ] && die "The --build-time option must be specified"
 [ -z "${SHA}" ] && die "The --sha256 option must be specified"
 [ -z "${JOB}" ] && die "The --job option must be specified"
-[ -z "${FILEPATH}" ] && die "The --main-binary-filepath option must be specified"

 FILENAME="${1}"

 : "${REPO:="/usr/local/www/pkg"}"
 : "${REPO_KEY:="/usr/local/etc/builder-web/repo.key"}"

-if [ "$(basename "${FILEPATH}" .pkg)" = "$(basename "${FILEPATH}")" ]; then
+if [ "$(basename "${FILENAME}" .pkg)" = "$(basename "${FILENAME}")" ]; then
   echo "Not a FreeBSD package"
   exit 0
 fi
@@ -131,7 +124,6 @@ PKG_DIR="${REPO_DIR}/All"
 # and then move it before recreating the index
 pkg create -r "${PKG_ROOT}" -m "${MANIFEST}" -o "${TMP}"
 mkdir -p "${PKG_DIR}"
-rm -f "${PKG_DIR}"/"${NAME}"-*.pkg
 mv "${TMP}/${NAME}-${FULL_VERSION}.pkg" "${PKG_DIR}"

 pkg repo "${REPO_DIR}" "${REPO_KEY}"
@@ -2,7 +2,7 @@ name: builder-web
 version: %%VERSION_NUM%%
 origin: local/builder-web
 comment: Builder web service
-www: https://git.robur.coop/robur/builder-web
+www: https://git.robur.io/robur/builder-web
 maintainer: Robur <team@robur.coop>
 prefix: /usr/local
 licenselogic: single
@@ -33,7 +33,7 @@ procname="/usr/local/libexec/builder-web"

 builder_web_start () {
   echo "Starting ${name}."
-  /usr/sbin/daemon -S -r -P "${pidfile}" -u "${builder_web_user}" \
+  /usr/sbin/daemon -S -p "${pidfile}" -u "${builder_web_user}" \
     "${procname}" ${builder_web_flags}
 }
@@ -77,8 +77,6 @@ done
 DB="${DATA_DIR}/builder.sqlite3"
 [ ! -e "$DB" ] && die "The database doesn't exist: '$DB'"

-# Let's be somewhat lenient with the database version.
-# In visualizations.sh we can be more strict.
 DB_VERSION="$(sqlite3 "$DB" "PRAGMA user_version;")"
 [ -z "$DB_VERSION" ] && die "Couldn't read database version from '$DB'"
 [ "$DB_VERSION" -lt 16 ] && die "The database version should be >= 16. It is '$DB_VERSION'"
@@ -129,22 +127,8 @@ fi
 ATTEMPTED_VIZS=0
 FAILED_VIZS=0

-distinct-input () {
-  {
-    sqlite3 "${DATA_DIR}/builder.sqlite3" "SELECT b.uuid
-      FROM build b
-      JOIN build_artifact opam ON opam.build = b.id
-      WHERE opam.filepath = 'opam-switch' AND b.main_binary NOT NULL
-      GROUP BY opam.sha256;"
-    sqlite3 "${DATA_DIR}/builder.sqlite3" "SELECT b.uuid
-      FROM build b
-      JOIN build_artifact debug ON debug.build = b.id
-      WHERE debug.filepath LIKE '%.debug' AND b.main_binary NOT NULL
-      GROUP BY debug.sha256;"
-  } | sort -u
-}
-
-for UUID in $(distinct-input); do
+for i in $(find "${DATA_DIR}" -type f -path \*output/bin\*); do
+  UUID=$(echo "${i}" | rev | cut -d '/' -f 4 | rev)
   if ! "$VISUALIZATIONS_CMD" \
     --data-dir="${DATA_DIR}" \
     --cache-dir="${CACHE_DIR}" \
@@ -15,14 +15,11 @@ freebsd_sanitize_version () {
     exit 1;
   fi
   if [ $version_with_commit -eq 0 ]; then
-    v="${v}.0.g0000000.${post}"
-  else
-    v="${v}.${post}"
+    v="${v}.0.g0000000"
   fi
   echo $v
 }

-echo "using FreeBSD pkg to compare versions now:"
 while read version_a version_b; do
   version_a=$(freebsd_sanitize_version $version_a)
   version_b=$(freebsd_sanitize_version $version_b)
@@ -30,28 +27,7 @@ while read version_a version_b; do
   printf "%s %s %s\n" "$version_a" "$result" "$version_b"
 done < versions.txt

-debian_sanitize_version () {
-  post=$(echo $1 | rev | cut -d '-' -f 1-2 | rev)
-  v=$(echo $1 | rev | cut -d '-' -f 3- | rev)
-  version_good=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$')
-  version_with_commit=$(echo $v | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\-[0-9]\+\-g[0-9a-fA-f]\+$')
-  if [ $version_good -eq 0 -a $version_with_commit -eq 0 ]; then
-    echo "invalid version $v";
-    exit 1;
-  fi
-  if [ $version_with_commit -eq 0 ]; then
-    v="${v}-0-g0000000-${post}"
-  else
-    v="${v}-${post}"
-  fi
-  echo $v
-}
-
-echo ""
-echo "using Debian dpkg to compare versions now:"
 while read version_a version_b; do
-  version_a=$(debian_sanitize_version $version_a)
-  version_b=$(debian_sanitize_version $version_b)
   if dpkg --compare-versions "$version_a" lt "$version_b"; then
     echo "$version_a < $version_b"
   else
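Aside: both sanitizers share one idea — pad a bare tagged version with a zero-commit marker so that tagged and post-tag builds sort consistently under the package manager's comparison. A small OCaml sketch of that normalization (function names ours; the scripts do the same with grep and cut):

  let is_tagged v =
    (* true for a bare MAJOR.MINOR.PATCH, false once a -NUM-gHASH suffix is present *)
    String.for_all (fun c -> c = '.' || ('0' <= c && c <= '9')) v
    && List.length (String.split_on_char '.' v) = 3

  let freebsd_sanitize v = if is_tagged v then v ^ ".0.g0000000" else v

  let () = assert (freebsd_sanitize "1.5.0" = "1.5.0.0.g0000000")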
@@ -4,9 +4,9 @@ Section: unknown
 Priority: optional
 Maintainer: Robur Team <team@robur.coop>
 Standards-Version: 4.4.1
-Homepage: https://git.robur.coop/robur/builder-web
-Vcs-Browser: https://git.robur.coop/robur/builder-web
-Vcs-Git: https://git.robur.coop/robur/builder-web.git
+Homepage: https://git.robur.io/robur/builder-web
+Vcs-Browser: https://git.robur.io/robur/builder-web
+Vcs-Git: https://git.robur.io/robur/builder-web.git
 Architecture: all
 Depends: libgmp10, libsqlite3-0, libev4, opam-graph, modulectomy
 Description: Web service for storing and presenting builds.
@@ -1,7 +1,7 @@
 Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
 Upstream-Name: builder-web
 Upstream-Contact: Robur Team <team@robur.coop>
-Source: https://git.robur.coop/robur/builder-web
+Source: https://git.robur.io/robur/builder-web

 Files: *
 Copyright: "Reynir Björnsson <reynir@reynir.dk>" "Hannes Mehnert <hannes@mehnert.org>"
@@ -36,8 +36,6 @@ Options:
     Job name that was built.
   --platform=STRING
     Platform name on which the build was performed.
-  --main-binary-filepath=STRING
-    The file path of the main binary.
 EOM
   exit 1
 }

@@ -46,7 +44,6 @@ BUILD_TIME=
 SHA=
 JOB=
 PLATFORM=
-FILEPATH=

 while [ $# -gt 1 ]; do
   OPT="$1"

@@ -64,9 +61,6 @@ while [ $# -gt 1 ]; do
     --platform=*)
       PLATFORM="${OPT##*=}"
       ;;
-    --main-binary-filepath=*)
-      FILEPATH="${OPT##*=}"
-      ;;
     --*)
       warn "Ignoring unknown option: '${OPT}'"
       ;;
|
||||||
[ -z "${SHA}" ] && die "The --sha256 option must be specified"
|
[ -z "${SHA}" ] && die "The --sha256 option must be specified"
|
||||||
[ -z "${JOB}" ] && die "The --job option must be specified"
|
[ -z "${JOB}" ] && die "The --job option must be specified"
|
||||||
[ -z "${PLATFORM}" ] && die "The --platform option must be specified"
|
[ -z "${PLATFORM}" ] && die "The --platform option must be specified"
|
||||||
[ -z "${FILEPATH}" ] && die "The --main-binary-filepath option must be specified"
|
|
||||||
|
|
||||||
FILENAME="${1}"
|
FILENAME="${1}"
|
||||||
|
|
||||||
if [ $(basename "${FILEPATH}" .deb) = $(basename "${FILEPATH}") ]; then
|
if [ $(basename "${FILENAME}" .deb) = $(basename "${FILENAME}") ]; then
|
||||||
echo "Not a Debian package"
|
echo "Not a Debian package"
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
|
@ -111,16 +104,6 @@ mkdir "${PKG_ROOT}"
|
||||||
dpkg-deb -R "${FILENAME}" "${PKG_ROOT}"
|
dpkg-deb -R "${FILENAME}" "${PKG_ROOT}"
|
||||||
|
|
||||||
VERSION=$(dpkg-deb -f "${FILENAME}" Version)
|
VERSION=$(dpkg-deb -f "${FILENAME}" Version)
|
||||||
# if we've a tagged version (1.5.0), append the number of commits and a dummy hash
|
|
||||||
VERSION_GOOD=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+$') || true
|
|
||||||
VERSION_WITH_COMMIT=$(echo $VERSION | grep -c '^[0-9]\+\.[0-9]\+\.[0-9]\+\-[0-9]\+\-g[0-9a-fA-f]\+$') || true
|
|
||||||
if [ $VERSION_GOOD -eq 0 -a $VERSION_WITH_COMMIT -eq 0 ]; then
|
|
||||||
die "version does not conform to (MAJOR.MINOR.PATCH[-#NUM_COMMITS-g<HASH>])"
|
|
||||||
fi
|
|
||||||
if [ $VERSION_WITH_COMMIT -eq 0 ]; then
|
|
||||||
VERSION="${VERSION}-0-g0000000"
|
|
||||||
fi
|
|
||||||
|
|
||||||
NEW_VERSION="${VERSION}"-"${BUILD_TIME}"-"${SHA}"
|
NEW_VERSION="${VERSION}"-"${BUILD_TIME}"-"${SHA}"
|
||||||
|
|
||||||
sed -i "" -e "s/Version:.*/Version: ${NEW_VERSION}/g" "${PKG_ROOT}/DEBIAN/control"
|
sed -i "" -e "s/Version:.*/Version: ${NEW_VERSION}/g" "${PKG_ROOT}/DEBIAN/control"
|
||||||
|
@ -131,8 +114,6 @@ if ! aptly repo show "${PLATFORM}" > /dev/null 2>&1; then
|
||||||
aptly repo create --distribution="${PLATFORM}" "${PLATFORM}"
|
aptly repo create --distribution="${PLATFORM}" "${PLATFORM}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
PACKAGE=$(dpkg-deb -f "${FILENAME}" Package)
|
|
||||||
aptly repo remove "${PLATFORM}" "${PACKAGE}"
|
|
||||||
aptly repo add "${PLATFORM}" "${TMP}"
|
aptly repo add "${PLATFORM}" "${TMP}"
|
||||||
|
|
||||||
: "${REPO_KEYID:="D5E2DC92617877EDF7D4FD4345EA05FB7E26053D"}"
|
: "${REPO_KEYID:="D5E2DC92617877EDF7D4FD4345EA05FB7E26053D"}"
|
||||||
|
|
|
@@ -6,4 +6,3 @@
 3.0.0-20230101-abcd 3.0.1-20230204-bdbd
 1.5.0-20220516-a0d5a2 1.5.0-3-g26b5a59-20220527-0bc180
 1.5.0-3-g26b5a59-20220527-0bc180 1.5.1-20220527-0bc180
-0.1.0-20221120104301-f9e456637274844d45d9758ec661a136d0cda7966b075e4426b69fe6da00427b 0.1.0-237-g62965d4-20230527202149-6118c39221f318154e234098b5cffd4dc1d80f19cf2200cc6b1eb768dbf6decb
@@ -70,59 +70,46 @@ done
 [ -z "${CACHE_DIR}" ] && die "The --cache-dir option must be specified"
 [ -z "${DATA_DIR}" ] && die "The --data-dir option must be specified"

-info "processing UUID '${UUID}'"
+info "processing UUID '$UUID'"

 DB="${DATA_DIR}/builder.sqlite3"

-# A new visualizations.sh script may be installed during an upgrade while the
-# old builder-web binary is running. In that case things can get out of sync.
-DB_VERSION="$(sqlite3 "$DB" "PRAGMA user_version;")"
-[ -z "$DB_VERSION" ] && die "Couldn't read database version from '$DB'"
-[ "$DB_VERSION" -ne 18 ] && die "The database version should be 18. It is '$DB_VERSION'"
-
-APP_ID="$(sqlite3 "$DB" "PRAGMA application_id;")"
-[ -z "$APP_ID" ] && die "Couldn't read application-id from '$DB'"
-[ "$APP_ID" -ne 1234839235 ] && die "The application-id should be 1234839235. It is '$APP_ID'"
-
 get_main_binary () {
-  sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
-  FROM build AS b
+  sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
   JOIN build_artifact AS ba ON ba.build = b.id AND b.main_binary = ba.id
-  WHERE uuid = '${UUID}';"
+  WHERE uuid = '$UUID';"
 }

 BIN="${DATA_DIR}/$(get_main_binary)" || die "Failed to get main binary from database"
-[ -z "${BIN}" ] && die "No main-binary found in db '${DB}' for build '${UUID}'"
+[ -z "${BIN}" ] && die "No main-binary found in db '$DB' for build '$UUID'"

 get_debug_binary () {
-  sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
-  FROM build AS b
+  sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
   JOIN build_artifact AS ba ON ba.build = b.id
   WHERE
-    uuid = '${UUID}'
-    AND ba.filepath LIKE '%.debug';"
+    uuid = '$UUID'
+    AND ba.localpath LIKE '%.debug';"
 }

 DEBUG_BIN_RELATIVE="$(get_debug_binary)" || die "Failed to get debug binary from database"

 get_opam_switch () {
-  sqlite3 "${DB}" "SELECT '_artifacts/' || substr(lower(hex(ba.sha256)), 1, 2) || '/' || lower(hex(ba.sha256))
-  FROM build AS b
+  sqlite3 "${DB}" "SELECT ba.localpath FROM build AS b
   JOIN build_artifact AS ba ON ba.build = b.id
   WHERE
-    uuid = '${UUID}'
+    uuid = '$UUID'
     AND ba.filepath = 'opam-switch';"
 }

 OPAM_SWITCH="$(get_opam_switch)" || die "Failed to get opam switch from database"
-[ -z "${OPAM_SWITCH}" ] && die "No 'opam-switch' found in db '${DB}' for build '${UUID}'"
+[ -z "${OPAM_SWITCH}" ] && die "No 'opam-switch' found in db '$DB' for build '$UUID'"
 OPAM_SWITCH="${DATA_DIR}/${OPAM_SWITCH}"

 OPAM_GRAPH="opam-graph"
 MODULECTOMY="modulectomy"

-LATEST_TREEMAPVIZ_VERSION="$(${MODULECTOMY} --version)" || die "Failed to get modulectomy version"
-LATEST_DEPENDENCIESVIZ_VERSION="$(${OPAM_GRAPH} --version)" || die "Failed to get opam-graph version"
+LATEST_TREEMAPVIZ_VERSION="$($MODULECTOMY --version)" || die "Failed to get modulectomy version"
+LATEST_DEPENDENCIESVIZ_VERSION="$($OPAM_GRAPH --version)" || die "Failed to get opam-graph version"

 TREEMAP_CACHE_DIR="${CACHE_DIR}/treemap_${LATEST_TREEMAPVIZ_VERSION}"
 DEPENDENCIES_CACHE_DIR="${CACHE_DIR}/dependencies_${LATEST_DEPENDENCIESVIZ_VERSION}"
|
||||||
# /// Dependencies viz
|
# /// Dependencies viz
|
||||||
|
|
||||||
if [ ! -d "${DEPENDENCIES_CACHE_DIR}" ]; then
|
if [ ! -d "${DEPENDENCIES_CACHE_DIR}" ]; then
|
||||||
mkdir "${DEPENDENCIES_CACHE_DIR}" || die "Failed to create directory '${DEPENDENCIES_CACHE_DIR}'"
|
mkdir "${DEPENDENCIES_CACHE_DIR}" || die "Failed to create directory '$DEPENDENCIES_CACHE_DIR'"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
OPAM_SWITCH_FILEPATH='opam-switch'
|
OPAM_SWITCH_FILEPATH='opam-switch'
|
||||||
|
@ -157,8 +144,8 @@ OPAM_SWITCH_FILEPATH='opam-switch'
|
||||||
get_opam_switch_hash () {
|
get_opam_switch_hash () {
|
||||||
sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
|
sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
|
||||||
JOIN build_artifact AS ba ON ba.build = b.id
|
JOIN build_artifact AS ba ON ba.build = b.id
|
||||||
WHERE uuid = '${UUID}'
|
WHERE uuid = '$UUID'
|
||||||
AND ba.filepath = '${OPAM_SWITCH_FILEPATH}';"
|
AND ba.filepath = '$OPAM_SWITCH_FILEPATH';"
|
||||||
}
|
}
|
||||||
|
|
||||||
DEPENDENCIES_INPUT_HASH="$(get_opam_switch_hash)" || die "Failed to get opam-switch hash from database"
|
DEPENDENCIES_INPUT_HASH="$(get_opam_switch_hash)" || die "Failed to get opam-switch hash from database"
|
||||||
|
@@ -168,8 +155,7 @@ if [ -e "${DEPENDENCIES_VIZ_FILENAME}" ]; then
   info "Dependency visualization already exists: '${DEPENDENCIES_VIZ_FILENAME}'"
 else
   if ${OPAM_GRAPH} --output-format=html "${OPAM_SWITCH}" > "${TMPDEPENDENCIES}"; then
-    cp "${TMPDEPENDENCIES}" "${DEPENDENCIES_VIZ_FILENAME}"
-    rm "${TMPDEPENDENCIES}"
+    mv "${TMPDEPENDENCIES}" "${DEPENDENCIES_VIZ_FILENAME}"
   else
     die "opam-graph failed to generate visualization"
   fi
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
SIZE="$(stat_aux "${BIN}")"
|
SIZE="$(stat_aux "$BIN")"
|
||||||
|
|
||||||
if [ ! -d "${TREEMAP_CACHE_DIR}" ]; then
|
if [ ! -d "${TREEMAP_CACHE_DIR}" ]; then
|
||||||
mkdir "${TREEMAP_CACHE_DIR}" || die "Failed to create directory '${TREEMAP_CACHE_DIR}'"
|
mkdir "${TREEMAP_CACHE_DIR}" || die "Failed to create directory '$TREEMAP_CACHE_DIR'"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
get_debug_bin_hash () {
|
get_debug_bin_hash () {
|
||||||
sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
|
sqlite3 "${DB}" "SELECT lower(hex(ba.sha256)) FROM build AS b
|
||||||
JOIN build_artifact AS ba ON ba.build = b.id
|
JOIN build_artifact AS ba ON ba.build = b.id
|
||||||
WHERE uuid = '${UUID}'
|
WHERE uuid = '$UUID'
|
||||||
AND ba.filepath LIKE '%.debug';"
|
AND ba.filepath LIKE '%.debug';"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -215,8 +201,7 @@ if [ -n "${DEBUG_BIN_RELATIVE}" ]; then
     "${DEBUG_BIN}" \
     > "${TMPTREE}"
   then
-    cp "${TMPTREE}" "${TREEMAP_VIZ_FILENAME}"
-    rm "${TMPTREE}"
+    mv "${TMPTREE}" "${TREEMAP_VIZ_FILENAME}"
   else
     die "modulectomy failed to generate visualization"
   fi
10 test/dune
@@ -1,16 +1,20 @@
 (test
  (name test_builder_db)
  (modules test_builder_db)
- (libraries ptime.clock.os builder_db caqti.blocking alcotest mirage-crypto-rng.unix ohex))
+ (libraries builder_db caqti.blocking alcotest mirage-crypto-rng.unix))

 (test
  (name markdown_to_html)
  (modules markdown_to_html)
- (libraries builder_web cmarkit alcotest))
+ (libraries builder_web alcotest))

 (test
  (name router)
  (modules router)
  (libraries builder_web fmt dream yojson alcotest)
  (preprocess
-  (pps ppx_deriving.std ppx_deriving_yojson)))
+  (pps
+   ppx_deriving.std
+   ppx_deriving_yojson
+   ))
+ )
@@ -1,14 +1,14 @@
-let markdown_to_html = Builder_web__Utils.md_to_html
+let markdown_to_html = Builder_web__Utils.Omd.html_of_string

 let test_simple () =
   let markdown = {|# Hello world|} in
   let html = markdown_to_html markdown in
-  Alcotest.(check string "simple html" "<h1 id=\"hello-world\"><a class=\"anchor\" aria-hidden=\"true\" href=\"#hello-world\"></a>Hello world</h1>\n" html)
+  Alcotest.(check string "simple html" "<h1>Hello world</h1>\n" html)

 let test_html_script () =
   let markdown = {|# <script>Hello world</script>|} in
   let html = markdown_to_html markdown in
-  Alcotest.(check string "html script header" "<h1 id=\"hello-world\"><a class=\"anchor\" aria-hidden=\"true\" href=\"#hello-world\"></a><!-- CommonMark raw HTML omitted -->Hello world<!-- CommonMark raw HTML omitted --></h1>\n" html)
+  Alcotest.(check string "html script header" "<h1>Hello world</h1>\n" html)

 let test_preserve_span_content () =
   let markdown = {|* <span id="myref">My ref</span>
@@ -16,8 +16,10 @@ let test_preserve_span_content () =
   let html = markdown_to_html markdown in
   Alcotest.(check string "html span content preserved"
     {|<ul>
-<li><!-- CommonMark raw HTML omitted -->My ref<!-- CommonMark raw HTML omitted --></li>
-<li><a href="#myref">See my ref</a> for more information</li>
+<li>My ref
+</li>
+<li>See my ref for more information
+</li>
 </ul>
 |}
     html)
@@ -25,21 +27,20 @@ let test_preserve_span_content () =
 let test_remove_script () =
   let markdown = {|<script>alert(1);</script>|} in
   let html = markdown_to_html markdown in
-  Alcotest.(check string "html script removed" "<!-- CommonMark HTML block omitted -->\n" html)
+  Alcotest.(check string "html script removed" "" html)

 let test_list_with_html_block_and_markdown () =
   let markdown = "* <div> Hello, World!</div> *this is not html*" in
   let html = markdown_to_html markdown in
   Alcotest.(check string "list with html block and markdown"
-    (*"<ul>\n<li><em>this is not html</em>\n</li>\n</ul>\n"*)
-    "<ul>\n<li>\n<!-- CommonMark HTML block omitted -->\n</li>\n</ul>\n"
+    (*"<ul>\n<li><em>this is not html</em>\n</li>\n</ul>\n"*) ""
     html)

 let test_list_with_inline_html_and_markdown () =
   let markdown = "* <span> Hello, World!</span> *this is not html*" in
   let html = markdown_to_html markdown in
   Alcotest.(check string "list with html block and markdown"
-    "<ul>\n<li><!-- CommonMark raw HTML omitted --> Hello, World!<!-- CommonMark raw HTML omitted --> <em>this is not html</em></li>\n</ul>\n"
+    "<ul>\n<li> Hello, World! <em>this is not html</em>\n</li>\n</ul>\n"
     html)

 let test_absolute_link () =
@@ -50,131 +51,35 @@ let test_absolute_link () =
 let test_relative_link () =
   let markdown = "[foo](../foo.jpg)" in
   let html = markdown_to_html markdown in
-  Alcotest.(check string "relative link" "<p><a href=\"../foo.jpg\">foo</a></p>\n" html)
+  Alcotest.(check string "relative link" "<p>foo</p>\n" html)

 let test_absolute_image () =
   let markdown = "![alttext](https://foo.com/bar.jpg)" in
   let html = markdown_to_html markdown in
   Alcotest.(check string "absolute image"
-    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"alttext\" ></p>\n" html)
+    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"alttext\" /></p>\n" html)

 let test_absolute_image_no_alt () =
   let markdown = "![](https://foo.com/bar.jpg)" in
   let html = markdown_to_html markdown in
   Alcotest.(check string "absolute image"
-    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" ></p>\n" html)
+    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" /></p>\n" html)

 let test_relative_image () =
   let markdown = "![](/bar.jpg)" in
   let html = markdown_to_html markdown in
-  Alcotest.(check string "relative image" "<p><img src=\"/bar.jpg\" alt=\"\" ></p>\n" html)
+  Alcotest.(check string "relative image" "" html)

 let test_absolute_image_script_alt () =
   let markdown = "![<script src=\"bla.js\"></script>](https://foo.com/bar.jpg)" in
   let html = markdown_to_html markdown in
   Alcotest.(check string "absolute image with script alt text"
-    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" ></p>\n" html)
+    "<p><img src=\"https://foo.com/bar.jpg\" alt=\"\" /></p>\n" html)

 let test_fragment_link () =
   let markdown = "[fragment](#fragment)" in
   let html = markdown_to_html markdown in
-  Alcotest.(check string "fragment link" "<p><a href=\"#fragment\">fragment</a></p>\n" html)
+  Alcotest.(check string "fragment link" "<p>fragment</p>\n" html)

-let test_heading_adjustment () =
-  let markdown = {|# foo
-## bar
-# baz
-## bazbar
-### bazbarbar
-#### bazbarbarbar
-##### bazbarbarbarbar
-###### bazbarbarbarbarbar
-|}
-  in
-  let html = markdown_to_html ~adjust_heading:2 markdown in
-  (* NB: the maximum heading is 6 in cmarkit, thus we reduce the structure *)
-  let exp = {|<h3 id="foo"><a class="anchor" aria-hidden="true" href="#foo"></a>foo</h3>
-<h4 id="bar"><a class="anchor" aria-hidden="true" href="#bar"></a>bar</h4>
-<h3 id="baz"><a class="anchor" aria-hidden="true" href="#baz"></a>baz</h3>
-<h4 id="bazbar"><a class="anchor" aria-hidden="true" href="#bazbar"></a>bazbar</h4>
-<h5 id="bazbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbar"></a>bazbarbar</h5>
-<h6 id="bazbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbar"></a>bazbarbarbar</h6>
-<h6 id="bazbarbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbarbar"></a>bazbarbarbarbar</h6>
-<h6 id="bazbarbarbarbarbar"><a class="anchor" aria-hidden="true" href="#bazbarbarbarbarbar"></a>bazbarbarbarbarbar</h6>
-|} in
-  Alcotest.(check string "header adjustment works fine" exp html)
-
-let test_table () =
-  let markdown = {__|| a | | b | c | d | e |
-| --------------------- |-| -------------- | -------------- | --------------- | ------ |
-| entry | | **bla.file** | **other.file** | | |
-| _another entry_ | | **another.file** | **another.other** | | |
-|__}
-  in
-  let html = markdown_to_html ~adjust_heading:2 markdown in
-  let exp = {|<div role="region"><table>
-<tr>
-<th>a</th>
-<th></th>
-<th>b</th>
-<th>c</th>
-<th>d</th>
-<th>e</th>
-</tr>
-<tr>
-<td>entry</td>
-<td></td>
-<td><strong>bla.file</strong></td>
-<td><strong>other.file</strong></td>
-<td></td>
-<td></td>
-</tr>
-<tr>
-<td><em>another entry</em></td>
-<td></td>
-<td><strong>another.file</strong></td>
-<td><strong>another.other</strong></td>
-<td></td>
-<td></td>
-</tr>
-</table></div>|} in
-  Alcotest.(check string "table is rendered as html" exp html)
-
-let test_table2 () =
-  let markdown = {__|| a | | b | c | d | e |
-| --------------------- |-| -------------- | -------------- | --------------- | ------ |
-| entry | | | | **bla.file** | **other.file** |
-| _another entry_ | | | **another.file** | **another.other** | |
-|__}
-  in
-  let html = markdown_to_html ~adjust_heading:2 markdown in
-  let exp = {|<div role="region"><table>
-<tr>
-<th>a</th>
-<th></th>
-<th>b</th>
-<th>c</th>
-<th>d</th>
-<th>e</th>
-</tr>
-<tr>
-<td>entry</td>
-<td></td>
-<td></td>
-<td></td>
-<td><strong>bla.file</strong></td>
-<td><strong>other.file</strong></td>
-</tr>
-<tr>
-<td><em>another entry</em></td>
-<td></td>
-<td></td>
-<td><strong>another.file</strong></td>
-<td><strong>another.other</strong></td>
-<td></td>
-</tr>
-</table></div>|} in
-  Alcotest.(check string "table is rendered as html" exp html)
-
 let markdown_tests = [
   Alcotest.test_case "Simple" `Quick test_simple;
@@ -190,9 +95,6 @@ let markdown_tests = [
   Alcotest.test_case "relative image" `Quick test_relative_image;
   Alcotest.test_case "absolute image with script alt" `Quick test_absolute_image_script_alt;
   Alcotest.test_case "fragment link" `Quick test_fragment_link;
-  Alcotest.test_case "heading adjustment" `Quick test_heading_adjustment;
-  Alcotest.test_case "table" `Quick test_table;
-  Alcotest.test_case "table2" `Quick test_table2;
 ]

 let () =
@@ -14,30 +14,40 @@ module Param_verification = struct

   module P = struct

-    let is_string : (string * string) -> _ option =
-      Fun.const None
+    let is_string : (string * string) option -> _ option = function
+      | Some _ -> None
+      | None -> None

-    let is_uuid (param, value) =
-      match Uuidm.of_string value with
+    let is_uuid = function
+      | Some (param, value) ->
+        begin match Uuidm.of_string value with
         | Some _ when String.length value = 36 -> None
         | _ -> Some {
             param;
             expected = "Uuidm.t"
           }
+        end
+      | None -> None

   end

-  let verify parameters req =
+  let param req tag =
+    match Dream.param req tag with
+    | param -> Some (tag, param)
+    | exception _ -> None
+
+  let ( &&& ) v v' =
+    match v with
+    | None -> v'
+    | Some _ as some -> some
+
+  let verify req =
     let verified_params =
-      List.fold_left (fun acc p ->
-          match acc with
-          | None ->
-            if String.starts_with ~prefix:"build" p then
-              P.is_uuid (p, Dream.param req p)
-            else
-              P.is_string (p, Dream.param req p)
-          | Some _ as x -> x)
-        None parameters
+      P.is_string (param req "job")
+      &&& P.is_uuid (param req "build")
+      &&& P.is_uuid (param req "build_left")
+      &&& P.is_uuid (param req "build_right")
+      &&& P.is_string (param req "platform")
     in
     let response_json =
      verified_params |> to_yojson |> Yojson.Safe.to_string
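Aside: the ( &&& ) combinator introduced on the added side is an or-else on options — keep the first verification failure found, otherwise run the next check. A standalone sketch with its behaviour spelled out as assertions:

  let ( &&& ) v v' =
    match v with
    | None -> v'              (* no error so far: use the next check's result *)
    | Some _ as some -> some  (* the first error wins *)

  let () =
    assert ((None &&& Some "err") = Some "err");
    assert ((Some "first" &&& Some "second") = Some "first")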
@@ -46,22 +56,14 @@ module Param_verification = struct

 end

-let find_parameters path =
-  List.filter_map (fun s ->
-      if String.length s > 0 && String.get s 0 = ':' then
-        Some (String.sub s 1 (String.length s - 1))
-      else
-        None)
-    (String.split_on_char '/' path)
-
 let router =
   (* XXX: this relies on [Builder_web.routes] only using {data,cache,config}dir
    * in the handlers which are never called here. The path /nonexistant is
    * assumed to not exist. *)
   let nodir = Fpath.v "/nonexistant" in
-  Builder_web.routes ~datadir:nodir ~cachedir:nodir ~configdir:nodir ~expired_jobs:0
+  Builder_web.routes ~datadir:nodir ~cachedir:nodir ~configdir:nodir
   |> List.map (fun (meth, route, _handler) ->
-      meth, route, Param_verification.verify (find_parameters route))
+      meth, route, Param_verification.verify)
   |> Builder_web.to_dream_routes
   |> Dream.router
   (* XXX: we test without remove_trailing_url_slash to ensure we don't produce
@@ -157,35 +159,5 @@ let () =
       "/f/bin/unikernel.hvt";
       "/";
       "";
-    ];
-    "Albatross hardcoded links",
-    [
-      (*> Note: these links can be found in
-          albatross/command-line/albatross_client_update.ml
-          .. to find them I follewed the trails of 'Albatross_cli.http_host'
-      *)
-      begin
-        let sha_str =
-          Digestif.SHA256.(to_raw_string (digest_string "foo"))
-          |> Ohex.encode
-        in
-        Fmt.str "/hash?sha256=%s" sha_str
-      end;
-      begin
-        let jobname = "foo" in
-        "/job/" ^ jobname ^ "/build/latest"
-      end;
-      begin
-        let job = "foo" in
-        let build = Uuidm.(v `V4 |> to_string) in
-        "/job/" ^ job ^ "/build/" ^ build ^ "/main-binary"
-      end;
-      begin
-        let old_uuid = Uuidm.(v `V4 |> to_string) in
-        let new_uuid = Uuidm.(v `V4 |> to_string) in
-        Fmt.str "/compare/%s/%s" old_uuid new_uuid
-      end;
     ]
-    |> List.map Alcotest.(fun p ->
-        test_case ("…" ^ p) `Quick (test_link `GET p))
   ]
@@ -3,7 +3,7 @@ let ( >>| ) x f = Result.map f x

 module type CONN = Caqti_blocking.CONNECTION

-let () = Mirage_crypto_rng_unix.initialize (module Mirage_crypto_rng.Fortuna)
+let () = Mirage_crypto_rng_unix.initialize ()

 let iter f xs = List.fold_left (fun r x -> r >>= fun () -> f x) (Ok ()) xs
 let get_opt message = function
@ -25,8 +25,8 @@ module Testable = struct
|
||||||
x.restricted = y.restricted &&
|
x.restricted = y.restricted &&
|
||||||
match x.password_hash, y.password_hash with
|
match x.password_hash, y.password_hash with
|
||||||
| `Scrypt (hash, salt, params), `Scrypt (hash', salt', params') ->
|
| `Scrypt (hash, salt, params), `Scrypt (hash', salt', params') ->
|
||||||
String.equal hash hash' &&
|
Cstruct.equal hash hash' &&
|
||||||
String.equal salt salt' &&
|
Cstruct.equal salt salt' &&
|
||||||
params = params'
|
params = params'
|
||||||
in
|
in
|
||||||
let pp ppf { Builder_web_auth.username; password_hash; restricted } =
|
let pp ppf { Builder_web_auth.username; password_hash; restricted } =
|
||||||
|
@ -34,7 +34,7 @@ module Testable = struct
|
||||||
| `Scrypt (hash, salt, { Builder_web_auth.scrypt_n; scrypt_r; scrypt_p }) ->
|
| `Scrypt (hash, salt, { Builder_web_auth.scrypt_n; scrypt_r; scrypt_p }) ->
|
||||||
Format.fprintf ppf "user:%s;(%d,%d,%d);%B;%a;%a" username
|
Format.fprintf ppf "user:%s;(%d,%d,%d);%B;%a;%a" username
|
||||||
scrypt_n scrypt_r scrypt_p restricted
|
scrypt_n scrypt_r scrypt_p restricted
|
||||||
Ohex.pp hash Ohex.pp salt
|
Cstruct.hexdump_pp hash Cstruct.hexdump_pp salt
|
||||||
in
|
in
|
||||||
Alcotest.testable
|
Alcotest.testable
|
||||||
pp
|
pp
|
||||||
@@ -43,15 +43,18 @@ module Testable = struct
   let file =
     let equal (x : Builder_db.Rep.file) (y : Builder_db.Rep.file) =
       Fpath.equal x.filepath y.filepath &&
-      String.equal x.sha256 y.sha256 &&
+      Fpath.equal x.localpath y.localpath &&
+      Cstruct.equal x.sha256 y.sha256 &&
       x.size = y.size
     in
-    let pp ppf { Builder_db.Rep.filepath; sha256; size } =
+    let pp ppf { Builder_db.Rep.filepath; localpath; sha256; size } =
       Format.fprintf ppf "{@[<v 1>@;<1 0>Builder_db.Rep.filepath = %a;@;<1 0>\
+                          localpath = %a;@;<1 0>\
                           sha256 = %a;@;<1 0>\
                           size = %d;@;<1 0>\
                           @]@,}"
-        Fpath.pp filepath Ohex.pp sha256 size
+        Fpath.pp filepath Fpath.pp localpath
+        Cstruct.hexdump_pp sha256 size
     in
     Alcotest.testable pp equal
|
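The shape change to `Builder_db.Rep.file` is easiest to see as two record layouts. This is our reading, with field types inferred from the printers used above (`Fpath.pp`, `Ohex.pp` / `Cstruct.hexdump_pp`, `%d`); the actual definition lives in `Builder_db.Rep` and may differ:

```ocaml
(* Inferred sketch, not the repository's definition. *)
module Main = struct
  type file = {
    filepath : Fpath.t;  (* path of the artifact within the build *)
    sha256 : string;     (* raw digest bytes, hex-printed with Ohex.pp *)
    size : int;          (* artifact size in bytes *)
  }
end

module Branch = struct
  type file = {
    filepath : Fpath.t;
    localpath : Fpath.t; (* reintroduced: where the file sits on disk *)
    sha256 : Cstruct.t;  (* digest as a Cstruct.t, older mirage-crypto style *)
    size : int;
  }
end
```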
```diff
@@ -130,13 +133,14 @@ let finish = Option.get (Ptime.of_float_s 1.)
 let result = Builder.Exited 0
 let main_binary =
   let filepath = Result.get_ok (Fpath.of_string "bin/hello.sh") in
+  let localpath = Result.get_ok (Fpath.of_string "/dev/null") in
   let data = "#!/bin/sh\necho Hello, World\n" in
-  let sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
+  let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
   let size = String.length data in
-  { Builder_db.Rep.filepath; sha256; size }
+  { Builder_db.Rep.filepath; localpath; sha256; size }
 let main_binary2 =
   let data = "#!/bin/sh\necho Hello, World 2\n" in
-  let sha256 = Digestif.SHA256.(to_raw_string (digest_string data)) in
+  let sha256 = Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data) in
   let size = String.length data in
   { main_binary with sha256 ; size }
 let platform = "exotic-os"
```
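Both digest styles hash the same bytes; only the container differs — main keeps the raw digest as a string (digestif), the branch as a `Cstruct.t` (pre-1.0 mirage-crypto, as we read the version history). A sketch demonstrating they agree, assuming both packages are installed side by side:

```ocaml
(* Sketch: the two SHA-256 APIs seen in this hunk produce identical
   digest bytes; only string vs Cstruct.t differs. *)
let data = "#!/bin/sh\necho Hello, World\n"

let sha256_string =                          (* main: digestif *)
  Digestif.SHA256.(to_raw_string (digest_string data))

let sha256_cstruct =                         (* branch: mirage-crypto *)
  Mirage_crypto.Hash.SHA256.digest (Cstruct.of_string data)

let () =
  assert (String.equal sha256_string (Cstruct.to_string sha256_cstruct))
```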
```diff
@@ -145,6 +149,7 @@ let fail_if_none a =
   Option.to_result ~none:(`Msg "Failed to retrieve") a
 
 let add_test_build user_id (module Db : CONN) =
+  let r =
     let open Builder_db in
     Db.start () >>= fun () ->
     Db.exec Job.try_add job_name >>= fun () ->
```
```diff
@@ -156,6 +161,9 @@ let add_test_build user_id (module Db : CONN) =
     Db.find last_insert_rowid () >>= fun main_binary_id ->
     Db.exec Build.set_main_binary (id, main_binary_id) >>= fun () ->
     Db.commit ()
+  in
+  Result.fold ~ok:Result.ok ~error:(fun _ -> Db.rollback ())
+    r
 
 let with_build_db f () =
   or_fail
```
|
||||||
Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
|
Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
|
||||||
Db.find_opt Builder_db.Build.get_latest_successful_with_binary (job_id, platform)
|
Db.find_opt Builder_db.Build.get_latest_successful_with_binary (job_id, platform)
|
||||||
>>| get_opt "no latest build" >>| fun (_id, meta, main_binary') ->
|
>>| get_opt "no latest build" >>| fun (_id, meta, main_binary') ->
|
||||||
Alcotest.(check Testable.file) "same main binary" main_binary2 main_binary';
|
Alcotest.(check (option Testable.file)) "same main binary" main_binary' (Some main_binary2);
|
||||||
Alcotest.(check Testable.uuid) "same uuid" meta.uuid uuid'
|
Alcotest.(check Testable.uuid) "same uuid" meta.uuid uuid'
|
||||||
|
|
||||||
let test_build_get_previous (module Db : CONN) =
|
let test_build_get_previous (module Db : CONN) =
|
||||||
|
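One nuance in this hunk: main unwraps with `get_opt` and compares the files directly, while the branch compares `'a option` values through Alcotest's `option` combinator — and note that `check` takes the expected value before the actual one, an order main's line follows. A tiny illustrative check, not taken from the diff:

```ocaml
(* Illustrative only: Alcotest.option lifts a testable to 'a option;
   check : 'a testable -> string -> 'a -> 'a -> unit, expected first. *)
let () =
  Alcotest.(check (option string)) "same main binary"
    (Some "hello.sh")  (* expected *)
    (Some "hello.sh")  (* actual *)
```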
```diff
@@ -261,14 +269,6 @@ let test_artifact_get_by_build_uuid (module Db : CONN) =
   get_opt "no build" >>| fun (_id, file) ->
   Alcotest.(check Testable.file) "same file" file main_binary
 
-let test_artifact_exists_true (module Db : CONN) =
-  Db.find Builder_db.Build_artifact.exists main_binary.sha256 >>| fun exists ->
-  Alcotest.(check bool) "main binary exists" true exists
-
-let test_artifact_exists_false (module Db : CONN) =
-  Db.find Builder_db.Build_artifact.exists main_binary2.sha256 >>| fun exists ->
-  Alcotest.(check bool) "main binary2 doesn't exists" false exists
-
 (* XXX: This test should fail because main_binary on the corresponding build
  * references its main_binary. This is not the case now due to foreign key. *)
 let test_artifact_remove_by_build (module Db : CONN) =
```
```diff
@@ -276,39 +276,6 @@ let test_artifact_remove_by_build (module Db : CONN) =
   get_opt "no build" >>= fun (id, _build) ->
   Db.exec Builder_db.Build_artifact.remove_by_build id
 
-let test_get_builds_older_than (module Db : CONN) =
-  add_second_build (module Db) >>= fun () ->
-  let date = Option.get (Ptime.of_float_s (3600. /. 2.)) in
-  Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
-  Db.collect_list Builder_db.Build.get_builds_older_than (job_id, None, date) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "last build" builds [ uuid ];
-  Db.collect_list Builder_db.Build.get_builds_older_than (job_id, None, Ptime_clock.now ()) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  (* NOTE(dinosaure): from the most recent to the older. *)
-  Alcotest.(check (list Testable.uuid)) "last builds" builds [ uuid'; uuid ];
-  Ok ()
-
-let test_builds_excluding_latest_n (module Db : CONN) =
-  add_second_build (module Db) >>= fun () ->
-  Db.find_opt Builder_db.Job.get_id_by_name job_name >>= fail_if_none >>= fun job_id ->
-  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 1) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "keep recent build" builds [ uuid ];
-  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 2) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "keep 2 builds" builds [];
-  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 3) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "last more builds than we have" builds [];
-  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, 0) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "delete all builds" builds [ uuid'; uuid ];
-  Db.collect_list Builder_db.Build.get_builds_excluding_latest_n (job_id, None, -1) >>= fun builds ->
-  let builds = List.map (fun (_, { Builder_db.Build.uuid; _ }) -> uuid) builds in
-  Alcotest.(check (list Testable.uuid)) "test an incomprehensible argument (-1)" builds [ uuid'; uuid ];
-  Ok ()
-
 let () =
   let open Alcotest in
   Alcotest.run "Builder_db" [
```
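The removed `test_get_builds_older_than` anchors its cut-off with `Ptime.of_float_s`, which interprets the float as seconds since the POSIX epoch: `3600. /. 2.` is 00:30:00 UTC on 1970-01-01 — presumably chosen to fall between the two test builds' timestamps, so the first query returns only the older build. A quick check of that reading:

```ocaml
(* Prints 1970-01-01T00:30:00-00:00 (RFC 3339, unknown offset).
   Option.get is safe here: 1800 s is trivially within Ptime's range. *)
let date = Option.get (Ptime.of_float_s (3600. /. 2.))
let () = Format.printf "%a@." (Ptime.pp_rfc3339 ()) date
```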
```diff
@@ -339,12 +306,6 @@ let () =
     "build-artifact", [
       test_case "Get all by build" `Quick (with_build_db test_artifact_get_all_by_build);
       test_case "Get by build uuid" `Quick (with_build_db test_artifact_get_by_build_uuid);
-      test_case "Artifact exists" `Quick (with_build_db test_artifact_exists_true);
-      test_case "Other artifact doesn't exists" `Quick (with_build_db test_artifact_exists_false);
       test_case "Remove by build" `Quick (with_build_db test_artifact_remove_by_build);
     ];
-    "vacuum", [
-      test_case "Get builds older than now" `Quick (with_build_db test_get_builds_older_than);
-      test_case "Get older builds and keep a fixed number of then" `Quick (with_build_db test_builds_excluding_latest_n);
-    ]
   ]
```
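Finally, the dropped "vacuum" group is just another `(name, test-case list)` pair handed to `Alcotest.run`. A self-contained stub showing that shape — the no-op body stands in for the real `with_build_db …` wrappers:

```ocaml
(* Minimal sketch of the suite structure used in this file:
   Alcotest.run takes a suite name and (group, test-case list) pairs. *)
let () =
  Alcotest.run "Builder_db" [
    "vacuum", [
      Alcotest.test_case "Get builds older than now" `Quick (fun () -> ());
    ];
  ]
```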