Compare commits
55e0bbc6c6 ... main (6 commits)

| Author | SHA1 | Date |
|---|---|---|
| | b0079a2756 | |
| | 5171770f1a | |
| | 334ed4ce9a | |
| | e874c39dbe | |
| | 7191de0ee5 | |
| | 71d38cbd78 | |

figure-out-flake-outputs.nix (new file, 36 lines)
@@ -0,0 +1,36 @@
outputAttrs: let

  attrNamePathToString = attrNamePath: builtins.concatStringsSep "." attrNamePath;

  outputInfo = drv: builtins.listToAttrs (builtins.map (outputName: {
    name = outputName;
    value = {
      store_path = drv."${outputName}".outPath;
    };
  }) drv.outputs);

  recurseAttrs = attrs: attrNamePath: builtins.concatLists (builtins.attrValues (
    builtins.mapAttrs (attrName: attrValue:
      if builtins.typeOf attrValue == "set" then
        if builtins.hasAttr "type" attrValue then
          if attrValue.type == "derivation" then
            [ {
              name = attrNamePathToString (attrNamePath ++ [ attrName ]);
              value = {
                name = attrValue.name;
                derivation_path = attrValue.drvPath;
                system = attrValue.system;
                outputs = outputInfo attrValue;
              };
            } ]
          else
            [ "unknown type" ]
        else
          recurseAttrs attrValue (attrNamePath ++ [ attrName ])
      else
        []
    ) attrs
  ));

in
  builtins.listToAttrs (recurseAttrs outputAttrs [])
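
The expression above flattens a nested attribute set (here a flake's hydraJobs, per fetch_outputs in src/scan.rs) into a flat map from dotted attribute paths to derivation metadata: name, drvPath, system, and a store path per build output. Below is a minimal sketch of the JSON shape it emits; the attribute path and /nix/store paths are made-up placeholders, not captured from a real evaluation, and the test only checks that the sample parses with serde_json (already a dependency of src/scan.rs):

```rust
#[cfg(test)]
mod output_shape_example {
    // The attribute path and store paths below are illustrative placeholders only.
    #[test]
    fn sample_output_shape_parses() {
        let sample = r#"{
            "packages.x86_64-linux.flake-tracker": {
                "name": "flake-tracker-0.1.0",
                "derivation_path": "/nix/store/placeholder-flake-tracker-0.1.0.drv",
                "system": "x86_64-linux",
                "outputs": {
                    "out": { "store_path": "/nix/store/placeholder-flake-tracker-0.1.0" }
                }
            }
        }"#;

        let parsed: serde_json::Value = serde_json::from_str(sample).unwrap();
        let attr = &parsed["packages.x86_64-linux.flake-tracker"];
        assert_eq!(attr["system"], "x86_64-linux");
        assert!(attr["outputs"]["out"]["store_path"].is_string());
    }
}
```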

flake.lock (generated, new file, 27 lines)
@@ -0,0 +1,27 @@
{
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1755615617,
        "narHash": "sha256-HMwfAJBdrr8wXAkbGhtcby1zGFvs+StOp19xNsbqdOg=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "20075955deac2583bb12f07151c2df830ef346b4",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "nixpkgs": "nixpkgs"
      }
    }
  },
  "root": "root",
  "version": 7
}

flake.nix (new file, 28 lines)
@@ -0,0 +1,28 @@
{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
  };
  outputs = { self, nixpkgs, ... }: let
    forAllSystems = f: (nixpkgs.lib.genAttrs [ "x86_64-linux" "aarch64-linux" ] (system: let
      pkgs = import nixpkgs { inherit system; };
    in f { inherit pkgs system; } ));
  in {
    packages = forAllSystems ({ pkgs, system, ... }: {
      flake-tracker = pkgs.rustPlatform.buildRustPackage rec {
        pname = "flake-tracker";
        version = "0.1.0";

        src = ./.;

        cargoLock.lockFile = ./Cargo.lock;

      };
      default = self.packages."${system}".flake-tracker;
    });

    hydraJobs = {
      inherit (self)
        packages;
    };
  };
}

schema.sql (33 changed lines)
@@ -1,23 +1,20 @@
CREATE TABLE flakes (
    flake_uri TEXT PRIMARY KEY NOT NULL,
    tracker_track BOOLEAN NOT NULL,
    tracker_last_scanned INT
);

CREATE TABLE revisions (
    revision_uri TEXT PRIMARY KEY NOT NULL,
    flake_uri TEXT,
    nix_store_path TEXT,
    nar_hash TEXT,
    last_modified INT
    revision_uri TEXT NOT NULL PRIMARY KEY,
    flake_uri TEXT NOT NULL,
    store_path TEXT NOT NULL,
    last_modified INT NOT NULL
);

CREATE TABLE inputs (
CREATE TABLE output_attributes (
    revision_uri TEXT NOT NULL,
    input_name TEXT NOT NULL,
    locked_revision_uri TEXT,
    locked_flake_uri TEXT,
    locked_nar_hash TEXT,
    last_modified INT,
    PRIMARY KEY (revision_uri, input_name)
    output_attribute_name TEXT NOT NULL,
    derivation_path TEXT NOT NULL,
    PRIMARY KEY (revision_uri, output_attribute_name)
);

CREATE TABLE build_outputs (
    derivation_path TEXT NOT NULL,
    build_output_name TEXT NOT NULL,
    store_path TEXT NOT NULL,
    PRIMARY KEY (derivation_path, build_output_name)
);

src/bin/scan-revision.rs (new file, 35 lines)
@@ -0,0 +1,35 @@
use anyhow::{
    Context,
    Result,
};
use clap::{
    Parser,
};
use flake_tracker::{
    scan::{
        scan_revision,
    },
    storage::{
        Storage,
    },
};

#[derive(Parser)]
#[command(version, about, long_about = None)]
struct Cli {
    flake_uri: String,
    revision_uri: String,
}

#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    let storage = Storage::connect("sqlite://flake-tracker.db")
        .await
        .context("Failed to connect to database")?;

    scan_revision(storage, &cli.flake_uri, Some(&cli.revision_uri)).await?;

    Ok(())
}

@@ -76,11 +76,6 @@ pub fn assemble_flake_tarball_uri(flake_source: &FlakeSource) -> Result<String>
    out.push_str(&flake_source.url.clone()
        .context("Flake tarball uri does not contain an url")?);

    if let Some(rev) = &flake_source.rev {
        out.push_str("?rev=");
        out.push_str(rev);
    }

    Ok(out)
}

src/main.rs (new file, 27 lines)
@@ -0,0 +1,27 @@
use anyhow::{
    Result,
};
use clap::{
    Parser,
};

#[derive(Parser)]
#[command(version, about, long_about = None)]
struct Cli {
    /// Path to config file
    #[arg(long, default_value_t = String::from("flake-tracker.json"))]
    config: String,
    /// Address to bind web server to
    #[arg(long, default_value_t = String::from("[::1]:3000"))]
    listen: String,
    /// Path to database file
    #[arg(long, default_value_t = String::from("flake-tracker.db"))]
    database: String,
}

#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    Ok(())
}
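
As added here, main only parses the CLI flags and returns. A minimal sketch of how those flags might eventually be wired up, reusing the Cli struct above; treating flake_tracker::web::make_router as publicly exported and serving it with tokio::net::TcpListener and axum::serve are assumptions about the intended wiring, not something this diff establishes:

```rust
#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    // Open the database named on the command line instead of a hard-coded path.
    let storage = flake_tracker::storage::Storage::connect(&format!("sqlite://{}", cli.database))
        .await?;

    // Build the axum Router from src/web.rs and serve it on the --listen address.
    let app = flake_tracker::web::make_router(storage)?;
    let listener = tokio::net::TcpListener::bind(cli.listen.as_str()).await?;
    axum::serve(listener, app).await?;

    Ok(())
}
```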

src/scan.rs (103 changed lines)
@@ -12,72 +12,53 @@ use crate::{
        FlakeUri,
    },
    storage::{
        InputRow,
        FlakeRow,
        OutputAttributeRow,
        RevisionRow,
        Storage,
    },
};
use serde::{
    Deserialize,
};
use std::collections::HashMap;
use std::process::Command;

#[derive(Deserialize, Debug)]
pub struct OutputAttribute {
    pub derivation_path: String,
    pub name: String,
    pub outputs: HashMap<String, BuildOutput>,
}

#[derive(Deserialize, Debug)]
pub struct BuildOutput {
    pub store_path: String,
}

pub async fn scan_flake(storage: Storage, flake_uri: &str) -> Result<()> {
    let scan_time = Utc::now().timestamp();

    let flake_metadata = fetch_metadata(flake_uri)
        .await?;

    let revision_row = get_revision_from_metadata(&flake_metadata)?;
    let mut revision_row = get_revision_from_metadata(&flake_metadata)?;

    storage.set_revision(revision_row)
        .await?;

    let mut flake_row = get_flake_from_metadata(&flake_metadata)?;
    flake_row.tracker_last_scanned = Some(scan_time.clone());

    storage.set_flake(flake_row)
    let flake_outputs = fetch_outputs(flake_uri)
        .await?;

    let locks_root_name = &flake_metadata.locks.root;
    let locks_root_node = flake_metadata.locks.nodes.get(locks_root_name)
        .context("Failed to get locks root node")?;

    for (input_name, locks_input_name) in locks_root_node.inputs.clone().context("No inputs found for flake")? {

        if let FlakeLocksNodeInputs::String(locks_input_name) = locks_input_name {
            let locks_input_node = flake_metadata.locks.nodes.get(&locks_input_name)
                .context("Failed to find lock of input")?;

            let input_row = InputRow {
    for (output_attribute_name, derivation_info) in &flake_outputs {
        let output_attribute_row = OutputAttributeRow {
            revision_uri: flake_metadata.locked.flake_uri()?.clone(),
            input_name: input_name.clone(),
            locked_revision_uri: Some(locks_input_node.locked.clone().context("Unexpected missing lock")?.flake_uri()?),
            locked_flake_uri: Some(locks_input_node.original.clone().context("Unexpected missing lock")?.flake_uri()?),
            locked_nar_hash: Some(locks_input_node.locked.clone().context("Unexpected missing lock")?.nar_hash),
            last_modified: Some(locks_input_node.locked.clone().context("Unexpected missing lock")?.last_modified),
        };
        storage.set_input(input_row)
            .await?;

        let revision_row = RevisionRow {
            revision_uri: locks_input_node.locked.clone().context("Unexpected missing lock")?.flake_uri()?.clone(),
            flake_uri: Some(locks_input_node.original.clone().context("Unexpected missing lock")?.flake_uri()?.clone()),
            nix_store_path: None,
            nar_hash: None,
            last_modified: None,
        };
        storage.set_revision_exist(revision_row)
            .await?;

        let flake_row = FlakeRow {
            flake_uri: locks_input_node.original.clone().context("Unexpected missing lock")?.flake_uri()?.clone(),
            tracker_track: false,
            tracker_last_scanned: None,
            output_attribute_name: output_attribute_name.clone(),
            derivation_path: derivation_info.derivation_path.clone(),
        };

        storage.set_flake_exist(flake_row)
        storage.set_output_attribute(output_attribute_row)
            .await?;

    }
    }

    Ok(())
@@ -98,24 +79,32 @@ pub async fn fetch_metadata(flake_uri: &str) -> Result<FlakeMetadata> {
    Ok(flake_metadata)
}

pub fn get_flake_from_metadata(flake_metadata: &FlakeMetadata) -> Result<FlakeRow> {
    let flake_row = FlakeRow {
        flake_uri: flake_metadata.resolved.flake_uri()?.clone(),
        tracker_track: false,
        tracker_last_scanned: None,
    };

    Ok(flake_row)
}

pub fn get_revision_from_metadata(flake_metadata: &FlakeMetadata) -> Result<RevisionRow> {
    let revision_row = RevisionRow {
        revision_uri: flake_metadata.locked.flake_uri()?.clone(),
        flake_uri: Some(flake_metadata.resolved.flake_uri()?.clone()),
        nix_store_path: Some(flake_metadata.path.clone()),
        nar_hash: Some(flake_metadata.locked.nar_hash.clone()),
        last_modified: Some(flake_metadata.locked.last_modified.clone()),
        flake_uri: flake_metadata.resolved.flake_uri()?.clone(),
        store_path: flake_metadata.path.clone(),
        last_modified: flake_metadata.locked.last_modified.clone(),
    };

    Ok(revision_row)
}

pub async fn fetch_outputs(flake_uri: &str) -> Result<HashMap<String, OutputAttribute>> {
    let figure_out_flake_outputs = std::include_str!("../figure-out-flake-outputs.nix");

    let flake_outputs_raw = Command::new("nix")
        .arg("eval")
        .arg("--json")
        .arg(format!("{}#hydraJobs", flake_uri))
        .arg("--apply")
        .arg(figure_out_flake_outputs)
        .output()
        .context("Failed to fetch flake outputs")?;

    println!("{}", str::from_utf8(&flake_outputs_raw.stdout)?);
    let flake_outputs: HashMap<String, OutputAttribute> = serde_json::from_slice(&flake_outputs_raw.stdout)
        .context("Failed to parse flake outputs")?;

    Ok(flake_outputs)
}
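
fetch_outputs already deserializes the per-output store paths into BuildOutput, but scan_flake only records each attribute's derivation path; nothing in this diff writes to the new build_outputs table. A sketch of a helper that could close that gap, assuming a Storage::set_build_output upsert along the lines of the one sketched after src/storage.rs below (both the helper and that method are hypothetical, not part of this diff):

```rust
use anyhow::Result;
use flake_tracker::{
    scan::OutputAttribute,
    storage::{BuildOutputRow, Storage},
};

// Hypothetical: persist one build_outputs row per output of an attribute,
// reusing the data fetch_outputs already returns.
pub async fn record_build_outputs(
    storage: &Storage,
    derivation_info: &OutputAttribute,
) -> Result<()> {
    for (build_output_name, build_output) in &derivation_info.outputs {
        let build_output_row = BuildOutputRow {
            derivation_path: derivation_info.derivation_path.clone(),
            build_output_name: build_output_name.clone(),
            store_path: build_output.store_path.clone(),
        };

        // set_build_output is the assumed upsert sketched after src/storage.rs.
        storage.set_build_output(build_output_row).await?;
    }

    Ok(())
}
```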

src/storage.rs (246 changed lines)
@@ -35,40 +35,12 @@ impl Storage {
        })
    }

    pub async fn set_flake(&self, flake_row: FlakeRow) -> Result<SqliteQueryResult> {
        sqlx::query("INSERT INTO flakes (flake_uri, tracker_track, tracker_last_scanned)
            VALUES (?, ?, ?)
            ON CONFLICT(flake_uri) DO UPDATE SET
                tracker_track=excluded.tracker_track,
                tracker_last_scanned=excluded.tracker_last_scanned
        ")
        .bind(&flake_row.flake_uri)
        .bind(&flake_row.tracker_track)
        .bind(&flake_row.tracker_last_scanned)
        .execute(&self.db)
        .await
        .context("Failed to execute database query")
    }

    pub async fn set_flake_exist(&self, flake_row: FlakeRow) -> Result<SqliteQueryResult> {
        sqlx::query("INSERT INTO flakes (flake_uri, tracker_track)
            VALUES (?, FALSE)
            ON CONFLICT(flake_uri) DO NOTHING
        ")
        .bind(&flake_row.flake_uri)
        .execute(&self.db)
        .await
        .context("Failed to execute database query")
    }

    pub async fn flakes(&self) -> Result<Vec<FlakeRow>> {
        sqlx::query_as("
            SELECT
                flake_uri,
                tracker_track,
                tracker_last_scanned
            FROM flakes
            ORDER BY flake_uri
                flake_uri
            FROM revisions
            GROUP BY flake_uri
        ")
        .fetch_all(&self.db)
        .await
@@ -76,211 +48,147 @@ impl Storage {
    }

    pub async fn set_revision(&self, revision_row: RevisionRow) -> Result<SqliteQueryResult> {
        sqlx::query("INSERT INTO revisions (revision_uri, flake_uri, nix_store_path, nar_hash, last_modified)
            VALUES (?, ?, ?, ?, ?)
        sqlx::query("INSERT INTO revisions (revision_uri, flake_uri, store_path, last_modified)
            VALUES (?, ?, ?, ?)
            ON CONFLICT(revision_uri) DO UPDATE SET
                flake_uri=excluded.flake_uri,
                nix_store_path=excluded.nix_store_path,
                nar_hash=excluded.nar_hash,
                store_path=excluded.store_path,
                last_modified=excluded.last_modified
        ")
        .bind(&revision_row.revision_uri)
        .bind(&revision_row.flake_uri)
        .bind(&revision_row.nix_store_path)
        .bind(&revision_row.nar_hash)
        .bind(&revision_row.store_path)
        .bind(&revision_row.last_modified)
        .execute(&self.db)
        .await
        .context("Failed to execute database query")
    }

    pub async fn set_revision_exist(&self, revision_row: RevisionRow) -> Result<SqliteQueryResult> {
        sqlx::query("INSERT INTO revisions (revision_uri, flake_uri)
            VALUES (?, ?)
            ON CONFLICT(revision_uri) DO NOTHING
        ")
        .bind(&revision_row.revision_uri)
        .bind(&revision_row.flake_uri)
        .execute(&self.db)
        .await
        .context("Failed to execute database query")
    }

    pub async fn revisions_from_flake(&self, uri: &str) -> Result<Vec<RevisionRow>> {
    pub async fn revisions_from_flake(&self, flake_uri: &str) -> Result<Vec<RevisionRow>> {
        sqlx::query_as("
            SELECT
                revision_uri,
                flake_uri,
                nix_store_path,
                nar_hash,
                store_path,
                last_modified
            FROM revisions
            WHERE flake_uri = ?
            ORDER BY last_modified DESC
        ")
        .bind(&uri)
        .bind(&flake_uri)
        .fetch_all(&self.db)
        .await
        .context("Failed to fetch data from database")
    }

    pub async fn set_input(&self, input_row: InputRow) -> Result<SqliteQueryResult> {
        sqlx::query("INSERT INTO inputs (revision_uri, input_name, locked_revision_uri, locked_flake_uri, locked_nar_hash, last_modified)
            VALUES (?, ?, ?, ?, ?, ?)
            ON CONFLICT(revision_uri, input_name) DO UPDATE SET
                locked_revision_uri=excluded.locked_revision_uri,
                locked_flake_uri=excluded.locked_flake_uri,
                locked_nar_hash=excluded.locked_nar_hash,
                last_modified=excluded.last_modified
    pub async fn revision(&self, revision_uri: &str) -> Result<RevisionRow> {
        sqlx::query_as("
            SELECT
                revision_uri,
                flake_uri,
                store_path,
                last_modified
            FROM revisions
            WHERE revision_uri = ?
            LIMIT 1
        ")
        .bind(input_row.revision_uri)
        .bind(input_row.input_name)
        .bind(input_row.locked_revision_uri)
        .bind(input_row.locked_flake_uri)
        .bind(input_row.locked_nar_hash)
        .bind(input_row.last_modified)
        .bind(&revision_uri)
        .fetch_one(&self.db)
        .await
        .context("Failed to fetch data from database")
    }

    pub async fn set_output_attribute(&self, output_attribute_row: OutputAttributeRow) -> Result<SqliteQueryResult> {
        sqlx::query("INSERT INTO output_attributes (revision_uri, output_attribute_name, derivation_path)
            VALUES (?, ?, ?)
            ON CONFLICT(revision_uri, output_attribute_name) DO UPDATE SET
                derivation_path=excluded.derivation_path
        ")
        .bind(&output_attribute_row.revision_uri)
        .bind(&output_attribute_row.output_attribute_name)
        .bind(&output_attribute_row.derivation_path)
        .execute(&self.db)
        .await
        .context("Failed to execute database query")

    }

    pub async fn inputs_for_revision(&self, revision_uri: &str) -> Result<Vec<InputRow>> {
    pub async fn output_attributes_for_revision(&self, revision_uri: &str) -> Result<Vec<OutputAttributeRow>> {
        sqlx::query_as("
            SELECT
                revision_uri,
                input_name,
                locked_revision_uri,
                locked_flake_uri,
                locked_nar_hash,
                last_modified
            FROM inputs
                output_attribute_name,
                derivation_path
            FROM output_attributes
            WHERE revision_uri = ?
            ORDER BY input_name
            ORDER BY output_attribute_name DESC
        ")
        .bind(&revision_uri)
        .fetch_all(&self.db)
        .await
        .context("Failed to fetch data from database")
    }

    pub async fn input_of_for_revision(&self, revision_uri: &str) -> Result<Vec<InputRow>> {
        sqlx::query_as("
            SELECT
                revision_uri,
                input_name,
                locked_revision_uri,
                locked_flake_uri,
                locked_nar_hash,
                last_modified
            FROM inputs
            WHERE locked_revision_uri = ?
            ORDER BY input_name
        ")
        .bind(&revision_uri)
        .fetch_all(&self.db)
        .await
        .context("Failed to fetch data from database")
    }

    pub async fn current_inputs_for_flake(&self, flake_uri: &str) -> Result<Vec<InputRow>> {
        sqlx::query_as("

            SELECT
                revisions.revision_uri,
                inputs.input_name,
                inputs.locked_revision_uri,
                inputs.locked_flake_uri,
                inputs.locked_nar_hash,
                inputs.last_modified,
                MAX(revisions.last_modified)
            FROM
                revisions
            LEFT JOIN
                inputs
            ON
                revisions.revision_uri = inputs.revision_uri
            WHERE
                revisions.flake_uri = ?
            GROUP BY
                inputs.input_name
        ")
        .bind(&flake_uri)
        .fetch_all(&self.db)
        .await
        .context("Failed to fetch data from database")
    }
}

#[derive(FromRow)]
pub struct RevisionRow {
    pub revision_uri: String,
    pub flake_uri: Option<String>,
    pub nix_store_path: Option<String>,
    pub nar_hash: Option<String>,
    pub last_modified: Option<i64>,
    pub flake_uri: String,
    pub store_path: String,
    pub last_modified: i64,
}

impl RevisionRow {
    pub fn revision_link(&self) -> String {
        format!("/r/{}", urlencode(&self.revision_uri))
        format!("/revisions/{}", urlencode(&self.revision_uri))
    }

    pub fn flake_link(&self) -> String {
        match &self.flake_uri {
            Some(flake_uri) => format!("/f/{}", urlencode(&flake_uri)),
            None => String::from("#"),
        }
        format!("/flakes/{}", urlencode(&self.flake_uri))
    }

    pub fn last_modified_time(&self) -> Option<DateTime<Utc>> {
        match &self.last_modified {
            Some(last_modified) => DateTime::from_timestamp(last_modified.clone(), 0),
            None => None,
        }
    }
}

#[derive(FromRow)]
pub struct InputRow {
    pub revision_uri: String,
    pub input_name: String,
    pub locked_revision_uri: Option<String>,
    pub locked_flake_uri: Option<String>,
    pub locked_nar_hash: Option<String>,
    pub last_modified: Option<i64>,
}

impl InputRow {
    pub fn revision_link(&self) -> String {
        format!("/r/{}", urlencode(&self.revision_uri))
    }

    pub fn locked_revision_link(&self) -> String {
        match &self.locked_revision_uri {
            Some(locked_revision_uri) => format!("/r/{}", urlencode(&locked_revision_uri)),
            None => String::from("#"),
        }
    }

    pub fn locked_flake_link(&self) -> String {
        match &self.locked_flake_uri {
            Some(locked_flake_uri) => format!("/f/{}", urlencode(&locked_flake_uri)),
            None => String::from("#"),
        }
        DateTime::from_timestamp(self.last_modified.clone(), 0)
    }
}

#[derive(FromRow)]
pub struct FlakeRow {
    pub flake_uri: String,
    pub tracker_track: bool,
    pub tracker_last_scanned: Option<i64>,
}

impl FlakeRow {
    pub fn flake_link(&self) -> String {
        format!("/f/{}", urlencode(&self.flake_uri))
        format!("/flakes/{}", urlencode(&self.flake_uri))
    }
}

#[derive(FromRow)]
pub struct OutputAttributeRow {
    pub revision_uri: String,
    pub output_attribute_name: String,
    pub derivation_path: String,
}

impl OutputAttributeRow {
    pub fn revision_link(&self) -> String {
        format!("/revisions/{}", urlencode(&self.revision_uri))
    }

    pub fn derivation_link(&self) -> String {
        format!("/derivations/{}", urlencode(&self.derivation_path))
    }
}

#[derive(FromRow)]
pub struct BuildOutputRow {
    pub derivation_path: String,
    pub build_output_name: String,
    pub store_path: String,
}

impl BuildOutputRow {
    pub fn derivation_link(&self) -> String {
        format!("/derivations/{}", urlencode(&self.derivation_path))
    }
}
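
The diff adds the build_outputs table and a BuildOutputRow struct, but no Storage method writes to or reads from it yet. A minimal sketch of what such helpers could look like, mirroring the upsert and query style of the surrounding methods; the method names and their use are assumptions, not part of this diff:

```rust
impl Storage {
    // Hypothetical upsert for the new build_outputs table, following the same
    // ON CONFLICT pattern as set_output_attribute.
    pub async fn set_build_output(&self, build_output_row: BuildOutputRow) -> Result<SqliteQueryResult> {
        sqlx::query("INSERT INTO build_outputs (derivation_path, build_output_name, store_path)
            VALUES (?, ?, ?)
            ON CONFLICT(derivation_path, build_output_name) DO UPDATE SET
                store_path=excluded.store_path
        ")
        .bind(&build_output_row.derivation_path)
        .bind(&build_output_row.build_output_name)
        .bind(&build_output_row.store_path)
        .execute(&self.db)
        .await
        .context("Failed to execute database query")
    }

    // Hypothetical lookup that a dedicated derivation page could use.
    pub async fn build_outputs_for_derivation(&self, derivation_path: &str) -> Result<Vec<BuildOutputRow>> {
        sqlx::query_as("
            SELECT
                derivation_path,
                build_output_name,
                store_path
            FROM build_outputs
            WHERE derivation_path = ?
            ORDER BY build_output_name
        ")
        .bind(&derivation_path)
        .fetch_all(&self.db)
        .await
        .context("Failed to fetch data from database")
    }
}
```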

@@ -4,7 +4,7 @@ use askama::{
use crate::{
    storage::{
        FlakeRow,
        InputRow,
        OutputAttributeRow,
        RevisionRow,
    },
};
@@ -12,26 +12,20 @@ use crate::{
#[derive(Template)]
#[template(path = "index.html")]
pub struct IndexTemplate {
}

#[derive(Template)]
#[template(path = "flake_list.html")]
pub struct FlakeListTemplate {
    pub flakes: Vec<FlakeRow>,
}

#[derive(Template)]
#[template(path = "flake.html")]
pub struct FlakeTemplate {
    pub uri: String,
    pub flake_uri: String,
    pub revisions: Vec<RevisionRow>,
    pub current_inputs: Vec<InputRow>,
}

#[derive(Template)]
#[template(path = "revision.html")]
pub struct RevisionTemplate {
    pub revision_uri: String,
    pub inputs: Vec<InputRow>,
    pub input_of: Vec<InputRow>,
    pub revision: RevisionRow,
    pub output_attributes: Vec<OutputAttributeRow>,
}

src/web.rs (26 changed lines)
@@ -24,7 +24,6 @@ use crate::{
        Storage,
    },
    templates::{
        FlakeListTemplate,
        FlakeTemplate,
        IndexTemplate,
        RevisionTemplate,
@@ -85,34 +84,29 @@ pub fn make_router(storage: Storage) -> anyhow::Result<Router> {

    let app = Router::new()
        .route("/", get(route_index))
        .route("/flakes", get(route_flakes))
        .route("/f/{uri}", get(route_flake))
        .route("/r/{revision_uri}", get(route_revision))
        .route("/flakes/{flake_uri}", get(route_flake))
        .route("/revisions/{revision_uri}", get(route_revision))
        .route("/derivation/{derivation_path}", get(route_revision))
        .with_state(state);

    Ok(app)
}

async fn route_index() -> Result<impl IntoResponse, AppError> {
    Ok(render_template(&IndexTemplate {})?)
}

async fn route_flakes(
async fn route_index(
    State(state): State<AppState>,
) -> Result<impl IntoResponse, AppError> {
    Ok(render_template(&FlakeListTemplate {
    Ok(render_template(&IndexTemplate {
        flakes: state.storage.flakes().await?,
    })?)
}

async fn route_flake(
    State(state): State<AppState>,
    Path(uri): Path<String>,
    Path(flake_uri): Path<String>,
) -> Result<impl IntoResponse, AppError> {
    Ok(render_template(&FlakeTemplate {
        uri: uri.clone(),
        revisions: state.storage.revisions_from_flake(&uri).await?,
        current_inputs: state.storage.current_inputs_for_flake(&uri).await?,
        flake_uri: flake_uri.clone(),
        revisions: state.storage.revisions_from_flake(&flake_uri).await?,
    })?)
}

@@ -123,7 +117,7 @@ async fn route_revision(

    Ok(render_template(&RevisionTemplate {
        revision_uri: revision_uri.clone(),
        inputs: state.storage.inputs_for_revision(&revision_uri).await?,
        input_of: state.storage.input_of_for_revision(&revision_uri).await?,
        revision: state.storage.revision(&revision_uri).await?,
        output_attributes: state.storage.output_attributes_for_revision(&revision_uri).await?,
    })?)
}
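
In this hunk the new /derivation/{derivation_path} route is still wired to route_revision. A sketch of the dedicated handler it might grow into, following the pattern of the handlers above; DerivationTemplate and build_outputs_for_derivation are assumptions (the latter is the hypothetical Storage method sketched after src/storage.rs), not part of this diff:

```rust
// Hypothetical handler for /derivation/{derivation_path}; DerivationTemplate
// would be an askama template defined alongside RevisionTemplate.
async fn route_derivation(
    State(state): State<AppState>,
    Path(derivation_path): Path<String>,
) -> Result<impl IntoResponse, AppError> {
    Ok(render_template(&DerivationTemplate {
        derivation_path: derivation_path.clone(),
        build_outputs: state.storage.build_outputs_for_derivation(&derivation_path).await?,
    })?)
}
```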

@@ -10,7 +10,7 @@
    {% block content %}{% endblock %}
    </div>

    <center><small><a href="/">Home</a> | <a href="/flakes">All flakes</a></small></center>
    <center><small><a href="/">Home</a></small></center>
</body>
</html>

@@ -4,7 +4,7 @@
<h1>Flake details</h1>

<ul>
    <li>Flake: {{ uri }}</li>
    <li>Flake: {{ flake_uri }}</li>
</ul>

<h2>Revisions</h2>
@@ -15,22 +15,5 @@
    {% endfor %}
</ul>

<h2>Current Inputs</h2>

<ul>
    {% for input in current_inputs %}
    <li>
        {{ input.input_name }}
        <ul>
            {% match input.locked_flake_uri %}
            {% when Some with (locked_flake_uri) %}
            <li>Flake: <a href="{{ input.locked_flake_link() }}">{{ locked_flake_uri }}</a></li>
            {% when None %}
            {% endmatch %}
        </ul>
    </li>
    {% endfor %}
</ul>

{% endblock %}

@@ -1,14 +0,0 @@
{% extends "base.html" %}

{% block content %}
<h1>All flakes</h1>

<p>Displays all flakes explicitly scanned and automatically discovered by the tracker.</p>

<ul>
    {% for flake in flakes %}
    <li><a href="{{ flake.flake_link() }}">{{ flake.flake_uri }}</a></li>
    {% endfor %}
</ul>

{% endblock %}

@@ -4,7 +4,10 @@
<h1>Flake Tracker</h1>

<ul>
    <li><a href="/flakes">All flakes</a></li>
    {% for flake in flakes %}
    <li><a href="{{ flake.flake_link() }}">{{ flake.flake_uri }}</a></li>
    {% endfor %}
</ul>

{% endblock %}

@@ -5,36 +5,14 @@

<ul>
    <li>Revision: {{ revision_uri }}</li>
    <li>Revision of: <a href="{# flake_revision.flake_link() #}">{# flake_revision.uri #}</a></a>
    <li>Revision of: <a href="{{ revision.flake_link() }}">{{ revision.flake_uri }}</a></a>
</ul>

<h2>Inputs</h2>
<h2>Outputs</h2>

<ul>
    {% for input in inputs %}
    <li>
        {{ input.input_name }}
        <ul>
            {% match input.locked_flake_uri %}
            {% when Some with (locked_flake_uri) %}
            <li>Flake: <a href="{{ input.locked_flake_link() }}">{{ locked_flake_uri }}</a></li>
            {% when None %}
            {% endmatch %}
            {% match input.locked_revision_uri %}
            {% when Some with (locked_revision_uri) %}
            <li>Revision: <a href="{{ input.locked_revision_link() }}">{{ locked_revision_uri }}</a></li>
            {% when None %}
            {% endmatch %}
        </ul>
    </li>
    {% endfor %}
</ul>

<h2>Input of</h2>

<ul>
    {% for input in input_of %}
    <li><a href="{{ input.revision_link() }}">{{ input.revision_uri }}</a></li>
    {% for output in output_attributes %}
    <li>{{ output.output_attribute_name }}: <a href="{{ output.derivation_link() }}">{{ output.derivation_path }}</a></li>
    {% endfor %}
</ul>
{% endblock %}