Add create binary cache path

It's not 100% complete yet: we're still displaying dummy data on the
homepage.

The auth path seems super verbose, but I'm a bit unwilling to use a
middleware for it: in my experience, that makes it easy to screw up
the auth model :/

I might revisit this choice later on.
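If I do, one lighter option than a full middleware would be a small helper wrapped around the existing check_authentication(). Roughly something like this (just a sketch; with_auth is not part of this commit):

use actix_web::{web, HttpRequest, HttpResponse};
use crate::models::{AppState, User};

// Hypothetical helper, not in this commit: runs the same check_authentication()
// the handlers already call, and only invokes the closure once the session is
// valid, so each handler keeps a single line of auth boilerplate.
async fn with_auth<F, Fut>(app_state: &web::Data<AppState<'_>>, req: HttpRequest, on_ok: F) -> HttpResponse
where
    F: FnOnce(User) -> Fut,
    Fut: std::future::Future<Output = HttpResponse>,
{
    match check_authentication(app_state, req).await {
        Ok(user) => on_ok(user).await,
        // check_authentication already builds the redirect-to-login response.
        Err(redirect) => redirect,
    }
}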
Félix Baylac Jacqué 2023-12-13 17:36:25 +01:00
parent c760345156
commit ebd390987f
14 changed files with 1050 additions and 56 deletions

Cargo.lock (generated, 921 changed lines): diff suppressed because it is too large.


@ -15,11 +15,13 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
handlebars = "4.3.6"
deadpool-postgres = "0.11.0"
tokio-postgres = { version = "0.7.10", features = ["with-uuid-1", "with-serde_json-1"] }
tokio-postgres = { version = "0.7.10", features = ["with-uuid-1", "with-serde_json-1", "with-chrono-0_4"] }
postgres-types = { version = "*", features = ["derive"] }
anyhow = "1.0.75"
refinery = { version = "0.8.11", features = ["tokio-postgres"] }
uuid = { version = "1.4.1", features = ["v4"] }
chrono = { version = "*", features = ["serde"] }
aws-sdk-s3 = "1.6.0"
[dependencies.heck]
version = "0.4.1"


@ -10,5 +10,5 @@ pkgs.rustPlatform.buildRustPackage {
];
buildInputs = [ pkgs.openssl ];
src = pkgs.lib.cleanSource ./.;
cargoHash = "sha256-0891xprAumfRvODg0Al6PnQ0EfWNgX+VUINjbutFilg=";
cargoHash = "sha256-ct90VmvzUIJ8d0LO7jz5/kcC3M0rU85DbX81/67T2R4=";
}


@ -22,7 +22,8 @@ CREATE TABLE BinaryCaches (
name text NOT NULL,
access_key text NOT NULL,
secret_key text NOT NULL,
region text NOT NULL
region text NOT NULL,
endpoint text NOT NULL
);
CREATE TABLE Projects (
@ -44,8 +45,11 @@ CREATE TABLE Closures (
id SERIAL PRIMARY KEY NOT NULL,
project_id integer NOT NULL,
objects text[] NOT NULL,
date timestamp NOT NULL,
CONSTRAINT fk_project_closure FOREIGN KEY (project_id) REFERENCES Projects(id)
);
-- We'll mostly be querying the Keys using the associated uid.
CREATE INDEX idx_keys_uid ON Keys USING HASH (user_id);
-- We'll often be sorting Closures by their datetime; a HASH index can't
-- serve ORDER BY, so use the default B-tree.
CREATE INDEX idx_date_closures ON Closures (date);

pg_dump (new executable file, 4 lines added)

@ -0,0 +1,4 @@
#!/usr/bin/env bash
source /tmp/nom-nom-dev-args
pg_dump -h "${host}" -p "${port}" -d "${dbname}" > dump.sql

psql (2 changed lines)

@ -1,4 +1,4 @@
#!/usr/bin/env bash
source /tmp/nom-nom-dev-args
psql -h "${host}" -p "${port}" -d "${dbname}"
psql -h "${host}" -p "${port}" -d "${dbname}" "$@"


@ -12,6 +12,7 @@ trap 'rm -rf ${dbdir}' EXIT
initdb "$dbdir"
postgres -D "${dbdir}" -c unix_socket_directories="${dbdir}" -c listen_addresses= -c port="${port}" &
pgpid=$!
# Trick to help the "./psql" script find the DB dir & co
cat <<EOF > "/tmp/nom-nom-dev-args"
export host="$dbdir"
@ -23,7 +24,7 @@ EOF
trap 'rm -rf ${dbdir} && rm /tmp/nom-nom-dev-args && kill ${pgpid}' EXIT
# Yeah, this is very meh. We need to wait for the server to be ready
#to receive requests to create the DB.
# to receive requests to create the DB.
sleep 2
createdb -h "${dbdir}" -p "${port}" "${dbname}"
@ -36,4 +37,12 @@ cat <<EOF > "${cfgfile}"
}
EOF
# Check if there's a dump to seed the DB.
if [ -f dump.sql ]; then
echo ""
echo "[+] Seeding the DB with dump.sql"
echo ""
./psql -f dump.sql
fi
cargo run --bin nom-nom-gc-server -- --bind "[::1]:8001" --config "${cfgfile}"
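The intended loop, as far as these scripts go: run ./pg_dump while the dev server is up to snapshot the database into dump.sql, and the next run of this dev script will reseed the freshly created database from it.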


@ -0,0 +1,33 @@
use actix_web::{web::{self, Form}, HttpResponse, http::header::{ContentType, self}, Responder, HttpRequest};
use crate::{models::{AppState, BinaryCache}, templates};
use super::check_authentication;
pub async fn new_binary_cache(app_state: web::Data<AppState<'_>>, req: HttpRequest) -> impl Responder {
if check_authentication(&app_state, req).await.is_ok() {
let response = templates::new_binary_cache(app_state.hbs.clone()).unwrap();
HttpResponse::Ok()
.content_type(ContentType::html())
.body(response)
} else {
HttpResponse::SeeOther()
.content_type(ContentType::plaintext())
.append_header((header::LOCATION, "/login"))
.body("please log in")
}
}
pub async fn new_binary_cache_post(app_state: web::Data<AppState<'_>>, req: HttpRequest, form: Form<BinaryCache>) -> impl Responder {
if check_authentication(&app_state, req).await.is_ok() {
app_state.create_binary_cache(&form).await.unwrap();
HttpResponse::SeeOther()
.content_type(ContentType::plaintext())
.append_header((header::LOCATION, "/"))
.body("new binary cache created")
} else {
HttpResponse::Forbidden()
.finish()
}
}
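With a helper like the with_auth sketch above, the GET handler could shrink to something like this (again a sketch, not what's committed here; it assumes with_auth is exported from the parent handlers module):

pub async fn new_binary_cache(app_state: web::Data<AppState<'_>>, req: HttpRequest) -> impl Responder {
    let hbs = app_state.hbs.clone();
    with_auth(&app_state, req, |_user| async move {
        // Same body as above, minus the manual redirect branch.
        let response = templates::new_binary_cache(hbs).unwrap();
        HttpResponse::Ok()
            .content_type(ContentType::html())
            .body(response)
    })
    .await
}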


@ -1,15 +1,31 @@
use actix_web::{HttpResponse, http::header::{ContentType, self}, web, HttpRequest, cookie::{Cookie, SameSite}};
use chrono::Local;
use uuid::Uuid;
use crate::{models::{AppState, SessionUuid, User}, templates};
use crate::{models::{AppState, SessionUuid, User, ProjectSummary}, templates};
pub mod authentication;
pub mod binary_cache;
pub use authentication::*;
pub use binary_cache::*;
pub async fn landing_page (app_state: web::Data<AppState<'_>>, req: HttpRequest) -> HttpResponse {
pub async fn landing_page (app_state: web::Data<AppState<'_>>) -> HttpResponse {
let summaries: Vec<ProjectSummary> = vec![
ProjectSummary {
name: "Test Project".to_string(),
latest_closure: "/nix/store/blabla".to_string(),
latest_closure_datetime: Local::now(),
}
];
let content: String = templates::landing_page(app_state.hbs.clone(), true, summaries).unwrap();
HttpResponse::Ok()
.content_type(ContentType::html())
.body(content)
/*
match check_authentication(app_state.clone(), req).await {
Ok(_) => {
let content: String = templates::landing_page(app_state.hbs.clone(), true).unwrap();
HttpResponse::Ok()
.content_type(ContentType::html())
@ -17,13 +33,14 @@ pub async fn landing_page (app_state: web::Data<AppState<'_>>, req: HttpRequest)
},
Err(redirect) => redirect,
}
*/
}
/**
Checks if the user requesting the page is authenticated.
*/
async fn check_authentication(app_state: web::Data<AppState<'_>>, req: HttpRequest) -> Result<User, HttpResponse> {
async fn check_authentication(app_state: &web::Data<AppState<'_>>, req: HttpRequest) -> Result<User, HttpResponse> {
fn redirect_to_login<T>(e: T, req: &HttpRequest) -> HttpResponse
where T: ToString


@ -2,6 +2,7 @@ use std::collections::HashMap;
use std::fs;
use std::ops::DerefMut;
use std::sync::Arc;
use chrono::{DateTime, Local};
use postgres_types::{FromSql, ToSql};
use url::Url;
@ -84,12 +85,13 @@ pub struct Key {
pub key: Passkey
}
#[derive(Clone, Debug, Eq, PartialEq)]
#[derive(Clone, Debug, Eq, PartialEq, Deserialize)]
pub struct BinaryCache {
pub name: String,
pub access_key: String,
pub secret_key: String,
pub region: String
pub region: String,
pub endpoint: String
}
#[derive(Clone, Debug, Eq, PartialEq)]
@ -97,6 +99,13 @@ pub struct Project {
pub name: String,
}
#[derive(Serialize, Clone, Debug, Eq, PartialEq)]
pub struct ProjectSummary {
pub name: String,
pub latest_closure: String,
pub latest_closure_datetime: DateTime<Local>
}
impl AppState<'_> {
pub fn new(conf: Configuration) -> Self {
let rp = "localhost";
@ -188,8 +197,8 @@ impl AppState<'_> {
pub async fn create_binary_cache(&self, binary_cache: &BinaryCache) -> Result<()> {
let conn = self.db.get().await?;
let stmt = conn.prepare_cached("INSERT INTO BinaryCaches (name, access_key, secret_key, region) VALUES ($1, $2, $3, $4)").await?;
let _ = conn.execute(&stmt, &[&binary_cache.name, &binary_cache.access_key, &binary_cache.secret_key, &binary_cache.region]).await?;
let stmt = conn.prepare_cached("INSERT INTO BinaryCaches (name, access_key, secret_key, region, endpoint) VALUES ($1, $2, $3, $4, $5)").await?;
let _ = conn.execute(&stmt, &[&binary_cache.name, &binary_cache.access_key, &binary_cache.secret_key, &binary_cache.region, &binary_cache.endpoint]).await?;
Ok(())
}
@ -223,4 +232,17 @@ impl AppState<'_> {
})
}
pub async fn get_project_summaries(&self) -> Result<Vec<ProjectSummary>> {
let conn = self.db.get().await?;
let stmt = conn.prepare_cached("SELECT p.name, p FROM Projects p \
INNER JOIN Closures c ON c.project_id = p.id").await?;
let rows = conn.query(&stmt, &[]).await?;
Ok(
rows.into_iter().map(|r| ProjectSummary {
name: r.get(0),
latest_closure: r.get(1),
latest_closure_datetime: r.get(2)
}).collect()
)
}
}
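Note that, as committed, this query selects the whole row alias p as its second column, so the r.get(1)/r.get(2) mapping can't yet produce a ProjectSummary; that's why the landing page still renders hard-coded data. Once it's wired up for real, it will probably need to pick one latest closure per project. A rough sketch, where using objects[1] as "the" closure path is purely an assumption:

// Sketch only: one row per project, keeping the most recent closure.
let stmt = conn.prepare_cached("SELECT DISTINCT ON (p.id) p.name, c.objects[1], c.date \
    FROM Projects p \
    INNER JOIN Closures c ON c.project_id = p.id \
    ORDER BY p.id, c.date DESC").await?;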


@ -38,6 +38,8 @@ async fn main() -> std::io::Result<()> {
.route("/login", web::get().to(handlers::webauthn_login))
.route("/login/init", web::post().to(handlers::webauthn_login_init))
.route("/login/finish", web::post().to(handlers::webauthn_login_finish))
.route("/binary-cache/new", web::get().to(handlers::new_binary_cache))
.route("/binary-cache/new", web::post().to(handlers::new_binary_cache_post))
})
.bind(addr)
.unwrap()


@ -1,4 +1,23 @@
{{#> template }}
<p>Hello world, this is the nom nom S3 GC</p>
<p>Logged? {{ logged }}</p>
<h2>Binary Caches</h2>
<a href="/binary-cache/new">New Binary Cache</a>
{{#each binaryCaches}}
<div class="binary-cache">
<h3>{{this.name}}</h3>
<table>
<tr>
<th>Project Name</th>
<th>Latest Closure</th>
<th>Datetime</th>
</tr>
{{#each this.projects}}
<tr>
<td>{{this.name}}</td>
<td>{{this.latestClosure}}</td>
<td>{{this.datetime}}</td>
</tr>
{{/each}}
</table>
</div>
{{/each}}
{{ /template }}


@ -4,7 +4,7 @@ use handlebars::Handlebars;
use std::{path::PathBuf, sync::Arc};
use crate::models::RegistrationUuid;
use crate::models::{RegistrationUuid, ProjectSummary};
pub fn new<'a>() -> Result<Handlebars<'a>, RenderError> {
let rootpath = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
@ -16,6 +16,7 @@ pub fn new<'a>() -> Result<Handlebars<'a>, RenderError> {
let webauthn_register_js = rootpath.join("src/templates/webauthn-register.js");
let webauthn_login_js = rootpath.join("src/templates/webauthn-login.js");
let login = rootpath.join("src/templates/login.hbs");
let new_binary_cache_form = rootpath.join("src/templates/new-binary-cache.hbs");
hbs.register_template_file("landing", landing_path.to_str().unwrap())?;
hbs.register_template_file("template", template_path.to_str().unwrap())?;
@ -24,17 +25,28 @@ pub fn new<'a>() -> Result<Handlebars<'a>, RenderError> {
hbs.register_template_file("webauthn-login-js", webauthn_login_js.to_str().unwrap())?;
hbs.register_template_file("register-user", register_user.to_str().unwrap())?;
hbs.register_template_file("login", login.to_str().unwrap())?;
hbs.register_template_file("new-binary-cache-form", new_binary_cache_form.to_str().unwrap())?;
Ok(hbs)
}
pub fn landing_page(hb: Arc<Handlebars<'_>>, logged: bool) -> Result<String, RenderError> {
pub fn landing_page(hb: Arc<Handlebars<'_>>, logged: bool, project_summaries: Vec<ProjectSummary>) -> Result<String, RenderError> {
let data = json!({
"logged": logged
});
"binaryCaches": [{
"name": "NixOS Binary Cache",
"projects": project_summaries.into_iter().map(|p| json!({
"name": p.name,
"latestClosure": p.latest_closure,
"datetime": p.latest_closure_datetime.to_string()
}
)).collect::<Vec<_>>()
}]});
hb.render("landing", &data)
}
/**
Generates a webauthn challenge to initiate a fido key enrolling procedure.
*/
pub fn register_user_start(hb: Arc<Handlebars<'_>>, registration_uuid: RegistrationUuid, username: String, keyids: Vec<String>) -> Result<String, RenderError> {
let js_data = json!({
"registration-uuid": &registration_uuid.0.to_string(),
@ -48,8 +60,7 @@ pub fn register_user_start(hb: Arc<Handlebars<'_>>, registration_uuid: Registrat
hb.render("register-user", &data)
}
pub fn login(hb: Arc<Handlebars<'_>>, ) -> Result<String, RenderError> {
pub fn login(hb: Arc<Handlebars<'_>>) -> Result<String, RenderError> {
let js_data = json!({
});
let js = hb.render("webauthn-login-js", &js_data)?;
@ -58,3 +69,9 @@ pub fn login(hb: Arc<Handlebars<'_>>, ) -> Result<String, RenderError> {
});
hb.render("login", &data)
}
pub fn new_binary_cache(hb: Arc<Handlebars<'_>>) -> Result<String, RenderError> {
let data = json!({
});
hb.render("new-binary-cache-form", &data)
}


@ -0,0 +1,16 @@
{{#> template }}
<h1>New Binary Cache</h1>
<form name="binary-cache" method="post" action="/binary-cache/new">
<label>Bucket Name</label>
<input name="name"/>
<label>Bucket Access Key</label>
<input name="access_key"/>
<label>Bucket Secret Key</label>
<input name="secret_key"/>
<label>Bucket Region</label>
<input name="region"/>
<label>Bucket Endpoint</label>
<input name="endpoint"/>
<input type="submit" value="Create binary cache"/>
</form>
{{ /template }}