fly.io deployments, generate TLS certs (in prod), less deps
billyb2 committed Apr 21, 2024
1 parent bfb18cf commit 5d73989
Showing 8 changed files with 753 additions and 113 deletions.
465 changes: 420 additions & 45 deletions Cargo.lock

Large diffs are not rendered by default.

12 changes: 8 additions & 4 deletions Cargo.toml
@@ -4,8 +4,8 @@ version = "0.1.0"
edition = "2021"

[profile.release]
lto = "fat"
debug = true
# TODO re-enable this when we don't do 50 deploys a day
#lto = "fat"

[dependencies]
anyhow = { version = "1" }
@@ -23,8 +23,8 @@ rand = "0.8"
wtransport = { version = "0.1" }
#bfsp = { path = "../bfsp" }
bfsp = { git = "https://github.com/Billy-s-E2EE-File-Server/bfsp.git" }
async-trait = { version = "0.1" }
futures = { version = "0.3", features = ["executor"] }
thiserror = "1"
biscuit-auth = { version = "4" }
serde = { version = "1" }

# TLS cert stuff
instant-acme = "0.4"
rcgen = "0.12.1"
warp = "0.3"
serde_json = "1"
9 changes: 7 additions & 2 deletions Dockerfile
@@ -1,20 +1,25 @@
FROM nixos/nix:latest as builder

COPY src /tmp/build/src
COPY flake.nix flake.lock /tmp/build/
COPY rust-toolchain.toml /tmp/build/
COPY Cargo.toml Cargo.lock /tmp/build/
COPY migrations /tmp/build/migrations

WORKDIR /tmp/build

RUN nix --extra-experimental-features "nix-command flakes" --option filter-syscalls false build '.#build-deps'

COPY src /tmp/build/src

RUN nix --extra-experimental-features "nix-command flakes" --option filter-syscalls false build '.?submodules=1'

RUN mkdir -p /tmp/nix-store-closure
RUN cp -R $(nix-store -qR result/) /tmp/nix-store-closure

FROM scratch
FROM debian:bookworm-slim
WORKDIR /app
RUN apt-get update && apt-get install ca-certificates -y
RUN update-ca-certificates -f
COPY --from=builder /tmp/nix-store-closure /nix/store
COPY --from=builder /tmp/build/result /app
COPY migrations /app/migrations
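
The rewritten Dockerfile does two things: it copies everything except `src/` first and runs a dependency-only build (`.#build-deps`, exposed by the flake change below), so routine source edits no longer invalidate the cached dependency layer, and it swaps the `scratch` runtime image for `debian:bookworm-slim` with `ca-certificates`, which the server needs for outbound TLS (for example to the ACME directory). Roughly how this is exercised, assuming Docker and `flyctl` are installed:

```sh
# Same two-step build the Dockerfile performs: dependencies first, then the crate.
nix --extra-experimental-features "nix-command flakes" build '.#build-deps'
nix --extra-experimental-features "nix-command flakes" build '.?submodules=1'

# Or have Fly build the Dockerfile and roll out the machine config from fly.toml.
fly deploy
```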
22 changes: 15 additions & 7 deletions flake.nix
@@ -23,18 +23,26 @@
rustToolchain =
pkgs.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
craneLib = crane.lib.${system}.overrideToolchain rustToolchain;

src = craneLib.cleanCargoSource (craneLib.path ./.);
buildInputs = with pkgs;
[ clang_15 libsodium protobuf ]
++ pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.libiconv ];
cargoArtifacts = craneLib.buildDepsOnly {
inherit src;
buildInputs = buildInputs;
};

my-crate = craneLib.buildPackage {
src = craneLib.cleanCargoSource (craneLib.path ./.);
inherit cargoArtifacts src;
cargoVendorDir =
craneLib.vendorCargoDeps { cargoLock = ./Cargo.lock; };

buildInputs = with pkgs;
[ clang_15 libsodium protobuf ]
++ pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.libiconv ];

buildInputs = buildInputs;
};
in {
packages.default = my-crate;
packages.build-deps = cargoArtifacts;

devShells.default = craneLib.devShell {
# Automatically inherit any build inputs from `my-crate`
@@ -49,8 +57,8 @@
protolint
sqlx-cli
rust-analyzer
openssl
xxd
openssl
xxd
];
};
});
12 changes: 9 additions & 3 deletions fly.toml
@@ -6,16 +6,22 @@
app = 'encrypted-file-server'
primary_region = 'ord'

[build]

[[services]]
protocol = "udp"
internal_port = 9999
auto_stop_machines = true
auto_start_machines = true
min_machines_running = 0
min_machines_running = 1
processes = ['app']
[[services.ports]]
port = 9999

[http_service]
internal_port = 80
force_https = false

[env]
RUST_BACKTRACE="1"
TOKEN_PUBLIC_KEY="d27fb9c11d7608f86aa9e90a00133d58688b2fe4e7903a35199a25f7e905f658"

[[vm]]
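
The fly.toml changes keep at least one machine running for the UDP service on port 9999 and add a plain-HTTP `[http_service]` on internal port 80 with `force_https = false`. Leaving HTTP un-redirected is exactly what an ACME HTTP-01 challenge requires, so a plausible reading is that the new `warp` dependency serves the challenge responses there. A minimal sketch of that idea, not code from this commit (the token store and wiring are assumptions, and tokio is assumed to already be a dependency):

```rust
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

use warp::Filter;

#[tokio::main]
async fn main() {
    // token -> key authorization, filled in while an ACME order is pending.
    let challenges: Arc<Mutex<HashMap<String, String>>> = Arc::new(Mutex::new(HashMap::new()));

    // GET /.well-known/acme-challenge/<token> returns the stored key authorization
    // (or an empty body if the token is unknown).
    let route = warp::path!(".well-known" / "acme-challenge" / String).map(move |token: String| {
        challenges
            .lock()
            .unwrap()
            .get(&token)
            .cloned()
            .unwrap_or_default()
    });

    // fly.toml's [http_service] forwards external HTTP traffic to internal_port 80.
    warp::serve(route).run(([0, 0, 0, 0], 80)).await;
}
```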
50 changes: 25 additions & 25 deletions src/db.rs
@@ -1,51 +1,57 @@
use std::{
collections::{HashMap, HashSet},
env,
future::Future,
};

use anyhow::Result;
use async_trait::async_trait;
use bfsp::{ChunkHash, ChunkID, ChunkMetadata, EncryptedFileMetadata};
use log::debug;
use sqlx::{Execute, PgPool, QueryBuilder, Row};
use thiserror::Error;

#[async_trait]
pub trait ChunkDatabase: Sized {
pub trait MetaDB: Sized {
type InsertChunkError: std::error::Error;

async fn new() -> Result<Self>;
async fn contains_chunk(&self, chunk_id: ChunkID, user_id: i64) -> Result<bool>;
async fn insert_chunk(
fn new() -> impl Future<Output = Result<Self>> + Send;
fn contains_chunk(
&self,
chunk_id: ChunkID,
user_id: i64,
) -> impl Future<Output = Result<bool>> + Send;
fn insert_chunk(
&self,
chunk_meta: ChunkMetadata,
user_id: i64,
) -> std::result::Result<(), InsertChunkError>;
) -> impl Future<Output = std::result::Result<(), InsertChunkError>> + Send;
// TODO: add a function to get multiple chunks
async fn get_chunk_meta(
fn get_chunk_meta(
&self,
chunk_id: ChunkID,
user_id: i64,
) -> Result<Option<ChunkMetadata>>;
async fn delete_chunks(&self, chunk_ids: &HashSet<ChunkID>) -> Result<()>;
async fn insert_file_meta(
) -> impl Future<Output = Result<Option<ChunkMetadata>>> + Send;
fn delete_chunks(
&self,
chunk_ids: &HashSet<ChunkID>,
) -> impl Future<Output = Result<()>> + Send;
fn insert_file_meta(
&self,
enc_metadata: EncryptedFileMetadata,
user_id: i64,
) -> Result<()>;
async fn get_file_meta(
) -> impl Future<Output = Result<()>> + Send;
fn get_file_meta(
&self,
meta_id: i64,
user_id: i64,
) -> Result<Option<EncryptedFileMetadata>>;
async fn list_file_meta(
) -> impl Future<Output = Result<Option<EncryptedFileMetadata>>> + Send;
fn list_file_meta(
&self,
meta_ids: HashSet<i64>,
user_id: i64,
) -> Result<HashMap<i64, EncryptedFileMetadata>>;
) -> impl Future<Output = Result<HashMap<i64, EncryptedFileMetadata>>> + Send;
}

pub struct PostgresDB {
pub struct PostgresMetaDB {
pool: PgPool,
}

@@ -57,8 +63,7 @@ pub enum InsertChunkError {
DatabaseError(#[from] sqlx::Error),
}

#[async_trait]
impl ChunkDatabase for PostgresDB {
impl MetaDB for PostgresMetaDB {
type InsertChunkError = InsertChunkError;

async fn new() -> Result<Self> {
@@ -68,12 +73,7 @@ impl ChunkDatabase for PostgresDB {
)
.await?;

sqlx::migrate!()
.run(&pool)
.await
.map_err(|err| anyhow::anyhow!("Error running database migrations: {err:?}"))?;

Ok(PostgresDB { pool })
Ok(PostgresMetaDB { pool })
}

async fn contains_chunk(&self, chunk_id: ChunkID, user_id: i64) -> Result<bool> {
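
The src/db.rs changes rename `ChunkDatabase`/`PostgresDB` to `MetaDB`/`PostgresMetaDB` and drop the `async-trait` dependency: the trait now declares each method as a plain `fn` returning `impl Future<Output = …> + Send` (usable on stable since Rust 1.75), while the impl block keeps writing ordinary `async fn`. A minimal, self-contained illustration of that pattern with made-up types, not the commit's own code:

```rust
use std::{collections::HashSet, future::Future};

use anyhow::Result;

pub trait MetaStore: Sized {
    // Declared as `impl Future + Send` so callers can spawn the returned
    // futures without #[async_trait]'s boxed-dyn indirection.
    fn new() -> impl Future<Output = Result<Self>> + Send;
    fn contains(&self, id: i64) -> impl Future<Output = Result<bool>> + Send;
}

pub struct InMemoryStore {
    ids: HashSet<i64>,
}

impl MetaStore for InMemoryStore {
    // An `async fn` in the impl satisfies the `impl Future + Send` signature
    // as long as the future it produces really is Send.
    async fn new() -> Result<Self> {
        Ok(Self { ids: HashSet::new() })
    }

    async fn contains(&self, id: i64) -> Result<bool> {
        Ok(self.ids.contains(&id))
    }
}
```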
