
Commit b989ee7: resolve comments

darknight committed Nov 23, 2023
1 parent 0ef3186
Showing 5 changed files with 99 additions and 68 deletions.
20 changes: 11 additions & 9 deletions crates/tabby-common/src/config.rs
@@ -50,13 +50,17 @@ impl RepositoryConfig {
let path = self.git_url.strip_prefix("file://").unwrap();
path.into()
} else {
repositories_dir().join(to_filename(&self.git_url))
repositories_dir().join(self.name())
}
}

pub fn is_local_dir(&self) -> bool {
self.git_url.starts_with("file://")
}

pub fn name(&self) -> String {
filenamify(&self.git_url)
}
}

#[derive(Serialize, Deserialize)]
@@ -73,13 +77,9 @@ impl Default for ServerConfig {
}
}

pub fn to_filename<S: AsRef<str>>(s: S) -> String {
filenamify(s)
}

#[cfg(test)]
mod tests {
use super::{to_filename, Config, RepositoryConfig};
use super::{Config, RepositoryConfig};

#[test]
fn it_parses_empty_config() {
@@ -102,8 +102,10 @@ mod tests {
}

#[test]
fn test_to_filename() {
let url = "https://github.com/TabbyML/tabby.git".to_string();
assert_eq!(to_filename(url), "https_github.com_TabbyML_tabby.git");
fn test_repository_config_name() {
let repo = RepositoryConfig {
git_url: "https://github.com/TabbyML/tabby.git".to_owned(),
};
assert_eq!(repo.name(), "https_github.com_TabbyML_tabby.git");
}
}
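
A short usage sketch, not part of the commit, of how the new name() helper behaves. The helper function and its placement are assumptions for illustration; the sanitized output follows the filenamify crate and matches the expectation in the test above.

    // Illustration only, written as if it lived in config.rs next to RepositoryConfig.
    fn local_checkout_name(git_url: &str) -> String {
        let repo = RepositoryConfig {
            git_url: git_url.to_owned(),
        };
        // filenamify replaces filesystem-unfriendly characters, so
        // "https://github.com/TabbyML/tabby.git" becomes
        // "https_github.com_TabbyML_tabby.git", as the test above expects.
        repo.name()
    }
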
5 changes: 1 addition & 4 deletions crates/tabby-common/src/lib.rs
@@ -13,17 +13,14 @@ use std::{
path::PathBuf,
};

pub use config::to_filename;
use path::dataset_dir;
use serde::{Deserialize, Serialize};
use serde_jsonlines::JsonLinesReader;

#[derive(Serialize, Deserialize, Clone, Debug)]
#[derive(Serialize, Deserialize)]
pub struct SourceFile {
pub git_url: String,
pub filepath: String,
#[serde(skip_serializing_if = "String::is_empty")]
#[serde(default)]
pub content: String,
pub language: String,
pub max_line_length: usize,
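
A small sketch, not part of the commit, of what the two serde attributes on content do; the struct below is a trimmed stand-in for SourceFile, and it assumes serde and serde_json are available.

    use serde::{Deserialize, Serialize};

    // Trimmed stand-in for SourceFile, keeping only the attribute pair under discussion.
    #[derive(Serialize, Deserialize)]
    struct FileRecord {
        filepath: String,
        #[serde(skip_serializing_if = "String::is_empty")]
        #[serde(default)]
        content: String,
    }

    fn demo() {
        // Empty content is omitted when serializing...
        let json = serde_json::to_string(&FileRecord {
            filepath: "src/main.rs".into(),
            content: String::new(),
        })
        .unwrap();
        assert_eq!(json, r#"{"filepath":"src/main.rs"}"#);

        // ...and a record without a content key still deserializes, defaulting to "".
        let record: FileRecord = serde_json::from_str(&json).unwrap();
        assert!(record.content.is_empty());
    }
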
14 changes: 1 addition & 13 deletions ee/tabby-webserver/src/lib.rs
@@ -32,8 +32,6 @@ use schema::Schema;
use server::ServerContext;
use tarpc::server::{BaseChannel, Channel};

use crate::repositories::repo::load_dataset;

pub async fn attach_webserver(
api: Router,
ui: Router,
@@ -53,7 +51,7 @@ pub async fn attach_webserver(
.route("/graphql", routing::get(playground("/graphql", None)))
.layer(Extension(schema))
.route("/hub", routing::get(ws_handler).with_state(ctx))
.nest("/repositories", repositories_routers().await);
.nest("/repositories", repositories::routers());

let ui = ui
.route("/graphiql", routing::get(graphiql("/graphql", None)))
@@ -62,16 +60,6 @@ pub async fn attach_webserver(
(api, ui)
}

async fn repositories_routers() -> Router {
load_dataset().await.unwrap();

Router::new()
.route("/:name/resolve/", routing::get(repositories::resolve))
.route("/:name/resolve/*path", routing::get(repositories::resolve))
.route("/:name/meta/", routing::get(repositories::meta))
.route("/:name/meta/*path", routing::get(repositories::meta))
}

async fn distributed_tabby_layer(
State(ws): State<Arc<ServerContext>>,
request: Request<Body>,
33 changes: 20 additions & 13 deletions ee/tabby-webserver/src/repositories.rs
@@ -1,15 +1,25 @@
pub(crate) mod repo;
mod resolve;

use anyhow::Result;
use axum::{extract::Path, http::StatusCode, response::Response, Json};
use tabby_common::{path::repositories_dir, SourceFile};
use tracing::{debug, instrument, warn};
use axum::{extract::Path, http::StatusCode, response::Response, routing, Json, Router};
use tabby_common::path::repositories_dir;
use tracing::{instrument, warn};

use crate::repositories::repo::{resolve_dir, resolve_file, Repository, DATASET};
use crate::{
repositories,
repositories::resolve::{resolve_dir, resolve_file, resolve_meta, Meta, Repository},
};

pub fn routers() -> Router {
Router::new()
.route("/:name/resolve/", routing::get(repositories::resolve))
.route("/:name/resolve/*path", routing::get(repositories::resolve))
.route("/:name/meta/", routing::get(repositories::meta))
.route("/:name/meta/*path", routing::get(repositories::meta))
}

#[instrument(skip(repo))]
pub async fn resolve(Path(repo): Path<Repository>) -> Result<Response, StatusCode> {
debug!("repo: {:?}", repo);
async fn resolve(Path(repo): Path<Repository>) -> Result<Response, StatusCode> {
let root = repositories_dir().join(repo.name_str());
let full_path = root.join(repo.path_str());
let is_dir = tokio::fs::metadata(full_path.clone())
@@ -37,13 +47,10 @@ pub async fn resolve(Path(repo): Path<Repository>) -> Result<Response, StatusCod
}

#[instrument(skip(repo))]
pub async fn meta(Path(repo): Path<Repository>) -> Result<Json<SourceFile>, StatusCode> {
debug!("repo: {:?}", repo);
async fn meta(Path(repo): Path<Repository>) -> Result<Json<Meta>, StatusCode> {
let key = repo.dataset_key();
if let Some(dataset) = DATASET.get() {
if let Some(file) = dataset.get(&key) {
return Ok(Json(file.clone()));
}
if let Some(resp) = resolve_meta(&key) {
return Ok(Json(resp));
}
Err(StatusCode::NOT_FOUND)
}
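
A minimal wiring sketch, not part of the commit, showing how routers() lines up with the .nest("/repositories", repositories::routers()) call in ee/tabby-webserver/src/lib.rs; the function name and the example request paths are hypothetical.

    // Hypothetical helper inside the tabby-webserver crate, mirroring the call
    // site in lib.rs. After nesting, the handlers above answer requests such as
    //   GET /repositories/https_github.com_TabbyML_tabby.git/resolve/crates/tabby/src/main.rs
    //   GET /repositories/https_github.com_TabbyML_tabby.git/meta/crates/tabby/src/main.rs
    use axum::Router;

    fn repository_api() -> Router {
        Router::new().nest("/repositories", crate::repositories::routers())
    }
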
95 changes: 66 additions & 29 deletions ee/tabby-webserver/src/repositories/{repo.rs → resolve.rs}
@@ -8,16 +8,18 @@ use axum::{
Json,
};
use hyper::Body;
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use tabby_common::{to_filename, SourceFile};
use tokio::sync::OnceCell;
use tabby_common::{config::Config, SourceFile, Tag};
use tower::ServiceExt;
use tower_http::services::ServeDir;
use tracing::error;

pub(crate) static DATASET: OnceCell<HashMap<DatasetKey, SourceFile>> = OnceCell::const_new();
lazy_static! {
static ref META: HashMap<DatasetKey, Meta> = load_meta();
}

const MIME_VENDOR: &str = "application/vnd.directory+json";
const DIRECTORY_MIME_TYPE: &str = "application/vnd.directory+json";

#[derive(Hash, PartialEq, Eq, Debug)]
pub struct DatasetKey {
@@ -53,34 +55,62 @@ struct ListDir {
entries: Vec<String>,
}

/// Load dataset
pub async fn load_dataset() -> Result<()> {
// `SourceFile::all()` depends on `std::io`, so it's blocking.
// We need to spawn a blocking task dedicated for such scenario.
let dataset = tokio::task::spawn_blocking(|| {
let mut dataset = HashMap::new();
let iter = match SourceFile::all() {
Ok(all) => all,
Err(err) => {
error!("load dataset: {}", err);
return dataset;
}
};
for mut file in iter {
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Meta {
git_url: String,
filepath: String,
language: String,
max_line_length: usize,
avg_line_length: f32,
alphanum_fraction: f32,
tags: Vec<Tag>,
}

impl From<SourceFile> for Meta {
fn from(file: SourceFile) -> Self {
Self {
git_url: file.git_url,
filepath: file.filepath,
language: file.language,
max_line_length: file.max_line_length,
avg_line_length: file.avg_line_length,
alphanum_fraction: file.alphanum_fraction,
tags: file.tags,
}
}
}
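
A side note on the design: the previous code (visible in the hunk below) blanked file.content before storing SourceFile in the map, whereas Meta simply has no content field, so /meta responses cannot carry file contents. A tiny check to that effect, for illustration only; it assumes serde_json is available and sits in the same module as Meta, since the fields are private.

    #[cfg(test)]
    mod meta_shape {
        use super::Meta;

        #[test]
        fn meta_serializes_without_content() {
            // Field values are hypothetical.
            let meta = Meta {
                git_url: "https://github.com/TabbyML/tabby.git".into(),
                filepath: "crates/tabby/src/main.rs".into(),
                language: "rust".into(),
                max_line_length: 80,
                avg_line_length: 24.5,
                alphanum_fraction: 0.85,
                tags: Vec::new(),
            };
            let json = serde_json::to_string(&meta).unwrap();
            assert!(!json.contains("\"content\""));
        }
    }
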

/// TODO: implement auto reloading logic in future (so changes produced by tabby-scheduler command will be loaded)
fn load_meta() -> HashMap<DatasetKey, Meta> {
let mut dataset = HashMap::new();
let repo_conf = match Config::load() {
Ok(config) => config
.repositories
.into_iter()
.map(|repo| (repo.git_url.clone(), repo))
.collect::<HashMap<_, _>>(),
Err(err) => {
error!("load config: {}", err);
return dataset;
}
};
let iter = match SourceFile::all() {
Ok(all) => all,
Err(err) => {
error!("load dataset: {}", err);
return dataset;
}
};
for file in iter {
if let Some(name) = repo_conf.get(&file.git_url).map(|repo| repo.name()) {
let key = DatasetKey {
local_name: to_filename(file.git_url.as_str()),
local_name: name,
rel_path: file.filepath.clone(),
};
// exclude content from response data
file.content = "".to_string();
dataset.insert(key, file);
dataset.insert(key, file.into());
}
dataset
})
.await?;

DATASET.set(dataset)?;
Ok(())
}
dataset
}

/// Resolve a directory
@@ -100,7 +130,7 @@ pub async fn resolve_dir(root: PathBuf, full_path: PathBuf) -> Result<Response>

let body = Json(ListDir { entries }).into_response();
let resp = Response::builder()
.header(header::CONTENT_TYPE, MIME_VENDOR)
.header(header::CONTENT_TYPE, DIRECTORY_MIME_TYPE)
.body(body.into_body())?;

Ok(resp)
@@ -120,3 +150,10 @@ pub async fn resolve_file(root: PathBuf, repo: &Repository) -> Result<Response>

Ok(resp.map(boxed))
}

pub fn resolve_meta(key: &DatasetKey) -> Option<Meta> {
if let Some(meta) = META.get(key) {
return Some(meta.clone());
}
None
}
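
The lazy_static above replaces the eager load_dataset().await.unwrap() that used to run while the router was built, so the dataset is now read on first use, and the TODO notes that reloading is still missing. One possible shape for that reloading, sketched purely as an assumption rather than anything in this commit:

    use std::{collections::HashMap, sync::RwLock};

    use lazy_static::lazy_static;

    lazy_static! {
        // Hypothetical variant of META with interior mutability so it can be refreshed.
        static ref META: RwLock<HashMap<DatasetKey, Meta>> = RwLock::new(load_meta());
    }

    // Hypothetical hook, e.g. invoked after the tabby-scheduler command rewrites the dataset.
    pub fn reload_meta() {
        *META.write().unwrap() = load_meta();
    }

    pub fn resolve_meta(key: &DatasetKey) -> Option<Meta> {
        META.read().unwrap().get(key).cloned()
    }
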
