Skip to content

Commit

Permalink
custom error type + better error handling
Browse files Browse the repository at this point in the history
  • Loading branch information
matt-codecov committed Apr 26, 2024
1 parent 0f7f294 commit fd14014
Show file tree
Hide file tree
Showing 12 changed files with 187 additions and 171 deletions.
3 changes: 3 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@ lazy_static = "1.4.0"
strum = "0.26.1"
strum_macros = "0.26.1"

anyhow = "1.0.82"
thiserror = "1.0.59"

winnow = "0.5.34"

serde_json = "=1.0.1"
Expand Down
4 changes: 2 additions & 2 deletions examples/parse_pyreport.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use std::{env, fs::File, path::PathBuf};

use codecov_rs::parsers::pyreport_shim::parse_pyreport;
use codecov_rs::{error::Result, parsers::pyreport_shim::parse_pyreport};

fn usage_error() -> ! {
println!("Usage:");
Expand All @@ -12,7 +12,7 @@ fn usage_error() -> ! {
std::process::exit(1);
}

pub fn main() -> Result<(), std::io::Error> {
pub fn main() -> Result<()> {
let args: Vec<String> = env::args().collect();

if args.len() != 4 {
Expand Down
19 changes: 19 additions & 0 deletions src/error.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
use thiserror::Error;

/// Crate-wide result alias: every fallible `codecov_rs` API returns
/// [`CodecovError`] on failure.
pub type Result<O> = std::result::Result<O, CodecovError>;

/// Unified error type for the crate.
///
/// Wraps the error types of the layers this crate touches — the sqlite
/// storage layer (`rusqlite` / `rusqlite_migration`), the `winnow`-based
/// parsers, and std I/O — so callers can handle a single error enum.
#[derive(Error, Debug)]
pub enum CodecovError {
    /// Error bubbled up from a `rusqlite` database operation.
    #[error("sqlite failure: '{0}'")]
    SqliteError(#[from] rusqlite::Error),

    /// Error bubbled up while running sqlite schema migrations.
    #[error("sqlite migration failure: '{0}'")]
    SqliteMigrationError(#[from] rusqlite_migration::Error),

    // Can't use #[from] here — presumably because `winnow::error::ContextError`
    // lacks the trait impls thiserror's `#[from]`/`source` machinery needs, so
    // callers wrap it manually (see `CodecovError::ParserError` call sites).
    // TODO(review): confirm against the winnow version in Cargo.toml.
    /// Error produced by one of the `winnow` parsers.
    #[error("parser error: '{0}'")]
    ParserError(winnow::error::ContextError),

    /// Error bubbled up from std I/O (file reads, mmap setup, etc.).
    #[error("io error: '{0}'")]
    IOError(#[from] std::io::Error),
}
2 changes: 2 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,5 @@
pub mod report;

pub mod parsers;

pub mod error;
8 changes: 4 additions & 4 deletions src/parsers/json.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ pub use serde_json::{
use winnow::{
ascii::float,
combinator::{alt, delimited, opt, preceded, repeat, separated, separated_pair},
error::ContextError,
error::{ContextError, ErrMode, ErrorKind, ParserError},
stream::Stream,
token::none_of,
PResult, Parser,
Expand Down Expand Up @@ -48,7 +48,9 @@ pub fn parse_char<S: StrStream>(buf: &mut S) -> PResult<char> {
let c = none_of('"').parse_next(buf);
match c {
Ok('\\') => {
let escaped = buf.next_token().unwrap(); // TODO handle error
let escaped = buf
.next_token()
.ok_or_else(|| ErrMode::from_error_kind(buf, ErrorKind::Token))?;
match escaped {
'"' | '\'' | '\\' => Ok(escaped),
'n' => Ok('\n'),
Expand Down Expand Up @@ -159,8 +161,6 @@ pub fn specific_key<S: StrStream>(key: &str) -> impl Parser<S, String, ContextEr

#[cfg(test)]
mod tests {
use winnow::error::ErrMode;

use super::*;

#[test]
Expand Down
19 changes: 12 additions & 7 deletions src/parsers/pyreport_shim.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
use std::{fs::File, path::PathBuf};

use memmap2::Mmap;
use winnow::Parser;

use crate::{
error::{CodecovError, Result},
parsers::ReportBuilderCtx,
report::{ReportBuilder, SqliteReport, SqliteReportBuilder},
};
Expand Down Expand Up @@ -36,8 +38,8 @@ pub fn parse_pyreport(
report_json_file: &File,
chunks_file: &File,
out_path: PathBuf,
) -> Result<SqliteReport, std::io::Error> {
let report_builder = SqliteReportBuilder::new(out_path);
) -> Result<SqliteReport> {
let report_builder = SqliteReportBuilder::new(out_path)?;

// Memory-map the input file so we don't have to read the whole thing into RAM
let mmap_handle = unsafe { Mmap::map(report_json_file)? };
Expand All @@ -46,9 +48,10 @@ pub fn parse_pyreport(
input: buf,
state: ReportBuilderCtx::new(report_builder),
};
// TODO handle error
let (files, sessions) =
report_json::parse_report_json(&mut stream).expect("Failed to parse report JSON");
let (files, sessions) = report_json::parse_report_json
.parse_next(&mut stream)
.map_err(|e| e.into_inner().unwrap_or_default())
.map_err(CodecovError::ParserError)?;

// Replace our mmap handle so the first one can be unmapped
let mmap_handle = unsafe { Mmap::map(chunks_file)? };
Expand All @@ -60,8 +63,10 @@ pub fn parse_pyreport(
input: buf,
state: chunks_ctx,
};
// TODO handle error
chunks::parse_chunks_file(&mut chunks_stream).expect("Failed to parse chunks file");
chunks::parse_chunks_file
.parse_next(&mut chunks_stream)
.map_err(|e| e.into_inner().unwrap_or_default())
.map_err(CodecovError::ParserError)?;

// Build and return the `SqliteReport`
Ok(chunks_stream.state.db.report_builder.build())
Expand Down
9 changes: 5 additions & 4 deletions src/parsers/pyreport_shim/chunks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use winnow::{
combinator::{
alt, delimited, eof, opt, peek, preceded, separated, separated_pair, seq, terminated,
},
error::{ContextError, ErrMode},
error::{ContextError, ErrMode, ErrorKind, FromExternalError},
stream::Stream,
PResult, Parser, Stateful,
};
Expand Down Expand Up @@ -433,7 +433,7 @@ pub fn label<S: StrStream, R: Report, B: ReportBuilder<R>>(
.db
.report_builder
.insert_context(ContextType::TestCase, &labels_index_key)
.unwrap();
.map_err(|e| ErrMode::from_external_error(buf, ErrorKind::Fail, e))?;
buf.state.labels_index.insert(context.name, context.id);
Ok(labels_index_key)
}
Expand Down Expand Up @@ -600,7 +600,8 @@ where
line_session.coverage = correct_coverage;
}

utils::save_report_line(&report_line, &mut buf.state).expect("error saving report line");
utils::save_report_line(&report_line, &mut buf.state)
.map_err(|e| ErrMode::from_external_error(buf, ErrorKind::Fail, e))?;
Ok(report_line)
}

Expand Down Expand Up @@ -695,7 +696,7 @@ pub fn chunks_file_header<S: StrStream, R: Report, B: ReportBuilder<R>>(
.db
.report_builder
.insert_context(ContextType::TestCase, name)
.unwrap();
.map_err(|e| ErrMode::from_external_error(buf, ErrorKind::Fail, e))?;
buf.state.labels_index.insert(index.clone(), context.id);
}

Expand Down
94 changes: 41 additions & 53 deletions src/parsers/pyreport_shim/chunks/utils.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
use super::{
Complexity, LineSession, MissingBranch, ParseCtx, Partial, PyreportCoverage, ReportLine,
};
use crate::report::{models, Report, ReportBuilder};
use crate::{
error::Result,
report::{models, Report, ReportBuilder},
};

fn separate_pyreport_complexity(complexity: &Complexity) -> (Option<i64>, Option<i64>) {
let (covered, total) = match complexity {
Expand Down Expand Up @@ -55,26 +58,22 @@ fn save_line_session<R: Report, B: ReportBuilder<R>>(
line_session: &LineSession,
coverage_type: &models::CoverageType,
ctx: &mut ParseCtx<R, B>,
) -> Result<models::CoverageSample, ()> {
) -> Result<models::CoverageSample> {
let file_id = ctx.report_json_files[&ctx.chunk.index];

// The chunks file crams three of our model fields into the same "coverage"
// field. We have to separate them.
let (hits, hit_branches, total_branches) = separate_pyreport_coverage(&line_session.coverage);

// Insert the meat of the `LineSession` and get back a `CoverageSample`.
let coverage_sample = ctx
.db
.report_builder
.insert_coverage_sample(
file_id,
ctx.chunk.current_line,
*coverage_type,
hits,
hit_branches,
total_branches,
)
.expect("error inserting coverage sample"); // TODO handle error
let coverage_sample = ctx.db.report_builder.insert_coverage_sample(
file_id,
ctx.chunk.current_line,
*coverage_type,
hits,
hit_branches,
total_branches,
)?;

// We already created a `Context` for each session when processing the report
// JSON. Get the `Context` ID for the current session and then associate it with
Expand All @@ -86,42 +85,34 @@ fn save_line_session<R: Report, B: ReportBuilder<R>>(
None, /* branch_id */
None, /* method_id */
None, /* span_id */
);
)?;

// Check for and insert any additional branches data that we have.
if let Some(Some(missing_branches)) = &line_session.branches {
for branch in missing_branches {
let (branch_format, branch_serialized) = format_pyreport_branch(branch);
let _ = ctx
.db
.report_builder
.insert_branches_data(
file_id,
coverage_sample.id,
0, // Chunks file only records missing branches
branch_format,
branch_serialized,
)
.expect("error inserting branches data");
let _ = ctx.db.report_builder.insert_branches_data(
file_id,
coverage_sample.id,
0, // Chunks file only records missing branches
branch_format,
branch_serialized,
)?;
}
}

// Check for and insert any additional method data we have.
if let Some(Some(complexity)) = &line_session.complexity {
let (covered, total) = separate_pyreport_complexity(complexity);
let _ = ctx
.db
.report_builder
.insert_method_data(
file_id,
Some(coverage_sample.id),
Some(ctx.chunk.current_line),
None, /* hit_branches */
None, /* total_branches */
covered,
total,
)
.expect("error inserting method data");
let _ = ctx.db.report_builder.insert_method_data(
file_id,
Some(coverage_sample.id),
Some(ctx.chunk.current_line),
None, /* hit_branches */
None, /* total_branches */
covered,
total,
)?;
}

// Check for and insert any additional span data we have.
Expand All @@ -136,18 +127,15 @@ fn save_line_session<R: Report, B: ReportBuilder<R>>(
PyreportCoverage::HitCount(hits) => *hits as i64,
_ => 0,
};
ctx.db
.report_builder
.insert_span_data(
file_id,
Some(coverage_sample.id),
hits,
Some(ctx.chunk.current_line),
start_col.map(|x| x as i64),
Some(ctx.chunk.current_line),
end_col.map(|x| x as i64),
)
.expect("error inserting span data");
ctx.db.report_builder.insert_span_data(
file_id,
Some(coverage_sample.id),
hits,
Some(ctx.chunk.current_line),
start_col.map(|x| x as i64),
Some(ctx.chunk.current_line),
end_col.map(|x| x as i64),
)?;
}
}

Expand All @@ -160,7 +148,7 @@ fn save_line_session<R: Report, B: ReportBuilder<R>>(
pub fn save_report_line<R: Report, B: ReportBuilder<R>>(
report_line: &ReportLine,
ctx: &mut ParseCtx<R, B>,
) -> Result<(), ()> {
) -> Result<()> {
// Most of the data we save is at the `LineSession` level
for line_session in &report_line.sessions {
let coverage_sample = save_line_session(line_session, &report_line.coverage_type, ctx)?;
Expand All @@ -177,7 +165,7 @@ pub fn save_report_line<R: Report, B: ReportBuilder<R>>(
None,
None,
None,
);
)?;
}
}
}
Expand Down
14 changes: 7 additions & 7 deletions src/parsers/pyreport_shim/report_json.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use std::collections::HashMap;

use winnow::{
combinator::{cut_err, delimited, separated},
error::{ContextError, ErrMode},
error::{ContextError, ErrMode, ErrorKind, FromExternalError},
PResult, Parser, Stateful,
};

Expand Down Expand Up @@ -84,7 +84,6 @@ pub fn report_file<S: StrStream, R: Report, B: ReportBuilder<R>>(
) -> PResult<(usize, i64)> {
let (filename, file_summary) = delimited(ws, parse_kv, ws).parse_next(buf)?;

println!("{:?}", file_summary);
let Some(chunks_index) = file_summary
.get(0)
// winnow's f64 parser handles scientific notation and such OOTB so we use it for all
Expand All @@ -95,8 +94,11 @@ pub fn report_file<S: StrStream, R: Report, B: ReportBuilder<R>>(
return Err(ErrMode::Cut(ContextError::new()));
};

// TODO handle converting between error types
let file = buf.state.report_builder.insert_file(filename).unwrap();
let file = buf
.state
.report_builder
.insert_file(filename)
.map_err(|e| ErrMode::from_external_error(buf, ErrorKind::Fail, e))?;

Ok((chunks_index as usize, file.id))
}
Expand Down Expand Up @@ -182,14 +184,12 @@ pub fn report_session<S: StrStream, R: Report, B: ReportBuilder<R>>(
return Err(ErrMode::Cut(ContextError::new()));
};

// TODO handle error
let context = buf
.state
.report_builder
.insert_context(models::ContextType::Upload, ci_job)
.unwrap();
.map_err(|e| ErrMode::from_external_error(buf, ErrorKind::Fail, e))?;

// TODO handle error
Ok((session_index, context.id))
}

Expand Down
3 changes: 2 additions & 1 deletion src/report.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,11 @@ use mockall::automock;
pub mod models;

mod sqlite_report;
use rusqlite::Result;
pub use sqlite_report::*;
use uuid::Uuid;

use crate::error::Result;

#[cfg_attr(test, automock)]
pub trait Report {
fn list_files(&self) -> Result<Vec<models::SourceFile>>;
Expand Down
Loading

0 comments on commit fd14014

Please sign in to comment.