Compress files into a single coverage.zip and update upload logic accordingly #1528

Merged (9 commits, Feb 27, 2025)
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

3 changes: 3 additions & 0 deletions qlty-cloud/Cargo.toml
@@ -32,3 +32,6 @@ ureq.workspace = true
 uuid.workspace = true
 webbrowser.workspace = true
 zip.workspace = true
+
+[dev-dependencies]
+tempfile = "3.2"
122 changes: 94 additions & 28 deletions qlty-cloud/src/export/coverage.rs
@@ -1,12 +1,13 @@
-use crate::format::{GzFormatter, JsonEachRowFormatter, JsonFormatter};
+use crate::format::{JsonEachRowFormatter, JsonFormatter};
 use anyhow::{Context, Result};
 use qlty_types::tests::v1::{CoverageMetadata, FileCoverage, ReportFile};
+use std::collections::HashMap;
 use std::fs::File;
 use std::io::Read;
 use std::path::{Path, PathBuf};
 use zip::{write::FileOptions, ZipWriter};
 
-fn compress_files(files: Vec<String>, output_file: &Path) -> Result<()> {
+fn compress_files(files: HashMap<String, PathBuf>, output_file: &Path) -> Result<()> {
     // Create the output ZIP file
     let zip_file = File::create(output_file)?;
     let mut zip = ZipWriter::new(zip_file);
@@ -15,20 +16,16 @@ fn compress_files(files: Vec<String>, output_file: &Path) -> Result<()> {
         .compression_method(zip::CompressionMethod::Deflated) // Compression method
         .unix_permissions(0o755);
 
-    // Iterate over the list of files to compress
-    for file_path in files {
-        let path = Path::new(&file_path);
-
-        if path.is_file() {
+    for (name, file_path) in &files {
+        if file_path.is_file() {
             // Add the file to the archive
-            // Use path as filename in case multiple files with same name
-            zip.start_file(path.to_string_lossy(), options)?;
+            zip.start_file(name, options)?;
 
             // Write the file content to the archive
-            let mut file = File::open(path)?;
+            let mut file = File::open(file_path)?;
             std::io::copy(&mut file, &mut zip)?;
         } else {
-            eprintln!("Skipping non-file: {}", file_path);
+            eprintln!("Skipping non-file: {}", file_path.to_string_lossy());
         }
     }
 
@@ -54,34 +51,44 @@ impl CoverageExport {
     fn export(&self) -> Result<()> {
         let directory = self.to.as_ref().unwrap();
 
-        GzFormatter::new(JsonEachRowFormatter::new(self.report_files.clone()))
-            .write_to_file(&directory.join("report_files.json.gz"))?;
+        JsonEachRowFormatter::new(self.report_files.clone())
+            .write_to_file(&directory.join("report_files.jsonl"))?;
 
-        GzFormatter::new(JsonEachRowFormatter::new(self.file_coverages.clone()))
-            .write_to_file(&directory.join("file_coverages.json.gz"))?;
+        JsonEachRowFormatter::new(self.file_coverages.clone())
+            .write_to_file(&directory.join("file_coverages.jsonl"))?;
 
         JsonFormatter::new(self.metadata.clone())
             .write_to_file(&directory.join("metadata.json"))?;
 
-        let raw_file_paths = self
-            .report_files
-            .iter()
-            .map(|report_file| &report_file.path)
-            .cloned()
-            .collect();
+        let zip_file_contents = self.compute_zip_file_contents(directory)?;
 
-        compress_files(raw_file_paths, &directory.join("raw_files.zip"))
+        compress_files(zip_file_contents, &directory.join("coverage.zip"))
     }
 
     pub fn total_size_bytes(&self) -> Result<u64> {
-        let mut bytes: u64 = 0;
-
-        bytes += self.read_file("report_files.json.gz")?.len() as u64;
-        bytes += self.read_file("file_coverages.json.gz")?.len() as u64;
-        bytes += self.read_file("metadata.json")?.len() as u64;
-        bytes += self.read_file("raw_files.zip")?.len() as u64;
-
-        Ok(bytes)
+        Ok(self.read_file("coverage.zip")?.len() as u64)
+    }
+
+    fn compute_zip_file_contents(&self, directory: &Path) -> Result<HashMap<String, PathBuf>> {
+        let mut files_to_zip = HashMap::new();
+
+        files_to_zip.insert(
+            "report_files.jsonl".to_string(),
+            directory.join("report_files.jsonl"),
+        );
+        files_to_zip.insert(
+            "file_coverages.jsonl".to_string(),
+            directory.join("file_coverages.jsonl"),
+        );
+        files_to_zip.insert("metadata.json".to_string(), directory.join("metadata.json"));
+
+        for report_file in &self.report_files {
+            let actual_path = PathBuf::from(&report_file.path);
+            let zip_file_name = PathBuf::from("raw_files").join(&report_file.path);
+            files_to_zip.insert(zip_file_name.to_string_lossy().into_owned(), actual_path);
+        }
+
+        Ok(files_to_zip)
     }
 
     pub fn read_file<P: AsRef<Path>>(&self, filename: P) -> Result<Vec<u8>> {
@@ -97,3 +104,62 @@ impl CoverageExport {
         Ok(buffer)
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::io::Write;
+    use tempfile::{tempdir, TempDir};
+    use zip::read::ZipArchive;
+
+    #[test]
+    fn test_export_to() {
+        let destination_binding = tempdir().unwrap();
+        let destination = destination_binding.path();
+
+        let raw_files_temp_binding = TempDir::new_in(".").unwrap();
+        let raw_files_dir = raw_files_temp_binding.path();
+
+        let f1 = &raw_files_dir.join("coverage.lcov");
+        let mut file = File::create(f1).unwrap();
+        writeln!(file, "D").unwrap();
+
+        let metadata = CoverageMetadata::default();
+        let report_files = vec![ReportFile {
+            path: raw_files_dir
+                .file_name()
+                .map(|name| {
+                    Path::new(name)
+                        .join("coverage.lcov")
+                        .to_string_lossy()
+                        .into_owned()
+                })
+                .unwrap_or_default(),
+            ..Default::default()
+        }];
+        let file_coverages = vec![FileCoverage::default()];
+
+        let mut export = CoverageExport {
+            metadata,
+            report_files,
+            file_coverages,
+            to: None,
+        };
+
+        export.export_to(Some(destination.to_path_buf())).unwrap();
+
+        assert!(destination.join("coverage.zip").exists());
+
+        // Verify the contents of the zip file
+        let zip_file = File::open(destination.join("coverage.zip")).unwrap();
+        let mut zip = ZipArchive::new(zip_file).unwrap();
+        assert!(zip.by_name("report_files.jsonl").is_ok());
+        assert!(zip.by_name("file_coverages.jsonl").is_ok());
+        assert!(zip.by_name("metadata.json").is_ok());
+        let raw_file_path = format!(
+            "raw_files/{}/coverage.lcov",
+            raw_files_dir.file_name().unwrap().to_string_lossy()
+        );
+        assert!(zip.by_name(&raw_file_path).is_ok());
+    }
+}
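For reference, coverage.zip is now the single export artifact: it contains report_files.jsonl, file_coverages.jsonl, metadata.json, and one raw_files/<original path> entry per raw coverage report. Below is a minimal sketch of listing those entries with the zip crate; the hard-coded coverage.zip path and the main wrapper are illustrative, not part of this PR.

use std::fs::File;
use zip::read::ZipArchive;

fn main() -> anyhow::Result<()> {
    // Open an archive produced by CoverageExport::export_to (path is illustrative)
    let file = File::open("coverage.zip")?;
    let mut archive = ZipArchive::new(file)?;

    // Expected entries: report_files.jsonl, file_coverages.jsonl, metadata.json,
    // plus raw_files/<path> for each raw coverage report
    for i in 0..archive.len() {
        let entry = archive.by_index(i)?;
        println!("{} ({} bytes)", entry.name(), entry.size());
    }

    Ok(())
}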
76 changes: 8 additions & 68 deletions qlty-coverage/src/publish/upload.rs
@@ -12,63 +12,24 @@ const LEGACY_API_URL: &str = "https://qlty.sh/api";
 pub struct Upload {
     pub id: String,
     pub project_id: String,
-    pub file_coverages_url: String,
-    pub report_files_url: String,
-    pub metadata_url: String,
-    pub raw_files_url: String,
+    pub coverage_url: String,
 }
 
 impl Upload {
     pub fn prepare(token: &str, report: &mut Report) -> Result<Self> {
         let response = Self::request_api(&report.metadata, token)?;
 
-        let file_coverages_url = response
+        let coverage_url = response
             .get("data")
-            .and_then(|data| data.get("file_coverages.json.gz"))
+            .and_then(|data| data.get("coverage.zip"))
             .and_then(|upload_url| upload_url.as_str())
             .with_context(|| {
                 format!(
-                    "Unable to find file coverages URL in response body: {:?}",
+                    "Unable to find coverage URL in response body: {:?}",
                     response
                 )
             })
-            .context("Failed to extract file coverages URL from response")?;
-
-        let report_files_url = response
-            .get("data")
-            .and_then(|data| data.get("report_files.json.gz"))
-            .and_then(|upload_url| upload_url.as_str())
-            .with_context(|| {
-                format!(
-                    "Unable to find report files URL in response body: {:?}",
-                    response
-                )
-            })
-            .context("Failed to extract report files URL from response")?;
-
-        let metadata_url = response
-            .get("data")
-            .and_then(|data| data.get("metadata.json"))
-            .and_then(|upload_url| upload_url.as_str())
-            .with_context(|| {
-                format!(
-                    "Unable to find metadata URL in response body: {:?}",
-                    response
-                )
-            })
-            .context("Failed to extract metadata URL from response")?;
-
-        let raw_files_url = response
-            .get("data")
-            .and_then(|data| data.get("raw_files.zip"))
-            .and_then(|upload_url| upload_url.as_str())
-            .with_context(|| {
-                format!(
-                    "Unable to find metadata URL in response body: {:?}",
-                    response
-                )
-            })
-            .context("Failed to extract metadata URL from response")?;
+            .context("Failed to extract coverage URL from response")?;
 
         let id = response
             .get("data")
@@ -90,36 +51,15 @@ impl Upload {
         Ok(Self {
             id: id.to_string(),
             project_id: project_id.to_string(),
-            file_coverages_url: file_coverages_url.to_string(),
-            report_files_url: report_files_url.to_string(),
-            metadata_url: metadata_url.to_string(),
-            raw_files_url: raw_files_url.to_string(),
+            coverage_url: coverage_url.to_string(),
         })
     }
 
     pub fn upload(&self, export: &CoverageExport) -> Result<()> {
-        self.upload_data(
-            &self.file_coverages_url,
-            "application/gzip",
-            export.read_file(PathBuf::from("file_coverages.json.gz"))?,
-        )?;
-
-        self.upload_data(
-            &self.report_files_url,
-            "application/gzip",
-            export.read_file(PathBuf::from("report_files.json.gz"))?,
-        )?;
-
-        self.upload_data(
-            &self.metadata_url,
-            "application/json",
-            export.read_file(PathBuf::from("metadata.json"))?,
-        )?;
-
         self.upload_data(
-            &self.raw_files_url,
+            &self.coverage_url,
             "application/zip",
-            export.read_file(PathBuf::from("raw_files.zip"))?,
+            export.read_file(PathBuf::from("coverage.zip"))?,
         )?;
 
         Ok(())
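With a single archive, the upload step reduces to one PUT of coverage.zip against the presigned coverage_url. The upload_data helper itself is not shown in this diff; the sketch below is only an approximation of such a request using the ureq 2.x API (ureq already appears in the workspace dependencies), with the URL and file path supplied by the caller.

use anyhow::Result;
use std::fs;

// Rough sketch; the real upload_data helper is outside this diff and may differ.
fn put_coverage_zip(coverage_url: &str, zip_path: &str) -> Result<()> {
    let bytes = fs::read(zip_path)?;

    // One request replaces the four separate uploads removed above
    ureq::put(coverage_url)
        .set("Content-Type", "application/zip")
        .send_bytes(&bytes)?;

    Ok(())
}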