Implement writing LBA header
This commit is contained in:
parent eee02d2ddb
commit f0fff92ca5
@@ -6,7 +6,6 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-byteorder = "*"
 cdtypes = {path = "../cdtypes"}
 clap = {version = "*", features = ["derive"]}
 no-comment = "*"
@@ -30,9 +30,9 @@ pub fn process(config: config_reader::Configuration, calculate_lba: LbaCalculato
 pub fn process_files(file_map: FileSystemMap, lba_embedded_files: LBAEmbeddedFiles, length_func: LengthCalculatorFunction) -> Result<(), Error> {
     for lba_embedded_file_raw in lba_embedded_files {
         let new_content_info = {
-            let lba_embedded_file = lba_embedded_file_raw.borrow();
+            let mut lba_embedded_file = lba_embedded_file_raw.borrow_mut();

-            match &lba_embedded_file.content {
+            match &mut lba_embedded_file.content {
                 FileType::Overlay(content, lba_names) => {
                     let new_content = types::overlay::update_content(content, lba_names, &file_map, length_func)?;
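Note: the borrow() to borrow_mut() switch above is what allows the overlay bytes to be patched in place further down. A minimal, self-contained sketch of that RefCell pattern, with an Rc<RefCell<Vec<u8>>> standing in for the project's LBAEmbeddedFiles entries (the stand-in type is an assumption, not the real one):

use std::cell::RefCell;
use std::rc::Rc;

fn main() {
    // Hypothetical stand-in for one LBA-embedded file entry.
    let file = Rc::new(RefCell::new(vec![0u8; 16]));

    // borrow() only yields a shared view; borrow_mut() is needed to
    // patch the content in place, which is what the commit now does.
    let mut content = file.borrow_mut();
    content[0] = 0xAA;

    drop(content); // release the mutable borrow before reading again
    assert_eq!(file.borrow()[0], 0xAA);
}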
@@ -1,7 +1,6 @@
 use super::{layout::Layout, File, FileSystemMap};
 use super::super::encoder::LengthCalculatorFunction;
 use std::path::PathBuf;
-use byteorder::{ByteOrder, LittleEndian};
 use no_comment::{IntoWithoutComments as _, languages};
 use tool_helper::{Error, format_if_error, read_file};

@@ -11,14 +10,29 @@ const COMPRESSION_LEVEL:u32 = 16;
 #[repr(packed)]
 struct OverlayHeader {
-    pub start_adr: u32,
-    pub _lba_count: u16,
+    _start_adr: u32,
+    lba_count: u16,
 }

 impl OverlayHeader {
-    pub fn lba_count_offset() -> usize {
-        let dummy = OverlayHeader{start_adr: 0, _lba_count: 0}.start_adr;
-        std::mem::size_of_val(&dummy)
+    pub fn read_lba_count(&self) -> usize {
+        u16::from_le(self.lba_count) as usize
     }
 }

+#[repr(packed)]
+struct LBAEntry {
+    lba: u16,
+    sectors: u16,
+}
+
+impl LBAEntry {
+    pub fn write_entry(&mut self, lba: u16, sectors: u16) {
+        let lba = lba.to_le_bytes();
+        let sectors = sectors.to_le_bytes();
+
+        self.lba = u16::from_ne_bytes(lba);
+        self.sectors = u16::from_ne_bytes(sectors);
+    }
+}
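For orientation, the packed OverlayHeader above is laid directly over the first bytes of an overlay image, with the LBA entry table following right behind it and the count stored little-endian. A standalone sketch of that layout; the struct mirrors the diff, while the free function read_lba_count and the sample bytes are made up for illustration:

// Packed header as in the diff: 4-byte start address, 2-byte LBA count.
#[allow(dead_code)]
#[repr(packed)]
struct OverlayHeader {
    _start_adr: u32, // start address field from the diff (unused here)
    lba_count:  u16, // number of LBAEntry slots, stored little-endian
}

// Illustrative safe read of the count straight from the raw bytes,
// using the field offset (4) instead of transmuting the buffer.
fn read_lba_count(bytes: &[u8]) -> Option<usize> {
    let raw = bytes.get(4..6)?;
    Some(u16::from_le_bytes([raw[0], raw[1]]) as usize)
}

fn main() {
    // With repr(packed) there is no padding: 4 + 2 = 6 bytes.
    assert_eq!(std::mem::size_of::<OverlayHeader>(), 6);

    // Fake overlay image: start address, lba_count = 3, then entry bytes.
    let image: [u8; 10] = [0x00, 0x80, 0x01, 0x80, 0x03, 0x00, 0, 0, 0, 0];
    assert_eq!(read_lba_count(&image), Some(3));
}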
@@ -30,12 +44,21 @@ pub fn load_from(file_name: &str, file_path: PathBuf, lba_source: PathBuf) -> Re
     Ok(File::new_overlay(file_name, content, lba_names, content_size)?)
 }

-pub fn update_content(content: &Vec<u8>, lba_names: &LBANameVec, file_map: &FileSystemMap, length_func: LengthCalculatorFunction) -> Result<Vec<u8>, Error> {
+pub fn update_content(content: &mut Vec<u8>, lba_names: &LBANameVec, file_map: &FileSystemMap, length_func: LengthCalculatorFunction) -> Result<Vec<u8>, Error> {
+    let (lba_header, lba_count) = skip_to_lba_header(content);
+    let lba_header = unsafe{std::slice::from_raw_parts_mut(lba_header.as_mut_ptr() as *mut LBAEntry, lba_count)};
+
+    let mut idx = 0;
     for lba_name in lba_names {
         if let Some(file) = file_map.get(lba_name) {
             let (lba, sector_count) = (file.borrow().get_track_rel_lba(), length_func(&Layout::File(file.clone())).sectors);

             println!("Totally writing: {} with @{} ({})", lba_name, lba, sector_count);
+            if idx >= lba_count {
+                return Err(Error::from_text(format!("Trying to write more LBAs then there is space!")));
+            }
+
+            lba_header[idx].write_entry(lba as u16, sector_count as u16);
+            idx += 1;
         }

         else {
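The key step in the new update_content is the unsafe cast that reinterprets the LBA header byte range as &mut [LBAEntry] so each slot can be written in place. A self-contained sketch of that cast; the entry layout matches the diff, while the buffer and the values written are illustrative:

// Packed 2x u16 entry as in the diff (size 4, alignment 1).
#[allow(dead_code)]
#[repr(packed)]
struct LBAEntry {
    lba:     u16,
    sectors: u16,
}

impl LBAEntry {
    fn write_entry(&mut self, lba: u16, sectors: u16) {
        // Store both fields as little-endian byte patterns, independent
        // of the host's endianness.
        self.lba     = u16::from_ne_bytes(lba.to_le_bytes());
        self.sectors = u16::from_ne_bytes(sectors.to_le_bytes());
    }
}

fn main() {
    // Pretend this is the LBA header region of an overlay (2 entries).
    let mut bytes = vec![0u8; 2 * std::mem::size_of::<LBAEntry>()];

    // The cast is sound here because the packed entry has alignment 1
    // and the buffer was sized for exactly this many entries.
    let entries = unsafe {
        std::slice::from_raw_parts_mut(bytes.as_mut_ptr() as *mut LBAEntry, 2)
    };

    entries[0].write_entry(16, 4);
    entries[1].write_entry(20, 8);

    // First entry is stored as little-endian bytes: lba = 16, sectors = 4.
    assert_eq!(&bytes[0..4], &[16u8, 0, 4, 0]);
}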
@@ -43,28 +66,17 @@ pub fn update_content(content: &Vec<u8>, lba_names: &LBANameVec, file_map: &File
         }
     }

-    //Ok(tool_helper::compress::lz4(content, COMPRESSION_LEVEL)?)
-    //^ real code - v to make it fail (hopefully)
-    let mut new_content = content.clone();
-    for _ in 0..2048 {
-        new_content.push(0x0);
-    }
-
-    Ok(new_content)
+    Ok(tool_helper::compress::lz4(content, COMPRESSION_LEVEL)?)
 }

 fn load_content(file_path: &PathBuf) -> Result<Vec<u8>, Error> {
-    let overlay_header_size = std::mem::size_of::<OverlayHeader>();
     let mut content = read_file(&file_path)?;

-    if content.len() < overlay_header_size {
+    if content.len() < std::mem::size_of::<OverlayHeader>() {
         return Err(Error::from_text(format!("Overlay {} has no header!", file_path.to_string_lossy())));
     }

-    let lba_count = LittleEndian::read_u16(&content[OverlayHeader::lba_count_offset()..]) as usize;
-    let lba_header_size = lba_count*std::mem::size_of::<u32>();
-    let lba_header = &mut content[overlay_header_size..(overlay_header_size+lba_header_size)];
-
+    let (lba_header, lba_count) = skip_to_lba_header(&mut content);
     if lba_header.is_empty() {
         return Err(Error::from_text(format!("LBA header of overlay {} is smaller then {} elements", file_path.to_string_lossy(), lba_count)));
     }
@@ -78,6 +90,15 @@ fn load_content(file_path: &PathBuf) -> Result<Vec<u8>, Error> {
     Ok(content)
 }

+fn skip_to_lba_header(content: &mut Vec<u8>) -> (&mut [u8], usize) {
+    let overlay_header_size = std::mem::size_of::<OverlayHeader>();
+
+    let lba_count = unsafe{std::mem::transmute::<&u8, &OverlayHeader>(&content[0])}.read_lba_count();
+    let lba_header_size = lba_count*std::mem::size_of::<LBAEntry>();
+
+    (&mut content[overlay_header_size..(overlay_header_size+lba_header_size)], lba_count)
+}
+
 fn load_lba_names(lba_source: PathBuf) -> Result<LBANameVec, Error> {
     const LBA_DECLARATION:&'static str = "__jabyengine_request_lba_for";
     fn get_part_of_interest(file: String, lba_source: &PathBuf) -> Result<String, Error> {
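One caveat with skip_to_lba_header as committed: the range index into content panics if the overlay is shorter than header plus table, and reading the count relies on transmuting &content[0] into the packed header. A hedged sketch of a bounds-checked variant; the sizes follow from the packed structs in the diff, but the function name and error type here are illustrative, not the project's API:

// Bounds-checked slicing of the LBA table region; returns an error
// instead of panicking when the buffer is too short.
fn lba_header_window(content: &mut [u8], lba_count: usize) -> Result<&mut [u8], String> {
    const HEADER_SIZE: usize = 6; // size_of::<OverlayHeader>() with repr(packed)
    const ENTRY_SIZE:  usize = 4; // size_of::<LBAEntry>()   with repr(packed)

    let have = content.len();
    let end  = HEADER_SIZE + lba_count * ENTRY_SIZE;

    content
        .get_mut(HEADER_SIZE..end)
        .ok_or_else(|| format!("overlay truncated: need {} bytes, have {}", end, have))
}

fn main() {
    let mut ok  = vec![0u8; 6 + 2 * 4]; // header + 2 entries
    let mut bad = vec![0u8; 6 + 1];     // header + truncated table

    assert!(lba_header_window(&mut ok, 2).is_ok());
    assert!(lba_header_window(&mut bad, 2).is_err());
}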