Setup infrastructure to update filetype
commit eee02d2ddb
parent f5c296a22e
@@ -7,7 +7,7 @@ pub mod types;
 use encoder::{LbaCalculatorFunction, LengthCalculatorFunction};
 use tool_helper::{format_if_error, Output, read_file};
-use types::{CDDesc, Directory, File, FileType, FileSystemMap, Properties, SharedPtr};
+use types::{layout::Layout, CDDesc, Directory, File, FileType, FileSystemMap, Properties, SharedPtr};
 
 pub type LBAEmbeddedFiles = Vec<SharedPtr<File>>;
 
@@ -28,18 +28,41 @@ pub fn process(config: config_reader::Configuration, calculate_lba: LbaCalculato
 }
 
 pub fn process_files(file_map: FileSystemMap, lba_embedded_files: LBAEmbeddedFiles, length_func: LengthCalculatorFunction) -> Result<(), Error> {
-    for lba_embedded_file in lba_embedded_files {
-        let mut lba_embedded_file = lba_embedded_file.borrow_mut();
+    for lba_embedded_file_raw in lba_embedded_files {
+        let new_content_info = {
+            let lba_embedded_file = lba_embedded_file_raw.borrow();
 
-        match &mut lba_embedded_file.content {
+            match &lba_embedded_file.content {
             FileType::Overlay(content, lba_names) => {
-                let _new_content = types::overlay::update_content(std::mem::take(content), std::mem::take(lba_names), &file_map, length_func)?;
+                    let new_content = types::overlay::update_content(content, lba_names, &file_map, length_func)?;
 
+                    Some(new_content)
             },
-            _ =>()
+                _ => None
+            }
+        };
+
+        if let Some(new_content) = new_content_info {
+            let old_size_info = length_func(&Layout::File(lba_embedded_file_raw.clone()));
+            lba_embedded_file_raw.borrow_mut().make_regular(new_content);
+            let new_size_info = length_func(&Layout::File(lba_embedded_file_raw.clone()));
+
+            if new_size_info.sectors != old_size_info.sectors {
+                let lba_embedded_file      = lba_embedded_file_raw.borrow();
+                let lba_embedded_file_name = lba_embedded_file.name.to_string();
+
+                if new_size_info.sectors < old_size_info.sectors {
+                    return Err(Error::from_text(format!("Failed converting Overlay \"{}\" because new size ({} sectors) is smaller than {} sectors! (This might be allowed in the future)", lba_embedded_file_name, new_size_info.sectors, old_size_info.sectors)));
+                }
+
+                else {
+                    return Err(Error::from_text(format!("Failed converting Overlay \"{}\" because new size ({} sectors) is bigger than {} sectors!", lba_embedded_file_name, new_size_info.sectors, old_size_info.sectors)));
+                }
+            }
         }
     }
 
-    Err(Error::not_implemented("process_files"))
+    Ok(())
 }
 
 pub fn dump_content(cd_desc: &CDDesc, mut out: Output) -> Result<(), Error> {
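Note on the reworked loop above: the shared file handle is first read through an immutable borrow inside the `new_content_info` block, and only afterwards mutated via `borrow_mut()` when `make_regular` is called. Assuming `SharedPtr` is an `Rc<RefCell<...>>`-style handle (suggested by the `borrow()`/`borrow_mut()`/`clone()` calls), keeping the two borrows in separate scopes is what avoids a runtime borrow panic. A minimal, self-contained sketch of the same pattern, using a hypothetical `Node` type in place of `File`:

    use std::cell::RefCell;
    use std::rc::Rc;

    // Stand-in for SharedPtr<File>; only illustrates the borrow scoping.
    struct Node {
        content: Vec<u8>,
    }

    fn main() {
        let shared = Rc::new(RefCell::new(Node { content: vec![1, 2, 3] }));

        // Read phase: the immutable borrow lives only inside this block,
        // so it is released before any mutable borrow is taken.
        let new_content = {
            let node = shared.borrow();
            Some(node.content.iter().map(|b| b.wrapping_add(1)).collect::<Vec<u8>>())
        };

        // Write phase: borrow_mut() is safe here; a still-active borrow()
        // from above would make RefCell panic at runtime instead.
        if let Some(new_content) = new_content {
            shared.borrow_mut().content = new_content;
        }

        assert_eq!(shared.borrow().content, vec![2, 3, 4]);
    }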
@@ -206,6 +206,11 @@ impl File {
         self.properties.get_padded_size()
     }
 
+    pub(super) fn make_regular(&mut self, content: Vec<u8>) {
+        self.properties.size_bytes = content.len();
+        self.content = FileType::Regular(content);
+    }
+
     fn new_from_content(file_name: &str, content: FileType, content_size: usize) -> Result<File, Error> {
         let mut file = File{name: FileName::from_str(file_name)?, properties: Properties::default(), parent_properties: None, content: content};
 
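The new `make_regular` helper switches a file's content from the `Overlay` variant to `Regular` and keeps the cached byte size in step with the new payload. A simplified, hypothetical miniature of that transition (the real `File` stores the size inside `Properties` and carries more fields):

    // Reduced copies of the enum/struct, just to show the state change.
    enum FileType {
        Regular(Vec<u8>),
        Overlay(Vec<u8>, Vec<String>),
    }

    struct File {
        size_bytes: usize,
        content: FileType,
    }

    impl File {
        // Mirrors the idea of File::make_regular(): replace the variant
        // and refresh the cached size from the new content.
        fn make_regular(&mut self, content: Vec<u8>) {
            self.size_bytes = content.len();
            self.content = FileType::Regular(content);
        }
    }

    fn main() {
        let mut file = File {
            size_bytes: 16,
            content: FileType::Overlay(vec![0; 16], vec!["MAIN.EXE;1".into()]),
        };

        file.make_regular(vec![0xAA; 32]);

        assert_eq!(file.size_bytes, 32);
        assert!(matches!(file.content, FileType::Regular(_)));
    }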
@@ -7,6 +7,8 @@ use tool_helper::{Error, format_if_error, read_file};
 
 pub type LBANameVec = Vec<String>;
 
+const COMPRESSION_LEVEL:u32 = 16;
+
 #[repr(packed)]
 struct OverlayHeader {
     pub start_adr: u32,
@@ -23,21 +25,17 @@ impl OverlayHeader {
 pub fn load_from(file_name: &str, file_path: PathBuf, lba_source: PathBuf) -> Result<File, Error> {
     let content = load_content(&file_path)?;
     let lba_names = load_lba_names(lba_source)?;
-    let content_size = format_if_error!(tool_helper::compress::lz4(&content, 16), "Compressing {} failed with \"{error_text}\"", file_path.to_string_lossy())?.len();
+    let content_size = format_if_error!(tool_helper::compress::lz4(&content, COMPRESSION_LEVEL), "Compressing {} failed with \"{error_text}\"", file_path.to_string_lossy())?.len();
 
     Ok(File::new_overlay(file_name, content, lba_names, content_size)?)
 }
 
-pub fn update_content(_content: Vec<u8>, lba_names: LBANameVec, file_map: &FileSystemMap, length_func: LengthCalculatorFunction) -> Result<Vec<u8>, Error> {
-    for mut lba_name in lba_names {
-        if lba_name.find(';').is_none() {
-            lba_name.push_str(";1");
-        }
-
-        if let Some(file) = file_map.get(&lba_name) {
-            let length_info = length_func(&Layout::File(file.clone()));
-
-            println!("Found: {} @{} ({})", lba_name, file.borrow().get_track_rel_lba(), length_info.sectors);
-        }
+pub fn update_content(content: &Vec<u8>, lba_names: &LBANameVec, file_map: &FileSystemMap, length_func: LengthCalculatorFunction) -> Result<Vec<u8>, Error> {
+    for lba_name in lba_names {
+        if let Some(file) = file_map.get(lba_name) {
+            let (lba, sector_count) = (file.borrow().get_track_rel_lba(), length_func(&Layout::File(file.clone())).sectors);
+
+            println!("Totally writing: {} with @{} ({})", lba_name, lba, sector_count);
+        }
 
         else {
@@ -45,7 +43,14 @@ pub fn update_content(_content: Vec<u8>, lba_names: LBANameVec, file_map: &FileS
         }
     }
 
-    Err(Error::not_implemented("update_overlay"))
+    //Ok(tool_helper::compress::lz4(content, COMPRESSION_LEVEL)?)
+    //^ real code - v to make it fail (hopefully)
+    let mut new_content = content.clone();
+    for _ in 0..2048 {
+        new_content.push(0x0);
+    }
+
+    Ok(new_content)
 }
 
 fn load_content(file_path: &PathBuf) -> Result<Vec<u8>, Error> {
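The commented-out `lz4` line is flagged as the real implementation; the temporary body below it appends 2048 zero bytes so that the sector comparison in `process_files` is expected to trip the "bigger than" error path. Assuming 2048-byte data sectors (the usual CD-ROM Mode 1 / Mode 2 Form 1 payload size; the actual `length_func` may compute this differently), adding exactly one sector's worth of bytes always raises the rounded-up sector count by one, as this small sketch with a hypothetical `sectors_for` helper shows:

    // Hypothetical helper mirroring the byte-to-sector rounding the size
    // check in process_files() depends on.
    const SECTOR_SIZE: usize = 2048;

    fn sectors_for(bytes: usize) -> usize {
        // Ceiling division: a partially filled sector still occupies a full sector.
        (bytes + SECTOR_SIZE - 1) / SECTOR_SIZE
    }

    fn main() {
        let old_len = 6_000;          // hypothetical compressed overlay size
        let new_len = old_len + 2048; // what the temporary padding produces

        assert_eq!(sectors_for(old_len), 3); // 6000 bytes -> 3 sectors
        assert_eq!(sectors_for(new_len), 4); // 8048 bytes -> 4 sectors, so the check fails
    }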
@@ -98,7 +103,11 @@ fn load_lba_names(lba_source: PathBuf) -> Result<LBANameVec, Error> {
         if let Some(mut start) = pair.find('"') {
             start += 1;
             if let Some(end) = pair[start..].find('"') {
-                lba_names.push(pair[start..(start + end)].to_owned());
+                let mut lba_name = pair[start..(start + end)].to_owned();
+                if lba_name.find(';').is_none() {
+                    lba_name.push_str(";1");
+                }
+                lba_names.push(lba_name);
             }
         }
     }
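`load_lba_names` now appends the ISO 9660 version suffix ";1" whenever it is missing, so the names handed to `update_content` already match the `NAME.EXT;1` form used as keys in the `FileSystemMap` (the per-iteration fix-up that used to live in `update_content` is gone). A tiny sketch of that normalization with a hypothetical helper:

    // Hypothetical helper showing the same ";1" normalization that
    // load_lba_names() now applies to every extracted name.
    fn normalize_lba_name(raw: &str) -> String {
        let mut name = raw.to_owned();
        if name.find(';').is_none() {
            name.push_str(";1"); // ISO 9660 file identifiers end in a version number
        }
        name
    }

    fn main() {
        assert_eq!(normalize_lba_name("OVERLAY.BIN"), "OVERLAY.BIN;1");
        assert_eq!(normalize_lba_name("OVERLAY.BIN;1"), "OVERLAY.BIN;1"); // already versioned
    }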