
Commit 0d801ef: Fix some bugs

bagedevimo committed Jun 6, 2019
1 parent d855265
Showing 3 changed files with 67 additions and 24 deletions.
77 changes: 53 additions & 24 deletions src/git/connection.rs
@@ -199,9 +199,9 @@ fn parse_pack_object_record(
let record_type = (byte >> 4) & 0x7;

let record: crate::git::database::Record = match record_type {
RECORD_TYPE_COMMIT => crate::git::record::parse_commit(inflate_record_data(reader)),
RECORD_TYPE_TREE => crate::git::record::parse_tree(inflate_record_data(reader)),
RECORD_TYPE_BLOB => crate::git::record::parse_blob(inflate_record_data(reader)),
RECORD_TYPE_COMMIT => crate::git::record::parse_commit(inflate_record_data(reader).0),
RECORD_TYPE_TREE => crate::git::record::parse_tree(inflate_record_data(reader).0),
RECORD_TYPE_BLOB => crate::git::record::parse_blob(inflate_record_data(reader).0),
RECORD_TYPE_OFS_DELTA => {
// let (byte, value) = crate::git::connection::read_variable_length_int(reader);
// eprintln!("byte: {:?}, value: {:?}", byte, value);
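As an aside on the hunk above: record_type comes from git's pack entry header, where the first byte carries the object type in bits 4-6 and the low four bits of the inflated size, with the high bit signalling that further size bytes follow. A hedged sketch of that decode, separate from this repository's code (read_entry_header and its error handling are illustrative):

use std::io::Read;

// Decode a pack entry header: type in bits 4-6 of the first byte, the low
// 4 bits of the inflated size in bits 0-3, and 7 more size bits per
// continuation byte while the high bit stays set.
fn read_entry_header<R: Read>(reader: &mut R) -> std::io::Result<(u8, u64)> {
    let mut byte = [0u8; 1];
    reader.read_exact(&mut byte)?;

    let record_type = (byte[0] >> 4) & 0x7;
    let mut size = (byte[0] & 0xf) as u64;
    let mut shift = 4;

    while byte[0] & 0x80 != 0 {
        reader.read_exact(&mut byte)?;
        size |= ((byte[0] & 0x7f) as u64) << shift;
        shift += 7;
    }

    Ok((record_type, size)) // e.g. type 1 for a commit, 2 for a tree, 3 for a blob
}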
@@ -235,18 +235,15 @@ fn inflate_xdelta_record<T: Read>(mut reader: &mut T, database: &mut Database) -
reader.read(&mut oid_bytes);

let source_id = crate::git::database::ObjectID::from_oid_bytes(oid_bytes);
let source_object = match database.fetch(&source_id) {
Some(o) => o,
None => panic!("Can't inflate object without source ({})", source_id),
};
let source_object = database.fetch(&source_id);

// let (byte, value) = crate::git::connection::read_variable_length_int(reader);
// eprintln!("byte: {:?}, value: {:?}", byte, value as i8);

// let (byte, value) = crate::git::connection::read_variable_length_int(reader);
// eprintln!("byte: {:?}, value: {:?}", byte, value as i8);

let bytes = inflate_record_data(reader);
let (bytes, compressed_byte_count) = inflate_record_data(&mut reader);
let mut secondary_cursor = std::io::Cursor::new(bytes.clone());

let (_, v1) = crate::git::connection::read_variable_length_int(&mut secondary_cursor, 7);
@@ -265,8 +262,9 @@ fn inflate_xdelta_record<T: Read>(mut reader: &mut T, database: &mut Database) -

// let mut buffer = vec![0u8; size as usize];
// reader.read_exact(&mut buffer).unwrap();

// eprintln!("Read: {:?}", buffer);
if bytes.len() == 20 {
eprintln!("Insert: {}", peek[0]);
}

out_buffer.push(peek[0]);
} else {
@@ -275,35 +273,55 @@ fn inflate_xdelta_record<T: Read>(mut reader: &mut T, database: &mut Database) -
let offset = value & 0xffffffff;
let size = value >> 32;

if bytes.len() == 20 {
eprintln!("Copy: {} -> {}", offset, offset + size);
}

let actual_size = if size == 0 {
crate::git::connection::GIT_MAX_COPY
} else {
size
};

let data = match source_object {
Record::Commit { data, .. } => data,
Record::Tree { data, .. } => data,
Record::Blob { data, .. } => data,
match source_object {
Some(so) => {
let data = match so {
Record::Commit { data, .. } => data,
Record::Tree { data, .. } => data,
Record::Blob { data, .. } => data,
};

let mut bytes_to_copy: Vec<u8> = vec![0; size as usize];

bytes_to_copy.copy_from_slice(&data[offset as usize..(offset + size) as usize]);
// let bytes_to_copy = data[offset as usize..size as usize];

// eprintln!("Copying\n{}\n\n", String::from_utf8_lossy(&bytes_to_copy));
out_buffer.append(&mut bytes_to_copy);
}
None => {
eprintln!(
"WARNING: Forced to skip XDELTA decompression because we can't find {}",
source_id
);
}
};

let mut bytes_to_copy: Vec<u8> = vec![0; size as usize];

bytes_to_copy.copy_from_slice(&data[offset as usize..(offset + size) as usize]);
// let bytes_to_copy = data[offset as usize..size as usize];

// eprintln!("Copying\n{}\n\n", String::from_utf8_lossy(&bytes_to_copy));
out_buffer.append(&mut bytes_to_copy);
}
}

if out_buffer.len() > 300 && out_buffer.len() < 400 {
eprintln!("{:?}", out_buffer);
eprintln!("=============({})\n{:?}", bytes.len(), bytes);
}

out_buffer
}

fn inflate_record_data<T: Read>(reader: &mut T) -> Vec<u8> {
fn inflate_record_data<T: Read>(reader: &mut T) -> (Vec<u8>, u64) {
let mut deflater = flate2::Decompress::new(true);

let mut output: Vec<u8> = Vec::with_capacity(65000 as usize);
let mut input: Vec<u8> = Vec::new();

loop {
let mut in_byte: [u8; 1] = [0; 1];
@@ -313,6 +331,8 @@ fn inflate_record_data<T: Read>(reader: &mut T) -> Vec<u8> {
Err(e) => panic!("Unexpected EOF inflating DEFLATE stream: {}", e),
}

input.push(in_byte[0]);

let status = deflater.decompress_vec(&in_byte, &mut output, flate2::FlushDecompress::None);

match status {
@@ -323,5 +343,14 @@
}
}

output
if deflater.total_out() > 300 && deflater.total_out() < 400 {
eprintln!(
"Inflate finished, inflated to {} bytes:\n==============\n{:?}\n------------------\n{:?}\n\n",
deflater.total_in(),
input,
output,
);
}

(output, deflater.total_in())
}
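The inflate_record_data change above is the core of this commit: the function now also reports how many compressed bytes it consumed (flate2's total_in()), so callers can tell where the next pack entry starts, which is why the parse_commit/parse_tree/parse_blob call sites now take .0 of the returned pair. A minimal standalone sketch of that byte-at-a-time inflation pattern, assuming the flate2 crate is available (inflate_stream and its buffer sizing are illustrative, not the repository's API):

use std::io::Read;

// Inflate one zlib stream from `reader`, feeding it a byte at a time so the
// read never runs past the end of the compressed data, and return the
// inflated bytes together with the number of compressed bytes consumed.
fn inflate_stream<R: Read>(reader: &mut R) -> std::io::Result<(Vec<u8>, u64)> {
    let mut decompress = flate2::Decompress::new(true); // true = expect a zlib header
    let mut output: Vec<u8> = Vec::with_capacity(64 * 1024);

    loop {
        let mut byte = [0u8; 1];
        reader.read_exact(&mut byte)?;

        let status = decompress
            .decompress_vec(&byte, &mut output, flate2::FlushDecompress::None)
            .expect("corrupt DEFLATE stream");

        match status {
            flate2::Status::StreamEnd => break, // this zlib stream is finished
            _ => {
                // Ok / BufError just mean flate2 wants more input or more output
                // space; grow the buffer if decompress_vec filled it and continue.
                if output.len() == output.capacity() {
                    output.reserve(64 * 1024);
                }
            }
        }
    }

    // total_in() counts exactly the compressed bytes consumed from `reader`.
    Ok((output, decompress.total_in()))
}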
13 changes: 13 additions & 0 deletions src/git/database.rs
@@ -1,6 +1,7 @@
use crypto::digest::Digest;
use crypto::sha1::Sha1;
use std::collections::HashMap;
use std::io::Write;

pub struct Database {
pub entries: HashMap<ObjectID, Record>,
@@ -133,5 +134,17 @@ pub fn get_object_id(record: &Record) -> String {

let mut hasher = Sha1::new();
hasher.input(&hash_data);

// let mut f = std::fs::File::open("dumped_object").unwrap();
// f.write(&hash_data);

if size == 377 {
std::fs::write("dumped_object", &hash_data);
eprintln!(
"{} is \n{:?}\n\n",
hasher.result_str(),
hex::encode(hash_data)
);
}
hasher.result_str()
}
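get_object_id hashes the record in git's canonical object form: the type name, a space, the decimal content length, a NUL byte, then the raw content, all fed to SHA-1. A hedged blob-only sketch using the same rust-crypto calls as this file (blob_object_id is an illustrative helper, not part of the repository):

use crypto::digest::Digest;
use crypto::sha1::Sha1;

// Git object id of a blob: SHA-1 over "blob <len>\0" followed by the content.
// Commits and trees use the same layout with "commit" / "tree" as the type name.
fn blob_object_id(content: &[u8]) -> String {
    let mut hash_data: Vec<u8> = Vec::new();
    hash_data.extend_from_slice(format!("blob {}\0", content.len()).as_bytes());
    hash_data.extend_from_slice(content);

    let mut hasher = Sha1::new();
    hasher.input(&hash_data);
    hasher.result_str() // 40-character lowercase hex digest
}

// Sanity check: blob_object_id(b"hello\n") == "ce013625030ba8dba906f756967f9e9ca394464a",
// matching `echo hello | git hash-object --stdin`.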
1 change: 1 addition & 0 deletions src/git/record.rs
@@ -49,6 +49,7 @@ pub fn parse_blob(d: Vec<u8>) -> Record {
}

pub fn parse_ofs_delta(d: Vec<u8>) -> Record {
panic!("OFSDelta is not implemented!");
Record::Blob { data: Vec::from(d) }
}
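parse_ofs_delta now panics because OFS_DELTA entries are still unimplemented; such an entry begins with a negative offset to its base object, stored in the pack format's modified base-128 encoding. A hedged sketch of that decode for reference (read_ofs_delta_offset is illustrative, not this repository's API):

use std::io::Read;

// Decode the "offset to base object" that precedes an OFS_DELTA entry's
// deflated delta data. Each byte carries 7 payload bits; while the high bit
// is set another byte follows, and the accumulated value is incremented
// before each shift so multi-byte encodings have no redundant forms.
fn read_ofs_delta_offset<R: Read>(reader: &mut R) -> std::io::Result<u64> {
    let mut byte = [0u8; 1];
    reader.read_exact(&mut byte)?;
    let mut offset = (byte[0] & 0x7f) as u64;

    while byte[0] & 0x80 != 0 {
        reader.read_exact(&mut byte)?;
        offset = ((offset + 1) << 7) | (byte[0] & 0x7f) as u64;
    }

    // The delta's base object starts `offset` bytes before this entry's header.
    Ok(offset)
}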

