Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Cleanup comments and cargo fmt #290

Merged
merged 3 commits into from
Aug 7, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 6 additions & 14 deletions examples/add_barcode.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,26 +70,18 @@ fn generate_operations(rects: Vec<(f64, f64, f64, f64, u8)>) -> String {

#[cfg(not(feature = "async"))]
fn load_pdf<P: AsRef<Path>>(path: P) -> Result<Document, Error> {
Document::load(path)
.map_err(|e| Error::new(ErrorKind::Other, e.to_string()))
Document::load(path).map_err(|e| Error::new(ErrorKind::Other, e.to_string()))
}

#[cfg(feature = "async")]
fn load_pdf<P: AsRef<Path>>(path: P) -> Result<Document, Error> {
Ok(
Builder::new_current_thread()
.build()
.unwrap()
.block_on(async move {
Document::load(path)
.await
.map_err(|e| Error::new(ErrorKind::Other, e.to_string()))
})?
)
Ok(Builder::new_current_thread().build().unwrap().block_on(async move {
Document::load(path)
.await
.map_err(|e| Error::new(ErrorKind::Other, e.to_string()))
})?)
}



#[allow(non_upper_case_globals)]
const mm2pt: f32 = 2.834;

Expand Down
20 changes: 6 additions & 14 deletions examples/extract_text.rs
Original file line number Diff line number Diff line change
Expand Up @@ -106,19 +106,13 @@ fn load_pdf<P: AsRef<Path>>(path: P) -> Result<Document, Error> {

#[cfg(feature = "async")]
fn load_pdf<P: AsRef<Path>>(path: P) -> Result<Document, Error> {
Ok(
Builder::new_current_thread()
.build()
.unwrap()
.block_on(async move {
Document::load_filtered(path, filter_func)
.await
.map_err(|e| Error::new(ErrorKind::Other, e.to_string()))
})?
)
Ok(Builder::new_current_thread().build().unwrap().block_on(async move {
Document::load_filtered(path, filter_func)
.await
.map_err(|e| Error::new(ErrorKind::Other, e.to_string()))
})?)
}


fn get_pdf_text(doc: &Document) -> Result<PdfText, Error> {
let mut pdf_text: PdfText = PdfText {
text: BTreeMap::new(),
Expand Down Expand Up @@ -162,9 +156,7 @@ fn pdf2text<P: AsRef<Path> + Debug>(path: P, output: P, pretty: bool, password:
let mut doc = load_pdf(&path)?;
if doc.is_encrypted() {
doc.decrypt(password)
.map_err(|_err|
Error::new(ErrorKind::InvalidInput, "Failed to decrypt")
)?;
.map_err(|_err| Error::new(ErrorKind::InvalidInput, "Failed to decrypt"))?;
}
let text = get_pdf_text(&doc)?;
if !text.errors.is_empty() {
Expand Down
17 changes: 5 additions & 12 deletions examples/extract_toc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -93,19 +93,13 @@ fn load_pdf<P: AsRef<Path>>(path: P) -> Result<Document, Error> {

#[cfg(feature = "async")]
fn load_pdf<P: AsRef<Path>>(path: P) -> Result<Document, Error> {
Ok(
Builder::new_current_thread()
.build()
.unwrap()
.block_on(async move {
Document::load_filtered(path, filter_func)
.await
.map_err(|e| Error::new(ErrorKind::Other, e.to_string()))
})?
)
Ok(Builder::new_current_thread().build().unwrap().block_on(async move {
Document::load_filtered(path, filter_func)
.await
.map_err(|e| Error::new(ErrorKind::Other, e.to_string()))
})?)
}


fn pdf2toc<P: AsRef<Path> + Debug>(path: P, output: P, pretty: bool) -> Result<(), Error> {
println!("Load {path:?}");
let doc = load_pdf(&path)?;
Expand Down Expand Up @@ -147,4 +141,3 @@ fn main() -> Result<(), Error> {
);
Ok(())
}

28 changes: 16 additions & 12 deletions examples/print_annotations.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,10 @@
//
// Run with `cargo run --example print_annotations <pdf-file>`


use std::env;
use std::process;
use env_logger::Env;
use lopdf::{Document, Object};

use std::env;
use std::process;

fn logging() {
env_logger::Builder::from_env(Env::default().default_filter_or("info")).init()
Expand All @@ -29,19 +27,25 @@ fn handle_pdf_page(doc: Document) -> u32 {

for page in doc.page_iter() {
for a in doc.get_page_annotations(page) {
let subtype = a.get_deref(b"Subtype", &doc)
let subtype = a
.get_deref(b"Subtype", &doc)
.and_then(Object::as_name_str)
.unwrap_or("");
println!("Page {}, {} annotation at {:?}",
page_counter,
subtype,
a.get_deref(b"Rect", &doc).and_then(Object::as_array).unwrap());
println!(
"Page {}, {} annotation at {:?}",
page_counter,
subtype,
a.get_deref(b"Rect", &doc).and_then(Object::as_array).unwrap()
);
if let Ok(Object::String(c, _)) = a.get_deref(b"Contents", &doc) {
println!(" Contents: {:.60}", String::from_utf8_lossy(c).lines().next().unwrap());
}
if subtype.eq("Link") {
if let Ok(ahref) = a.get_deref(b"A", &doc).and_then(Object::as_dict) {
print!(" {} -> ", ahref.get_deref(b"S", &doc).and_then(Object::as_name_str).unwrap());
print!(
" {} -> ",
ahref.get_deref(b"S", &doc).and_then(Object::as_name_str).unwrap()
);
if let Ok(d) = ahref.get_deref(b"D", &doc).and_then(Object::as_array) {
println!("{:?}", d);
} else if let Ok(Object::String(u, _)) = ahref.get_deref(b"URI", &doc) {
Expand All @@ -59,7 +63,7 @@ fn handle_pdf_page(doc: Document) -> u32 {
}

#[cfg(not(feature = "async"))]
fn main () {
fn main() {
logging();

let args: Vec<String> = args();
Expand All @@ -81,4 +85,4 @@ async fn main() {
Ok(doc) => _ = handle_pdf_page(doc),
Err(e) => eprintln!("Error opening {:?}: {:?}", &args[1], e),
}
}
}
17 changes: 6 additions & 11 deletions examples/rotate.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,13 @@ fn main() {
// but you can also set it on any node in the page tree and child pages will
// inherit the value.
for (_, page_id) in doc.get_pages() {
let page_dict = doc.get_object_mut(page_id)
let page_dict = doc
.get_object_mut(page_id)
.and_then(|obj| obj.as_dict_mut())
.expect("Missing page!");

// Get the current rotation if any; the default is 0
let current_rotation = page_dict
.get(b"Rotate")
.and_then(|obj| obj.as_i64())
.unwrap_or(0);
let current_rotation = page_dict.get(b"Rotate").and_then(|obj| obj.as_i64()).unwrap_or(0);

// Add the angle and update
page_dict.set("Rotate", (current_rotation + angle) % 360);
Expand All @@ -33,7 +31,6 @@ fn main() {
doc.save(output_file).unwrap();
}


#[cfg(feature = "async")]
#[tokio::main]
async fn main() {
Expand All @@ -50,15 +47,13 @@ async fn main() {
// but you can also set it on any node in the page tree and child pages will
// inherit the value.
for (_, page_id) in doc.get_pages() {
let page_dict = doc.get_object_mut(page_id)
let page_dict = doc
.get_object_mut(page_id)
.and_then(|obj| obj.as_dict_mut())
.expect("Missing page!");

// Get the current rotation if any; the default is 0
let current_rotation = page_dict
.get(b"Rotate")
.and_then(|obj| obj.as_i64())
.unwrap_or(0);
let current_rotation = page_dict.get(b"Rotate").and_then(|obj| obj.as_i64()).unwrap_or(0);

// Add the angle and update
page_dict.set("Rotate", (current_rotation + angle) % 360);
Expand Down
3 changes: 2 additions & 1 deletion rustfmt.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
format_strings = false
reorder_imports = true
fn_args_layout = "Compressed"
fn_params_layout = "Compressed"
max_width = 120
comment_width = 120
wrap_comments = true
2 changes: 1 addition & 1 deletion src/creator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ impl Document {
if page.has(b"Resources") {
if let Ok(_res_id) = page.get(b"Resources").and_then(Object::as_reference) {
// Find and return referenced object.
// Note: This returns an error because we can not have 2 mut barrow for `*self`.
// Note: This returns an error because we can not have 2 mut borrows for `*self`.
// self.get_object_mut(res_id)
Err(Error::ObjectNotFound)
} else {
Expand Down
2 changes: 1 addition & 1 deletion src/document.rs
Original file line number Diff line number Diff line change
Expand Up @@ -394,8 +394,8 @@ impl Document {
};
let content_object_id = self.add_object(Object::Stream(Stream::new(Dictionary::new(), content)));
current_content_list.push(Object::Reference(content_object_id));
// Set data

// Set data
let page_mut = self.get_object_mut(page_id).and_then(Object::as_dict_mut).unwrap();
page_mut.set("Contents", current_content_list);
Ok(())
Expand Down
2 changes: 1 addition & 1 deletion src/error.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use std::fmt;
use crate::encryption;
use std::fmt;

#[derive(Debug)]
pub enum Error {
Expand Down
2 changes: 1 addition & 1 deletion src/incremental_document.rs
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ impl IncrementalDocument {
if page.has(b"Resources") {
if let Ok(_res_id) = page.get(b"Resources").and_then(Object::as_reference) {
// Find and return referenced object.
// Note: This returns an error because we can not have 2 mut barrow for `*self`.
// Note: This returns an error because we can not have 2 mut borrows for `*self`.
// self.get_object_mut(res_id)
Err(Error::ObjectNotFound)
} else {
Expand Down
3 changes: 2 additions & 1 deletion src/nom_parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -343,7 +343,8 @@ pub fn indirect_object(
fn _indirect_object(
input: &[u8], offset: usize, expected_id: Option<ObjectId>, reader: &Reader,
) -> crate::Result<(ObjectId, Object)> {
let (i, (_, object_id)) = terminated(tuple((space, object_id)), pair(tag(b"obj"), space))(input).map_err(|_| Error::Parse { offset })?;
let (i, (_, object_id)) =
terminated(tuple((space, object_id)), pair(tag(b"obj"), space))(input).map_err(|_| Error::Parse { offset })?;
if let Some(expected_id) = expected_id {
if object_id != expected_id {
return Err(crate::error::Error::ObjectIdMismatch);
Expand Down
8 changes: 3 additions & 5 deletions src/parser_aux.rs
Original file line number Diff line number Diff line change
Expand Up @@ -178,11 +178,9 @@ impl Document {
let mut content = self.get_and_decode_page_content(page_id)?;
content.operations.insert(0, Operation::new("q", vec![]));
content.operations.push(Operation::new("Q", vec![]));
// content.operations.push(Operation::new("q", vec![]));
content
.operations
.push(Operation::new("Do", vec![Name(form_name.as_bytes().to_vec())]));
// content.operations.push(Operation::new("Q", vec![]));
let modified_content = content.encode()?;
self.add_xobject(page_id, form_name, form_id)?;

Expand Down Expand Up @@ -234,12 +232,12 @@ pub fn decode_xref_stream(mut stream: Stream) -> Result<(Xref, Dictionary)> {
};
match entry_type {
0 => {
//free object
// free object
read_big_endian_integer(&mut reader, bytes2.as_mut_slice())?;
read_big_endian_integer(&mut reader, bytes3.as_mut_slice())?;
}
1 => {
//normal object
// normal object
let offset = read_big_endian_integer(&mut reader, bytes2.as_mut_slice())?;
let generation = if !bytes3.is_empty() {
read_big_endian_integer(&mut reader, bytes3.as_mut_slice())?
Expand All @@ -249,7 +247,7 @@ pub fn decode_xref_stream(mut stream: Stream) -> Result<(Xref, Dictionary)> {
xref.insert((start + j) as u32, XrefEntry::Normal { offset, generation });
}
2 => {
//compressed object
// compressed object
let container = read_big_endian_integer(&mut reader, bytes2.as_mut_slice())?;
let index = read_big_endian_integer(&mut reader, bytes3.as_mut_slice())? as u16;
xref.insert((start + j) as u32, XrefEntry::Compressed { container, index });
Expand Down
6 changes: 3 additions & 3 deletions src/processor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -161,14 +161,14 @@ impl Document {
}

/// Renumber objects with a custom starting id, this is very useful in case of multiple
/// document objects insertion in a single main document
/// document object insertions in a single main document
pub fn renumber_objects_with(&mut self, starting_id: u32) {
let mut replace = BTreeMap::new();
let mut new_id = starting_id;
let mut i = 0;

//lets check if we need to order the pages First as this means the first page doesnt have a lower ID
//So it ends up in a random spot based on its ID. We check first to avoid double transverse unless we have too.
// Check if we need to order the pages first, as this means the first page doesn't have a lower ID.
// So it ends up in a random spot based on its ID. We check first to avoid double traversal, unless we have to.

let mut page_order: Vec<(i32, (u32, u16))> = self
.page_iter()
Expand Down
9 changes: 4 additions & 5 deletions src/reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,15 +20,14 @@ use tokio::io::{AsyncRead, AsyncReadExt};
#[cfg(feature = "async")]
use tokio::pin;

use crate::parser;
use crate::error::XrefError;
use crate::object_stream::ObjectStream;
use crate::parser;
use crate::xref::XrefEntry;
use crate::{Document, Error, IncrementalDocument, Result, Object, ObjectId};
use crate::{Document, Error, IncrementalDocument, Object, ObjectId, Result};

type FilterFunc = fn((u32, u16), &mut Object) -> Option<((u32, u16), Object)>;


#[cfg(not(feature = "async"))]
impl Document {
/// Load a PDF document from a specified file path.
Expand Down Expand Up @@ -99,7 +98,7 @@ impl Document {
buffer: &buffer,
document: Document::new(),
}
.read(filter_func)
.read(filter_func)
}

/// Load a PDF document from a memory slice.
Expand Down Expand Up @@ -182,7 +181,7 @@ impl IncrementalDocument {
buffer: &buffer,
document: Document::new(),
}
.read(None)?;
.read(None)?;

Ok(IncrementalDocument::create_from(buffer, document))
}
Expand Down
13 changes: 6 additions & 7 deletions src/toc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,13 +32,13 @@ impl Toc {

#[derive(Debug, Clone)]
pub struct Destination {
map: IndexMap<Vec<u8>, Object>, // Use IndexMap instead of BTreeMap
map: IndexMap<Vec<u8>, Object>,
}

#[allow(dead_code)]
impl Destination {
pub fn new(title: Object, page: Object, typ: Object) -> Self {
let mut map = IndexMap::new(); // Use IndexMap
let mut map = IndexMap::new();
map.insert(b"Title".to_vec(), title);
map.insert(b"Page".to_vec(), page);
map.insert(b"Type".to_vec(), typ);
Expand All @@ -58,7 +58,7 @@ impl Destination {
}
}

type OutlinePageIds = IndexMap<Vec<u8>, ((u32, u16), usize, usize)>; // Use IndexMap
type OutlinePageIds = IndexMap<Vec<u8>, ((u32, u16), usize, usize)>;

fn setup_outline_page_ids<'a>(
outlines: &'a Vec<Outline>, result: &mut OutlinePageIds, level: usize,
Expand All @@ -81,8 +81,7 @@ fn setup_outline_page_ids<'a>(

impl Document {
fn setup_page_id_to_num(&self) -> IndexMap<(u32, u16), u32> {
// Use IndexMap
let mut result = IndexMap::new(); // Use IndexMap
let mut result = IndexMap::new();
for (page_num, page_id) in self.get_pages() {
result.insert(page_id, page_num);
}
Expand All @@ -94,9 +93,9 @@ impl Document {
toc: Vec::new(),
errors: Vec::new(),
};
let mut named_destinations = IndexMap::new(); // Use IndexMap
let mut named_destinations = IndexMap::new();
if let Some(outlines) = self.get_outlines(None, None, &mut named_destinations)? {
let mut outline_page_ids = IndexMap::new(); // Use IndexMap
let mut outline_page_ids = IndexMap::new();
setup_outline_page_ids(&outlines, &mut outline_page_ids, 1);
let page_id_to_page_numbers = self.setup_page_id_to_num();
for (title, (page_id, _page_idx, level)) in outline_page_ids {
Expand Down
Loading
Loading