aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorThomas Voss <mail@thomasvoss.com> 2023-08-02 05:50:59 +0200
committerThomas Voss <mail@thomasvoss.com> 2023-08-02 05:50:59 +0200
commit9611b995712b33dd1d77e768a93c1cfd7ca36116 (patch)
tree98b47b28167a996ab3cce2a0973afc09cd0ce4ba
parent35ee03cef2d78908e7d6771eb2735a93e4358a60 (diff)
Remove the error module and use cerm
Fuck proxit, all my homies hate proxit.
-rw-r--r--Cargo.lock14
-rw-r--r--Cargo.toml2
-rw-r--r--src/error.rs63
-rw-r--r--src/main.rs114
4 files changed, 74 insertions, 119 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 525871d..ed60ec2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -21,6 +21,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
[[package]]
+name = "cerm"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae70b99554a0c7adb21b4792c6809f49692db19a904ad0c05fb7092645b26716"
+
+[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -75,18 +81,12 @@ checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0"
name = "mmv"
version = "0.1.0"
dependencies = [
+ "cerm",
"lexopt",
- "proxit",
"tempfile",
]
[[package]]
-name = "proxit"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "799e1d32ce9ca2aabd2ae1806c7673f2ec583b9260415f3416538865dacd2f70"
-
-[[package]]
name = "redox_syscall"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
index 810f69d..fac1bb2 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,6 +10,6 @@ authors = [
]
[dependencies]
+cerm = "1.0.0"
lexopt = "0.1.0"
-proxit = "1.0.1"
tempfile = "3.7.0"
diff --git a/src/error.rs b/src/error.rs
deleted file mode 100644
index e592e50..0000000
--- a/src/error.rs
+++ /dev/null
@@ -1,63 +0,0 @@
-use std::{
- env,
- ffi::OsString,
- fmt::{self, Display, Formatter},
- io,
- path::PathBuf,
-};
-
-pub enum Error {
- BadArgs(Option<lexopt::Error>),
- BadDecoding(String),
- BadLengths,
- DuplicateInput(PathBuf),
- DuplicateOutput(PathBuf),
- IO(io::Error),
- Nop,
- SpawnFailed(OsString, io::Error),
-}
-
-impl Display for Error {
- fn fmt(&self, f: &mut Formatter) -> fmt::Result {
- let p = env::args().next().unwrap();
- match self {
- Self::BadArgs(o) => {
- if let Some(v) = o {
- writeln!(f, "{p}: {v}")?;
- }
- writeln!(f, "Usage: {p} [-0eiv] command [argument ...]")
- }
- Self::BadDecoding(s) => writeln!(f, "{p}: Decoding the file “{s}” failed!"),
- Self::BadLengths => writeln!(f, "{p}: Files have been added or removed during editing"),
- Self::DuplicateInput(s) => writeln!(
- f,
- "{p}: Input file “{}” specified more than once",
- s.to_string_lossy()
- ),
- Self::DuplicateOutput(s) => writeln!(
- f,
- "{p}: Output file “{}” specified more than once",
- s.to_string_lossy()
- ),
- Self::IO(e) => writeln!(f, "{p}: {e}"),
- Self::Nop => Ok(()),
- Self::SpawnFailed(ed, e) => writeln!(
- f,
- "{p}: Failed to spawn utility “{}”: {e}",
- ed.to_string_lossy()
- ),
- }
- }
-}
-
-impl From<io::Error> for Error {
- fn from(e: io::Error) -> Self {
- Self::IO(e)
- }
-}
-
-impl From<lexopt::Error> for Error {
- fn from(e: lexopt::Error) -> Self {
- Self::BadArgs(Some(e))
- }
-}
diff --git a/src/main.rs b/src/main.rs
index 383175d..68f8d0a 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,5 +1,3 @@
-mod error;
-
use std::{
cmp::Reverse,
collections::{hash_map::DefaultHasher, HashSet},
@@ -10,16 +8,14 @@ use std::{
io::{self, BufRead, BufReader, BufWriter, Write},
iter,
path::{Component, Path, PathBuf},
- process::{Command, Stdio},
+ process::{Command, Stdio, self},
};
use {
- proxit::MainResult,
+ cerm::{err, warn},
tempfile::tempdir,
};
-use error::Error;
-
#[derive(Default)]
struct Flags {
pub dryrun: bool,
@@ -29,27 +25,37 @@ struct Flags {
pub verbose: bool,
}
-fn main() -> MainResult<(), Error> {
- work().into()
+fn usage(bad_flags: Option<lexopt::Error>) -> ! {
+ let p = env::args().next().unwrap();
+ if let Some(e) = bad_flags {
+ warn!("{e}");
+ }
+ eprintln!("Usage: {p} [-0eiv] command [argument ...]");
+ process::exit(1);
}
-fn work() -> Result<(), Error> {
- let (flags, rest) = parse_args()?;
- let (cmd, args) = rest.split_first().ok_or(Error::BadArgs(None))?;
+fn main() {
+ if let Err(e) = work() {
+ err!("{e}");
+ }
+}
+
+fn work() -> Result<(), io::Error> {
+ let (flags, rest) = match parse_args() {
+ Ok(a) => a,
+ Err(e) => usage(Some(e))
+ };
+ let (cmd, args) = rest.split_first().unwrap_or_else(|| usage(None));
// Collect sources from standard input
- let srcs = io::stdin()
+ let srcs: Vec<_> = io::stdin()
.lines()
- .map(|l| {
- l.map_err(Error::from).and_then(|l| {
- if l.is_empty() {
- Err(Error::BadArgs(None))
- } else {
- Ok(l)
- }
- })
+ .map(|x| match x {
+ Err(e) => { err!("{e}"); },
+ Ok(l) if l.is_empty() => usage(None),
+ Ok(l) => l,
})
- .collect::<Result<Vec<String>, Error>>()?;
+ .collect();
// Spawn the child process
let mut child = Command::new(cmd)
@@ -57,21 +63,25 @@ fn work() -> Result<(), Error> {
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn()
- .map_err(|e| Error::SpawnFailed(cmd.to_owned(), e))?;
+ .unwrap_or_else(|e| {
+ err!("Failed to spawn utility “{}”: {e}", cmd.to_str().unwrap());
+ });
// Pass the source files to the child process.
- // TODO: Don’t use expect; create a custom error
{
let ci = child
.stdin
.take()
- .expect("Could not open the child process’ stdin");
+ .unwrap_or_else(|| {
+ err!("Could not open the child process’ stdin");
+ });
let mut ci = BufWriter::new(ci);
if flags.encode {
srcs.iter()
.try_for_each(|src| writeln!(ci, "{}", encode_string(src)))?;
} else {
- srcs.iter().try_for_each(|src| writeln!(ci, "{}", src))?;
+ srcs.iter()
+ .try_for_each(|src| writeln!(ci, "{}", src))?;
}
}
@@ -81,13 +91,15 @@ fn work() -> Result<(), Error> {
let co = child
.stdout
.take()
- .expect("Could not open the child process’ stdout.");
+ .unwrap_or_else(|| {
+ err!("Count not open the child process’ stdout.");
+ });
let co = BufReader::new(co);
// TODO: Don’t allocate an intermediary String per line, by using the BufReader buffer.
- co.lines().try_for_each(|dst| -> Result<(), Error> {
+ co.lines().try_for_each(|dst| -> Result<(), io::Error> {
if flags.encode {
- dsts.push(decode_string(&dst?)?);
+ dsts.push(decode_string(&dst?));
} else {
dsts.push(dst?);
}
@@ -95,14 +107,14 @@ fn work() -> Result<(), Error> {
})?;
if dsts.len() != srcs.len() {
- return Err(Error::BadLengths);
+ err!("Files have been added or removed during editing");
}
}
/* If the process failed, it is expected to print an error message; as such,
we exit directly. */
if !child.wait()?.success() {
- return Err(Error::Nop);
+ process::exit(1);
}
let mut uniq_srcs: HashSet<PathBuf> = HashSet::with_capacity(srcs.len());
@@ -112,15 +124,15 @@ fn work() -> Result<(), Error> {
let mut ps = srcs
.iter()
.zip(dsts)
- .map(|(s, d)| -> Result<(PathBuf, PathBuf, PathBuf), Error> {
+ .map(|(s, d)| -> Result<(PathBuf, PathBuf, PathBuf), io::Error> {
let s = fs::canonicalize(s)?;
let d = env::current_dir()?.join(Path::new(&d));
let d = normalize_path(&d);
if !uniq_srcs.insert(s.clone()) {
- Err(Error::DuplicateInput(s))
+ err!("Input file “{}” specified more than once", s.to_string_lossy());
} else if !uniq_dsts.insert(d.clone()) {
- Err(Error::DuplicateOutput(d))
+ err!("Output file “{}” specified more than once", d.to_string_lossy());
} else {
let mut hasher = DefaultHasher::new();
s.hash(&mut hasher);
@@ -129,17 +141,17 @@ fn work() -> Result<(), Error> {
Ok((s, t, d))
}
})
- .collect::<Result<Vec<_>, Error>>()?;
+ .collect::<Result<Vec<_>, io::Error>>()?;
/* Sort the src/dst pairs so that the sources with the longest components
come first. */
ps.sort_by_key(|s| Reverse(s.0.components().count()));
for (s, t, _) in ps.iter() {
- move_path(&flags, &s, &t)?;
+ move_path(&flags, &s, &t);
}
for (_, t, d) in ps.iter().rev() {
- move_path(&flags, &t, &d)?;
+ move_path(&flags, &t, &d);
}
Ok(())
@@ -188,7 +200,7 @@ fn encode_string(s: &str) -> String {
.collect::<String>()
}
-fn decode_string(s: &str) -> Result<String, Error> {
+fn decode_string(s: &str) -> String {
let mut pv = false;
s.chars()
@@ -215,7 +227,9 @@ fn decode_string(s: &str) -> Result<String, Error> {
})
.filter_map(Result::transpose)
.collect::<Result<String, ()>>()
- .map_err(|()| Error::BadDecoding(s.to_string()))
+ .unwrap_or_else(|_| {
+ err!("Decoding the file “{}” failed", s);
+ })
}
/* Blatantly stolen from the Cargo source code. This is MIT licensed. */
@@ -246,25 +260,29 @@ fn normalize_path(path: &Path) -> PathBuf {
ret
}
-fn move_path(flags: &Flags, from: &PathBuf, to: &PathBuf) -> io::Result<()> {
+fn move_path(flags: &Flags, from: &PathBuf, to: &PathBuf) {
if flags.verbose {
println!("{} -> {}", from.as_path().display(), to.as_path().display());
}
if !flags.dryrun {
- copy_and_remove_file_or_dir(&from, &to)?;
+ copy_and_remove_file_or_dir(&from, &to).unwrap_or_else(|(f, e)| {
+ err!("{}: {e}", f.to_string_lossy());
+ });
}
-
- Ok(())
}
-fn copy_and_remove_file_or_dir<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> io::Result<()> {
- let data = fs::metadata(&from)?;
+fn copy_and_remove_file_or_dir<'a>(
+ from: &'a PathBuf,
+ to: &'a PathBuf
+) -> Result<(), (&'a PathBuf, io::Error)> {
+ let data = fs::metadata(&from).map_err(|e| (from, e))?;
if data.is_dir() {
- fs::create_dir(&to)?;
- fs::remove_dir(&from)
+ fs::create_dir(&to).map_err(|e| (to, e))?;
+ fs::remove_dir(&from).map_err(|e| (from, e))?
} else {
- fs::copy(&from, &to)?;
- fs::remove_file(&from)
+ fs::copy(&from, &to).map_err(|e| (to, e))?;
+ fs::remove_file(&from).map_err(|e| (from, e))?
}
+ Ok(())
}