/
main.rs
1285 lines (1168 loc) · 43.9 KB
/
main.rs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
use curve25519_parser::{
generate_keypair, parse_openssl_25519_privkey, parse_openssl_25519_pubkey, StaticSecret,
};
use glob::Pattern;
use hkdf::Hkdf;
use humansize::{FormatSize, DECIMAL};
use lru::LruCache;
use mla::config::{ArchiveReaderConfig, ArchiveWriterConfig};
use mla::errors::{Error, FailSafeReadError};
use mla::helpers::linear_extract;
use mla::layers::compress::CompressionLayerReader;
use mla::layers::encrypt::EncryptionLayerReader;
use mla::layers::raw::RawLayerReader;
use mla::layers::traits::{InnerReaderTrait, LayerReader};
use mla::{
ArchiveFailSafeReader, ArchiveFile, ArchiveFooter, ArchiveHeader, ArchiveReader, ArchiveWriter,
Layers,
};
use rand::SeedableRng;
use rand_chacha::ChaChaRng;
use sha2::{Digest, Sha512};
use std::collections::{HashMap, HashSet};
use std::error;
use std::fmt;
use std::fs::{self, read_dir, File};
use std::io::{self, BufRead};
use std::io::{Read, Seek, Write};
use std::num::NonZeroUsize;
use std::path::{Component, Path, PathBuf};
use std::sync::Mutex;
use tar::{Builder, Header};
use zeroize::Zeroize;
// ----- Error ------
/// Top-level error type for the `mlar` CLI.
///
/// Aggregates every failure source the tool can hit so that command
/// handlers can uniformly return `Result<(), MlarError>`.
#[derive(Debug)]
pub enum MlarError {
    /// Wrap a MLA error
    MlaError(Error),
    /// IO Error (not enough data, etc.)
    IOError(io::Error),
    /// A private key has been provided, but it is not required
    PrivateKeyProvidedButNotUsed,
    /// Configuration error
    ConfigError(mla::errors::ConfigError),
}
impl fmt::Display for MlarError {
    /// Render the error for end users.
    ///
    /// For now this delegates to the derived `Debug` representation, which
    /// is detailed enough for a CLI tool.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
impl From<Error> for MlarError {
fn from(error: Error) -> Self {
MlarError::MlaError(error)
}
}
impl From<io::Error> for MlarError {
fn from(error: io::Error) -> Self {
MlarError::IOError(error)
}
}
/// Wrap an underlying MLA configuration error.
impl From<mla::errors::ConfigError> for MlarError {
    fn from(err: mla::errors::ConfigError) -> Self {
        Self::ConfigError(err)
    }
}
impl error::Error for MlarError {
    /// Expose the wrapped error, if any, as this error's source.
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        // Exhaustive match: a future variant wrapping an error will be
        // caught at compile time instead of silently returning None
        match self {
            MlarError::MlaError(err) => Some(err),
            MlarError::IOError(err) => Some(err),
            MlarError::ConfigError(err) => Some(err),
            MlarError::PrivateKeyProvidedButNotUsed => None,
        }
    }
}
// ----- Utils ------
/// Writable destinations for archive output.
///
/// `ArchiveWriter` is parametrized over its writer type, so
/// `ArchiveWriter<File>` and `ArchiveWriter<io::Stdout>` cannot share a code
/// path; this enum erases the difference behind a single `Write` impl.
enum OutputTypes {
    Stdout,
    File { file: File },
}

impl Write for OutputTypes {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        match self {
            OutputTypes::File { file } => file.write(buf),
            OutputTypes::Stdout => io::stdout().write(buf),
        }
    }

    fn flush(&mut self) -> io::Result<()> {
        match self {
            OutputTypes::File { file } => file.flush(),
            OutputTypes::Stdout => io::stdout().flush(),
        }
    }
}
fn open_ecc_private_keys(matches: &ArgMatches) -> Result<Vec<x25519_dalek::StaticSecret>, Error> {
let mut private_keys = Vec::new();
if let Some(private_key_args) = matches.get_many::<PathBuf>("private_keys") {
for private_key_arg in private_key_args {
let mut file = File::open(private_key_arg)?;
// Load the the ECC key in-memory and parse it
let mut buf = Vec::new();
file.read_to_end(&mut buf)?;
match parse_openssl_25519_privkey(&buf) {
Err(_) => return Err(Error::InvalidECCKeyFormat),
Ok(private_key) => private_keys.push(private_key),
};
}
};
Ok(private_keys)
}
fn open_ecc_public_keys(matches: &ArgMatches) -> Result<Vec<x25519_dalek::PublicKey>, Error> {
let mut public_keys = Vec::new();
if let Some(public_key_args) = matches.get_many::<PathBuf>("public_keys") {
for public_key_arg in public_key_args {
let mut file = File::open(public_key_arg)?;
// Load the the ECC key in-memory and parse it
let mut buf = Vec::new();
file.read_to_end(&mut buf)?;
match parse_openssl_25519_pubkey(&buf) {
Err(_) => return Err(Error::InvalidECCKeyFormat),
Ok(public_key) => public_keys.push(public_key),
};
}
}
Ok(public_keys)
}
/// Return the ArchiveWriterConfig corresponding to provided arguments
///
/// Layer selection comes from `layers` (default: compress + encrypt).
/// `public_keys` is only honored when the encrypt layer is enabled, and
/// `compression_level` only when the compress layer is enabled; otherwise a
/// warning is printed and the argument is ignored.
///
/// # Panics
/// Panics on an unknown layer name, an unreadable public key, or an
/// out-of-range compression level — unrecoverable user errors for a CLI.
fn config_from_matches(matches: &ArgMatches) -> ArchiveWriterConfig {
    let mut config = ArchiveWriterConfig::new();
    // Get layers; `match` on get_many() replaces the previous
    // contains_id() + unwrap() pattern (same observable behavior)
    let layers: Vec<&str> = match matches.get_many::<String>("layers") {
        Some(values) => values.map(String::as_str).collect(),
        // Default layer set
        None => vec!["compress", "encrypt"],
    };
    for layer in layers {
        match layer {
            "compress" => {
                config.enable_layer(Layers::COMPRESS);
            }
            "encrypt" => {
                config.enable_layer(Layers::ENCRYPT);
            }
            _ => panic!("[ERROR] Unknown layer {layer}"),
        }
    }
    // Encryption specifics
    if matches.contains_id("public_keys") {
        if !config.is_layers_enabled(Layers::ENCRYPT) {
            eprintln!(
                "[WARNING] 'public_keys' argument ignored, because 'encrypt' layer is not enabled"
            );
        } else {
            // Abort on an unreadable key: continuing would silently produce
            // an archive some intended recipients cannot decrypt
            let public_keys = open_ecc_public_keys(matches)
                .unwrap_or_else(|error| panic!("[ERROR] Unable to open public keys: {error}"));
            config.add_public_keys(&public_keys);
        }
    }
    // Compression specifics
    if matches.contains_id("compression_level") {
        if !config.is_layers_enabled(Layers::COMPRESS) {
            eprintln!("[WARNING] 'compression_level' argument ignored, because 'compress' layer is not enabled");
        } else {
            let comp_level: u32 = *matches
                .get_one::<u32>("compression_level")
                .expect("compression_level must be an int");
            // Only levels 0 through 11 are supported by the compression layer
            assert!(comp_level <= 11, "compression_level must be in [0 .. 11]");
            config.with_compression_level(comp_level).unwrap();
        }
    }
    config
}
/// Open the requested output destination.
///
/// The conventional `-` value selects stdout; anything else is created (or
/// truncated) as a regular file.
///
/// The parameter is `&Path` rather than `&PathBuf` (clippy `ptr_arg`);
/// existing `&PathBuf` call sites keep working through deref coercion.
///
/// # Errors
/// Returns `MlarError::IOError` when the file cannot be created.
fn destination_from_output_argument(output_argument: &Path) -> Result<OutputTypes, MlarError> {
    if output_argument.as_os_str() == "-" {
        Ok(OutputTypes::Stdout)
    } else {
        // `output_argument` is already a path: no `Path::new` dance needed
        Ok(OutputTypes::File {
            file: File::create(output_argument)?,
        })
    }
}
/// Return an ArchiveWriter corresponding to provided arguments
fn writer_from_matches<'a>(
    matches: &ArgMatches,
) -> Result<ArchiveWriter<'a, OutputTypes>, MlarError> {
    let config = config_from_matches(matches);
    // Safe to use unwrap() because the option is required()
    let output_path = matches.get_one::<PathBuf>("output").unwrap();
    let destination = destination_from_output_argument(output_path)?;
    // Instantiate the writer on top of the chosen destination
    Ok(ArchiveWriter::from_config(destination, config)?)
}
/// Return the ArchiveReaderConfig corresponding to provided arguments and set
/// Layers::ENCRYPT if a key is provided
fn readerconfig_from_matches(matches: &ArgMatches) -> ArchiveReaderConfig {
    let mut config = ArchiveReaderConfig::new();
    if matches.contains_id("private_keys") {
        match open_ecc_private_keys(matches) {
            Ok(private_keys) => {
                config.add_private_keys(&private_keys);
                // Providing a key implies the user expects encryption
                config.layers_enabled.insert(Layers::ENCRYPT);
            }
            Err(error) => panic!("[ERROR] Unable to open private keys: {error}"),
        }
    }
    config
}
/// Open the input archive as a standard `ArchiveReader`.
///
/// When a private key was supplied but the archive header reports no
/// encryption layer, fail with `PrivateKeyProvidedButNotUsed` instead of
/// reading a file the user wrongly believes to be encrypted.
fn open_mla_file<'a>(matches: &ArgMatches) -> Result<ArchiveReader<'a, File>, MlarError> {
    let config = readerconfig_from_matches(matches);
    // Safe to use unwrap() because the option is required()
    let mla_file = matches.get_one::<PathBuf>("input").unwrap();
    let path = Path::new(&mla_file);
    let mut file = File::open(path)?;
    // If a decryption key is provided, assume the user expects the file to be encrypted
    // If not, avoid opening it
    file.rewind()?;
    let header = ArchiveHeader::from(&mut file)?;
    if config.layers_enabled.contains(Layers::ENCRYPT)
        && !header.config.layers_enabled.contains(Layers::ENCRYPT)
    {
        eprintln!("[-] A private key has been provided, but the archive is not encrypted");
        return Err(MlarError::PrivateKeyProvidedButNotUsed);
    }
    // Rewind so the reader parses the header itself from offset 0
    file.rewind()?;
    // Instantiate reader
    Ok(ArchiveReader::from_config(file, config)?)
}
/// Load the input archive in fail-safe mode (used by the `repair` command).
fn open_failsafe_mla_file<'a>(
    matches: &ArgMatches,
) -> Result<ArchiveFailSafeReader<'a, File>, MlarError> {
    let config = readerconfig_from_matches(matches);
    // Safe to use unwrap() because the option is required()
    let input_path = matches.get_one::<PathBuf>("input").unwrap();
    let file = File::open(input_path)?;
    // Instantiate the fail-safe reader
    Ok(ArchiveFailSafeReader::from_config(file, config)?)
}
/// Append one archive subfile to a tarball under construction.
///
/// The tar entry is created read-only (0o444), and its path is forced to be
/// relative by prefixing absolute names with `./` (Windows-style absolute
/// paths are not handled).
fn add_file_to_tar<R: Read, W: Write>(
    tar_file: &mut Builder<W>,
    sub_file: ArchiveFile<R>,
) -> io::Result<()> {
    let mut header = Header::new_gnu();
    header.set_size(sub_file.size);
    // Extracted entries should not be accidentally modified
    header.set_mode(0o444);
    header.set_cksum();
    // Force relative path, the trivial way (does not support Windows paths)
    let filename = if Path::new(&sub_file.filename).is_absolute() {
        format!("./{}", sub_file.filename)
    } else {
        sub_file.filename
    };
    tar_file.append_data(&mut header, &filename, sub_file.data)
}
/// Arguments for action 'extract' to match file names in the archive
enum ExtractFileNameMatcher {
    /// Match a list of files, where the order does not matter
    Files(HashSet<String>),
    /// Match a list of glob patterns
    GlobPatterns(Vec<Pattern>),
    /// No matching argument has been provided, so match all files
    Anything,
}
impl ExtractFileNameMatcher {
    /// Build a matcher from the `files` / `glob` command-line arguments.
    ///
    /// Panics (after printing the offending pattern) on an invalid glob.
    fn from_matches(matches: &ArgMatches) -> Self {
        let files = match matches.get_many::<String>("files") {
            Some(values) => values,
            None => return ExtractFileNameMatcher::Anything,
        };
        if matches.get_flag("glob") {
            // Use glob patterns
            ExtractFileNameMatcher::GlobPatterns(
                files
                    .map(|pat| {
                        Pattern::new(pat)
                            .map_err(|err| {
                                eprintln!("[!] Invalid glob pattern {pat:?} ({err:?})");
                            })
                            .expect("Invalid glob pattern")
                    })
                    .collect(),
            )
        } else {
            // Use file names
            ExtractFileNameMatcher::Files(files.map(|s| s.to_string()).collect())
        }
    }
    /// Whether `file_name` should be extracted.
    ///
    /// An empty file or pattern list matches every file.
    fn match_file_name(&self, file_name: &str) -> bool {
        match self {
            ExtractFileNameMatcher::Files(ref files) => {
                files.is_empty() || files.contains(file_name)
            }
            ExtractFileNameMatcher::GlobPatterns(ref patterns) => {
                patterns.is_empty() || patterns.iter().any(|pat| pat.matches(file_name))
            }
            ExtractFileNameMatcher::Anything => true,
        }
    }
}
/// Compute the full path of the final file, using defensive measures
/// similar as what tar-rs does for `Entry::unpack_in`:
/// https://github.com/alexcrichton/tar-rs/blob/0.4.26/src/entry.rs#L344
fn get_extracted_path(output_dir: &Path, file_name: &str) -> Option<PathBuf> {
    let mut destination = output_dir.to_path_buf();
    for component in Path::new(file_name).components() {
        match component {
            // A '..' component could escape the output directory (directory
            // traversal — see e.g. CVE-2001-1267, CVE-2002-0399,
            // CVE-2005-1918, CVE-2007-4131): refuse the file entirely
            Component::ParentDir => {
                eprintln!("[!] Skipping file \"{file_name}\" because it contains \"..\"");
                return None;
            }
            // Only normal parts contribute to the final path
            Component::Normal(part) => destination.push(part),
            // Leading '/' characters, root paths and '.' components are
            // treated as empty components and dropped
            Component::Prefix(..) | Component::RootDir | Component::CurDir => {}
        }
    }
    Some(destination)
}
/// Create a file and associate parent directories in a given output directory
///
/// Returns `Ok(None)` (skip, with a warning printed) when the target path is
/// rejected by the traversal checks, otherwise the created `File` together
/// with its full path.
fn create_file<P1: AsRef<Path>>(
    output_dir: P1,
    fname: &str,
) -> Result<Option<(File, PathBuf)>, MlarError> {
    // First line of defense: reject names containing ".."
    let extracted_path = match get_extracted_path(output_dir.as_ref(), fname) {
        Some(p) => p,
        None => return Ok(None),
    };
    // Create all directories leading to the file
    let containing_directory = match extracted_path.parent() {
        Some(p) => p,
        None => {
            eprintln!(
                "[!] Skipping file \"{}\" because it does not have a parent (from {})",
                &fname,
                extracted_path.display()
            );
            return Ok(None);
        }
    };
    if !containing_directory.exists() {
        fs::create_dir_all(containing_directory).map_err(|err| {
            eprintln!(
                " [!] Error while creating output directory path for \"{}\" ({:?})",
                output_dir.as_ref().display(),
                err
            );
            err
        })?;
    }
    // Ensure that the containing directory is in the output dir
    // (canonicalization also resolves symlinks, a second escape vector)
    let containing_directory = fs::canonicalize(containing_directory).map_err(|err| {
        eprintln!(
            " [!] Error while canonicalizing extracted file output directory path \"{}\" ({:?})",
            containing_directory.display(),
            err
        );
        err
    })?;
    if !containing_directory.starts_with(output_dir) {
        eprintln!(
            " [!] Skipping file \"{}\" because it would be extracted outside of the output directory, in {}",
            fname, containing_directory.display()
        );
        return Ok(None);
    }
    Ok(Some((
        File::create(&extracted_path).map_err(|err| {
            eprintln!(" [!] Unable to create \"{fname}\" ({err:?})");
            err
        })?,
        extracted_path,
    )))
}
/// Wrapper with Write, to append data to a file
///
/// This wrapper is used to avoid opening all files simultaneously, potentially
/// reaching the filesystem limit, but rather appending to file on-demand
///
/// A limited pool of active file, in a LRU cache, is used to avoid too many open-close
struct FileWriter<'a> {
    /// Target file for data appending
    path: PathBuf,
    /// Reference on the cache
    // A `Mutex` is used instead of a `RefCell` as `FileWriter` can be `Send`
    cache: &'a Mutex<LruCache<PathBuf, File>>,
    /// Is verbose mode enabled
    verbose: bool,
    /// Filename in the archive
    fname: &'a str,
}
/// Max number of fd simultaneously opened
pub const FILE_WRITER_POOL_SIZE: usize = 1000;
impl<'a> Write for FileWriter<'a> {
    /// Append `buf` to the target file, (re)opening it through the LRU pool.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        // Only one thread is using the FileWriter, safe to `.unwrap()`
        let mut cache = self.cache.lock().unwrap();
        if !cache.contains(&self.path) {
            // First write or evicted entry: reopen in append mode (the file
            // itself was created earlier by `create_file`)
            let file = fs::OpenOptions::new().append(true).open(&self.path)?;
            cache.put(self.path.clone(), file);
            // Report the filename once, on first open
            if self.verbose {
                println!("{}", self.fname);
            }
        }
        // Safe to `unwrap` here cause we ensure the element is in the cache (mono-threaded)
        let file = cache.get_mut(&self.path).unwrap();
        file.write(buf)
        // `file` will be closed on deletion from the cache
    }
    /// No internal buffering here; OS-level flushing happens on close.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
/// Add whatever is specified by `path`: recurse into directories, append
/// regular files to the archive.
fn add_file_or_dir(mla: &mut ArchiveWriter<OutputTypes>, path: &Path) -> Result<(), MlarError> {
    if path.is_dir() {
        add_dir(mla, path)?;
    } else {
        // This can lead to some non-obvious DuplicateFilename error (files
        // that appear with different file names in the filesystem
        // but mlar raising DuplicateFilename)
        let filename = path.to_string_lossy();
        let file = File::open(path)?;
        let length = file.metadata()?.len();
        // Report progress with the actual file name (previously a literal
        // "(unknown)" placeholder was printed, conveying nothing)
        eprintln!("{filename}");
        mla.add_file(&filename, length, file)?;
    }
    Ok(())
}
/// Recursively explore a dir to add all the files
/// Ignore empty directory (only files end up in the archive)
fn add_dir(mla: &mut ArchiveWriter<OutputTypes>, dir: &Path) -> Result<(), MlarError> {
    for entry in read_dir(dir)? {
        add_file_or_dir(mla, &entry?.path())?;
    }
    Ok(())
}
/// Treat each line read from stdin as a path to add to the archive.
fn add_from_stdin(mla: &mut ArchiveWriter<OutputTypes>) -> Result<(), MlarError> {
    let stdin = io::stdin();
    for line in stdin.lock().lines() {
        let path = line?;
        add_file_or_dir(mla, Path::new(&path))?;
    }
    Ok(())
}
// ----- Commands ------
/// `create` command: build a new archive from the provided file list.
///
/// The pseudo-file `-` triggers reading additional path names from stdin.
fn create(matches: &ArgMatches) -> Result<(), MlarError> {
    let mut mla = writer_from_matches(matches)?;
    if let Some(files) = matches.get_many::<PathBuf>("files") {
        for filename in files {
            if filename.as_os_str() == "-" {
                add_from_stdin(&mut mla)?;
            } else {
                add_file_or_dir(&mut mla, filename.as_path())?;
            }
        }
    }
    // Seal the archive (writes the footer)
    mla.finalize()?;
    Ok(())
}
/// `list` command: print the sorted file names contained in the archive.
///
/// Verbosity levels: 0 prints names only, 1 adds a human-readable size,
/// 2+ additionally prints the stored hash of each file.
fn list(matches: &ArgMatches) -> Result<(), MlarError> {
    let mut mla = open_mla_file(matches)?;
    let mut fnames: Vec<String> = mla.list_files()?.cloned().collect();
    fnames.sort();
    // get_count is invariant over the loop; read it once
    let verbosity = matches.get_count("verbose");
    for fname in fnames {
        if verbosity == 0 {
            println!("{fname}");
            continue;
        }
        let mla_file = mla.get_file(fname)?.expect("Unable to get the file");
        let filename = mla_file.filename;
        let size = mla_file.size.format_size(DECIMAL);
        if verbosity == 1 {
            // Print the actual file name next to its size (previously a
            // literal "(unknown)" placeholder was printed, and the computed
            // `filename` was unused on this path)
            println!("{filename} - {size}");
        } else {
            let hash = mla.get_hash(&filename)?.expect("Unable to get the hash");
            println!("{} - {} ({})", filename, size, hex::encode(hash));
        }
    }
    Ok(())
}
/// `extract` command: extract (a subset of) the archive to an output directory.
///
/// Without any file/glob filter, the whole archive is extracted via
/// `linear_extract` (sequential read, on-demand appends through a bounded
/// LRU pool of open descriptors). With a filter, each selected file is
/// looked up and copied individually.
fn extract(matches: &ArgMatches) -> Result<(), MlarError> {
    let file_name_matcher = ExtractFileNameMatcher::from_matches(matches);
    let output_dir = Path::new(matches.get_one::<PathBuf>("outputdir").unwrap());
    let verbose = matches.get_flag("verbose");
    let mut mla = open_mla_file(matches)?;
    // Create the output directory, if it does not exist
    if !output_dir.exists() {
        fs::create_dir(output_dir).map_err(|err| {
            eprintln!(
                " [!] Error while creating output directory \"{}\" ({:?})",
                output_dir.display(),
                err
            );
            err
        })?;
    }
    // Canonicalize so later containment checks compare absolute paths
    let output_dir = fs::canonicalize(output_dir).map_err(|err| {
        eprintln!(
            " [!] Error while canonicalizing output directory path \"{}\" ({:?})",
            output_dir.display(),
            err
        );
        err
    })?;
    let mut iter: Vec<String> = mla.list_files()?.cloned().collect();
    iter.sort();
    if let ExtractFileNameMatcher::Anything = file_name_matcher {
        // Optimisation: use linear extraction
        if verbose {
            println!("Extracting the whole archive using a linear extraction");
        }
        // Bounded pool of open descriptors, shared by all FileWriters
        let cache = Mutex::new(LruCache::new(
            NonZeroUsize::new(FILE_WRITER_POOL_SIZE).unwrap(),
        ));
        let mut export: HashMap<&String, FileWriter> = HashMap::new();
        for fname in &iter {
            // Create the destination file now; data is appended lazily later
            match create_file(&output_dir, fname)? {
                Some((_file, path)) => {
                    export.insert(
                        fname,
                        FileWriter {
                            path,
                            cache: &cache,
                            verbose,
                            fname,
                        },
                    );
                }
                None => continue,
            }
        }
        return Ok(linear_extract(&mut mla, &mut export)?);
    }
    for fname in iter {
        // Filter files according to glob patterns or files given as parameters
        if !file_name_matcher.match_file_name(&fname) {
            continue;
        }
        // Look for the file in the archive
        let mut sub_file = match mla.get_file(fname.clone()) {
            Err(err) => {
                eprintln!(" [!] Error while looking up subfile \"{fname}\" ({err:?})");
                continue;
            }
            Ok(None) => {
                eprintln!(" [!] Subfile \"{fname}\" indexed in metadata could not be found");
                continue;
            }
            Ok(Some(subfile)) => subfile,
        };
        let (mut extracted_file, _path) = match create_file(&output_dir, &fname)? {
            Some(file) => file,
            None => continue,
        };
        if verbose {
            println!("{fname}");
        }
        io::copy(&mut sub_file.data, &mut extracted_file).map_err(|err| {
            eprintln!(" [!] Unable to extract \"{fname}\" ({err:?})");
            err
        })?;
    }
    Ok(())
}
/// `cat` command: dump the content of selected archive files to the output
/// (a file, or stdout via `-`), concatenated.
///
/// With the `glob` flag, each pattern is matched against the sorted file
/// list and every match is dumped; otherwise the names are looked up
/// verbatim, in the order given.
fn cat(matches: &ArgMatches) -> Result<(), MlarError> {
    let files_values = matches.get_many::<String>("files").unwrap();
    let output = matches.get_one::<PathBuf>("output").unwrap();
    let mut destination = destination_from_output_argument(output)?;
    let mut mla = open_mla_file(matches)?;
    if matches.get_flag("glob") {
        // For each glob patterns, enumerate matching files and display them
        let mut archive_files: Vec<String> = mla.list_files()?.cloned().collect();
        archive_files.sort();
        for arg_pattern in files_values {
            // An invalid pattern is reported and skipped, not fatal
            let pat = match Pattern::new(arg_pattern) {
                Ok(pat) => pat,
                Err(err) => {
                    eprintln!(" [!] Invalid glob pattern {arg_pattern:?} ({err:?})");
                    continue;
                }
            };
            for fname in archive_files.iter() {
                if !pat.matches(fname) {
                    continue;
                }
                match mla.get_file(fname.to_string()) {
                    Err(err) => {
                        eprintln!(" [!] Error while looking up file \"{fname}\" ({err:?})");
                        continue;
                    }
                    Ok(None) => {
                        eprintln!(
                            " [!] Subfile \"{fname}\" indexed in metadata could not be found"
                        );
                        continue;
                    }
                    Ok(Some(mut subfile)) => {
                        io::copy(&mut subfile.data, &mut destination).map_err(|err| {
                            eprintln!(" [!] Unable to extract \"{fname}\" ({err:?})");
                            err
                        })?;
                    }
                }
            }
        }
    } else {
        // Retrieve all the files that are specified
        for fname in files_values {
            match mla.get_file(fname.to_string()) {
                Err(err) => {
                    eprintln!(" [!] Error while looking up file \"{fname}\" ({err:?})");
                    continue;
                }
                Ok(None) => {
                    eprintln!(" [!] File not found: \"{fname}\"");
                    continue;
                }
                Ok(Some(mut subfile)) => {
                    io::copy(&mut subfile.data, &mut destination).map_err(|err| {
                        eprintln!(" [!] Unable to extract \"{fname}\" ({err:?})");
                        err
                    })?;
                }
            }
        }
    }
    Ok(())
}
/// `to-tar` command: convert the archive into a tarball written to `output`.
///
/// Files are appended in sorted-name order; a subfile that fails to be
/// looked up or appended is reported and skipped, not fatal.
fn to_tar(matches: &ArgMatches) -> Result<(), MlarError> {
    let mut mla = open_mla_file(matches)?;
    // Safe to use unwrap() because the option is required()
    let output = matches.get_one::<PathBuf>("output").unwrap();
    let destination = destination_from_output_argument(output)?;
    let mut tar_file = Builder::new(destination);
    let mut archive_files: Vec<String> = mla.list_files()?.cloned().collect();
    archive_files.sort();
    for fname in archive_files {
        let sub_file = match mla.get_file(fname.clone()) {
            Err(err) => {
                eprintln!(" [!] Error while looking up subfile \"{fname}\" ({err:?})");
                continue;
            }
            Ok(None) => {
                eprintln!(" [!] Subfile \"{fname}\" indexed in metadata could not be found");
                continue;
            }
            Ok(Some(subfile)) => subfile,
        };
        if let Err(err) = add_file_to_tar(&mut tar_file, sub_file) {
            eprintln!(" [!] Unable to add subfile \"{fname}\" to tarball ({err:?})");
        }
    }
    Ok(())
}
/// `repair` command: rebuild a (possibly truncated) archive into a new one
/// with the fail-safe reader, reporting how far the recovery went.
fn repair(matches: &ArgMatches) -> Result<(), MlarError> {
    let mut mla = open_failsafe_mla_file(matches)?;
    let mut mla_out = writer_from_matches(matches)?;
    // Convert, then report the recovery status
    match mla.convert_to_archive(&mut mla_out)? {
        FailSafeReadError::NoError => {}
        FailSafeReadError::EndOfOriginalArchiveData => {
            eprintln!("[WARNING] The whole archive has been recovered");
        }
        status => {
            eprintln!("[WARNING] Conversion ends with {status}");
        }
    }
    Ok(())
}
/// `convert` command: read every file of an existing archive and rewrite it
/// into a new archive (e.g. with different layers or keys).
fn convert(matches: &ArgMatches) -> Result<(), MlarError> {
    let mut mla = open_mla_file(matches)?;
    let mut fnames: Vec<String> = if let Ok(iter) = mla.list_files() {
        // Read the file list using metadata
        iter.cloned().collect()
    } else {
        panic!("Files is malformed. Please consider repairing the file");
    };
    fnames.sort();
    let mut mla_out = writer_from_matches(matches)?;
    // Convert
    for fname in fnames {
        // Progress output on stderr, one name per file
        eprintln!("{fname}");
        // Renamed from `mla` to avoid confusingly shadowing the reader
        let sub_file = match mla.get_file(fname.clone()) {
            Err(err) => {
                eprintln!("Error while adding {fname} ({err:?})");
                continue;
            }
            Ok(None) => {
                // Message grammar fixed ("Unable to found" -> "Unable to find")
                eprintln!("Unable to find {fname}");
                continue;
            }
            Ok(Some(subfile)) => subfile,
        };
        mla_out.add_file(&sub_file.filename, sub_file.size, sub_file.data)?;
    }
    mla_out.finalize().expect("Finalization error");
    Ok(())
}
/// `keygen` command: generate a new Curve25519 key-pair.
///
/// Writes the public key (PEM) to `<output>.pub` and the private key (DER)
/// to `<output>`.
#[allow(clippy::unnecessary_wraps)]
fn keygen(matches: &ArgMatches) -> Result<(), MlarError> {
    // Safe to use unwrap() because of the requirement
    let output_base = matches.get_one::<PathBuf>("output").unwrap();
    let mut output_pub = File::create(Path::new(output_base).with_extension("pub"))
        .expect("Unable to create the public file");
    let mut output_priv = File::create(output_base).expect("Unable to create the private file");
    // handle seed
    //
    // if set, seed the PRNG with `SHA512(seed bytes as UTF8)[0..32]`
    // if not, seed the PRNG with the dedicated API
    let mut csprng = match matches.get_one::<String>("seed") {
        Some(seed) => {
            eprintln!(
                "[WARNING] A seed-based keygen operation is deterministic. An attacker knowing the seed knows the private key and is able to decrypt associated messages"
            );
            // Deterministic seeding: first 32 bytes of SHA512(seed)
            let mut hseed = [0u8; 32];
            hseed.copy_from_slice(&Sha512::digest(seed.as_bytes())[0..32]);
            ChaChaRng::from_seed(hseed)
        }
        None => ChaChaRng::from_entropy(),
    };
    let key_pair = generate_keypair(&mut csprng).expect("Error while generating the key-pair");
    // Output the public key in PEM format, to ease integration in text based
    // configs
    output_pub
        .write_all(key_pair.public_as_pem().as_bytes())
        .expect("Error writing the public key");
    // Output the private key in DER format, to avoid common mistakes
    output_priv
        .write_all(&key_pair.private_der)
        .expect("Error writing the private key");
    Ok(())
}
// HKDF salt dedicated to path derivation (domain separation)
const DERIVE_PATH_SALT: &[u8; 15] = b"PATH DERIVATION";
/// Derive a Curve25519 secret along a path and return a seed
///
/// HKDF(salt="PATH DERIVATION", ikm=Parent Key, info=Derivation path) -> seed
fn apply_derive(path: &str, mut src: StaticSecret) -> [u8; 32] {
    // HKDF-SHA512, keyed with the parent secret bytes
    let hkdf: Hkdf<Sha512> = Hkdf::new(Some(DERIVE_PATH_SALT), &src.to_bytes());
    let mut seed = [0u8; 32];
    hkdf.expand(path.as_bytes(), &mut seed)
        .expect("[ERROR] Error while expanding the key");
    // Wipe the consumed parent secret from memory before returning
    src.zeroize();
    seed
}
/// `keyderive` command: derive a new key-pair from a parent private key
/// along one or more derivation paths, applied in order.
///
/// Writes the resulting public key (PEM) to `<output>.pub` and the private
/// key (DER) to `<output>`.
#[allow(clippy::unnecessary_wraps)]
fn keyderive(matches: &ArgMatches) -> Result<(), MlarError> {
    // Safe to use unwrap() because of the requirement
    let output_base = matches.get_one::<PathBuf>("output").unwrap();
    let mut output_pub = File::create(Path::new(output_base).with_extension("pub"))
        .expect("Unable to create the public file");
    let mut output_priv = File::create(output_base).expect("Unable to create the private file");
    // Safe to use unwrap() because of the requirement
    let private_key_arg = matches.get_one::<PathBuf>("input").unwrap();
    let mut file = File::open(private_key_arg)?;
    // Load the ECC key in-memory and parse it
    let mut buf = Vec::new();
    file.read_to_end(&mut buf)?;
    let mut secret =
        parse_openssl_25519_privkey(&buf).expect("[ERROR] Unable to read the private key");
    // Derive the key along the path
    let mut key_pair = None;
    for path in matches
        .get_many::<String>("path")
        .expect("[ERROR] At least one path must be provided")
    {
        // Each step seeds a deterministic PRNG from HKDF(parent secret, path)
        let mut csprng = ChaChaRng::from_seed(apply_derive(path, secret));
        // Use the high-level API to avoid duplicating code from curve25519-parser in case of future changes
        key_pair =
            Some(generate_keypair(&mut csprng).expect("Error while generating the key-pair"));
        // The derived key becomes the parent for the next path element
        secret = parse_openssl_25519_privkey(&key_pair.as_ref().unwrap().private_der).unwrap();
    }
    // Safe to unwrap, there is at least one derivation path
    let key_pair = key_pair.unwrap();
    // Output the public key in PEM format, to ease integration in text based
    // configs
    output_pub
        .write_all(key_pair.public_as_pem().as_bytes())
        .expect("Error writing the public key");
    // Output the private key in DER format, to avoid common mistakes
    output_priv
        .write_all(&key_pair.private_der)
        .expect("Error writing the private key");
    Ok(())
}
/// MLA Archive format Reader used by the `info` command: parses the layer
/// stack and the footer to report sizes, without extracting anything.
pub struct ArchiveInfoReader {
    /// User's reading configuration
    pub config: ArchiveReaderConfig,
    /// Compressed sizes from CompressionLayer
    pub compressed_size: Option<u64>,
    /// Metadata (from footer if any)
    metadata: Option<ArchiveFooter>,
}
impl ArchiveInfoReader {
    /// Parse `src` as an MLA archive, stacking the reader layers declared in
    /// the header, and collect the information needed by `info`.
    pub fn from_config<'a, R>(
        mut src: R,
        mut config: ArchiveReaderConfig,
    ) -> Result<Self, MlarError>
    where
        R: 'a + InnerReaderTrait,
    {
        // Make sure we read the archive header from the start
        src.rewind()?;
        let header = ArchiveHeader::from(&mut src)?;
        config.load_persistent(header.config)?;
        // Pin the current position (after header) as the new 0
        let mut raw_src = Box::new(RawLayerReader::new(src));
        raw_src.reset_position()?;
        // Enable layers depending on user option. Order is relevant
        let mut src: Box<dyn 'a + LayerReader<'a, R>> = raw_src;
        if config.layers_enabled.contains(Layers::ENCRYPT) {
            src = Box::new(EncryptionLayerReader::new(src, &config.encrypt)?)
        }
        let compressed_size = if config.layers_enabled.contains(Layers::COMPRESS) {
            let mut src_compress = Box::new(CompressionLayerReader::new(src)?);
            src_compress.initialize()?;
            // Total compressed size, as recorded by the compression layer
            let size = src_compress
                .sizes_info
                .as_ref()
                .map(|v| v.get_compressed_size());
            src = src_compress;
            size
        } else {
            src.initialize()?;
            None
        };
        let metadata = Some(ArchiveFooter::deserialize_from(&mut src)?);
        src.rewind()?;
        Ok(ArchiveInfoReader {
            config,
            compressed_size,
            metadata,
        })
    }
    /// Sum of the (uncompressed) sizes of all files, from footer metadata.
    ///
    /// # Errors
    /// Returns `Error::MissingMetadata` when the footer was absent.
    pub fn get_files_size(&self) -> Result<u64, MlarError> {
        if let Some(ArchiveFooter { files_info, .. }) = &self.metadata {
            Ok(files_info.values().map(|f| f.size).sum())
        } else {
            Err(Error::MissingMetadata.into())
        }
    }
}
fn info(matches: &ArgMatches) -> Result<(), MlarError> {
// Safe to use unwrap() because the option is required()
let mla_file = matches.get_one::<PathBuf>("input").unwrap();
let path = Path::new(&mla_file);
let mut file = File::open(path)?;
// Get Header
let header = ArchiveHeader::from(&mut file)?;
let encryption = header.config.layers_enabled.contains(Layers::ENCRYPT);
let compression = header.config.layers_enabled.contains(Layers::COMPRESS);
// Instantiate reader as needed
let mla = if compression {
let config = readerconfig_from_matches(matches);
Some(ArchiveInfoReader::from_config(file, config)?)
} else {
None
};
// Format Version
println!("Format version: {}", header.format_version);
// Encryption config
println!("Encryption: {encryption}");
if encryption && matches.get_flag("verbose") {
let encrypt_config = header.config.encrypt.expect("Encryption config not found");
println!(