Implement groundwork for challenge 13.

This commit is contained in:
2022-06-20 18:33:41 -04:00
parent 961f75bb7d
commit db9f9d265b
3 changed files with 100 additions and 6 deletions

View File

@@ -2,6 +2,7 @@ mod bytes;
mod bytes_base64;
mod cbc;
mod ecb;
mod parser;
mod set1;
mod set2;
@@ -13,10 +14,10 @@ fn main() {
// set1::challenge5();
// set1::challenge6();
// set1::challenge7();
set1::challenge8();
set2::challenge9();
set2::challenge10();
set2::challenge11();
set2::challenge12();
// set1::challenge8();
// set2::challenge9();
// set2::challenge10();
// set2::challenge11();
// set2::challenge12();
set2::challenge13();
}

65
src/parser.rs Normal file
View File

@@ -0,0 +1,65 @@
use std::collections::HashMap;
/// A single lexical unit of the `key=value&key=value` mini-language.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Token {
    /// A maximal run of identifier characters (keys and values).
    Identifier(String),
    /// The `=` separator between a key and its value.
    Equal,
    /// The `&` separator between pairs.
    Ampersand,
}

pub type Tokens = Vec<Token>;

/// Characters allowed inside identifiers beyond ASCII alphanumerics,
/// so email addresses like `foo@bar.com` tokenize as one identifier.
const SPECIAL_CHARS: &[char] = &['.', '@'];

/// True for characters that may appear inside a key or value.
fn is_identifier_char(c: char) -> bool {
    c.is_ascii_alphanumeric() || SPECIAL_CHARS.contains(&c)
}

/// Parses `foo=bar&baz=qux&zap=zazzle` into the respective hashmap.
///
/// # Panics
/// Panics if the input does not tokenize into `key = value [&]` groups
/// (empty values, stray separators, or unexpected characters).
pub fn parse_key_value(text: &str) -> HashMap<String, String> {
    let tokens = scan(text, 0, vec![]);
    let mut result = HashMap::new();
    // Each pair occupies four tokens (`key`, `=`, `value`, `&`); the last
    // pair may omit the trailing ampersand, hence the or-pattern below.
    for token_chunk in tokens.chunks(4) {
        match &token_chunk[..] {
            [Token::Identifier(key), Token::Equal, Token::Identifier(value), Token::Ampersand]
            | [Token::Identifier(key), Token::Equal, Token::Identifier(value)] => {
                result.insert(key.to_string(), value.to_string());
            }
            _ => panic!("Could not parse {:?}", token_chunk),
        };
    }
    result
}

/// Tokenizes `code` from byte offset `ix` onward, appending to `tokens`.
///
/// # Panics
/// Panics on any character that is not an identifier character, `=`, or `&`.
fn scan(code: &str, mut ix: usize, mut tokens: Tokens) -> Tokens {
    if ix == code.len() {
        return tokens;
    }
    // Slice from `ix` to the end (rather than `ix..ix + 1`) so that a
    // multi-byte character reaches the diagnostic panic below instead of
    // triggering an opaque char-boundary panic inside the slice itself.
    let c = code[ix..].chars().next().unwrap();
    if is_identifier_char(c) {
        return scan_identifier(code, ix, tokens);
    } else if c == '&' {
        tokens.push(Token::Ampersand);
    } else if c == '=' {
        tokens.push(Token::Equal);
    } else {
        panic!("Unexpected char '{}' at index {}", c, ix);
    }
    ix += 1;
    scan(code, ix, tokens)
}

/// Consumes the maximal identifier run starting at `ix`, pushes it as a
/// `Token::Identifier`, and resumes scanning right after it.
fn scan_identifier(code: &str, ix: usize, mut tokens: Tokens) -> Tokens {
    // Identifier characters are all single-byte ASCII, so the byte offset
    // returned by `find` is guaranteed to be a valid char boundary.
    let end = code[ix..]
        .find(|c: char| !is_identifier_char(c))
        .map_or(code.len(), |offset| ix + offset);
    tokens.push(Token::Identifier(code[ix..end].to_string()));
    scan(code, end, tokens)
}

View File

@@ -2,7 +2,9 @@ use crate::bytes::Bytes;
use crate::bytes_base64::BytesBase64;
use crate::cbc;
use crate::ecb;
use crate::parser;
use rand::Rng;
use std::collections::HashMap;
pub fn challenge9() {
let mut bytes = Bytes::from_utf8("YELLOW SUBMARINE");
@@ -183,7 +185,11 @@ pub fn challenge12() {
assert_eq!(is_encryption_ecb(&key), true); // 2. confirm oracle uses ecb
let rountrip_text = decode(&key); // 3.-6.
let clear_text = read("data/12.txt");
// 138 because I don't know where that additional byte is from
// 138 (instead of 139); I think we get one additional byte because we guess
// the first padding byte. The right approach would be to remove the last
// byte, encrypt it, and then compare it to the result of the encryption
// oracle, but this approach is fine too.
assert_eq!(rountrip_text.0[..138], clear_text.0);
println!(
"[okay] Challenge 12: {}",
@@ -192,5 +198,27 @@ pub fn challenge12() {
}
pub fn challenge13() {
    // Encodes the profile for `input` as `email=<input>&uid=1337&role=user`
    // and encrypts it under `key`. Rejects any character that could inject
    // extra key/value pairs (notably '=' and '&').
    fn profile_for(input: &str, key: &Bytes) -> Bytes {
        // Accept the same identifier charset the parser tokenizes
        // (ASCII alphanumerics plus '.' and '@'): previously digits were
        // rejected, which broke legitimate emails such as bob2@x.com.
        if let Some(c) = input
            .chars()
            .find(|&c| !(c.is_ascii_alphanumeric() || c == '.' || c == '@'))
        {
            panic!("profile_for: invalid char {}", c);
        }
        let profile = format!("email={}&uid=1337&role=user", input);
        ecb::encrypt(key, &Bytes(profile.into_bytes()))
    }
    // Decrypts an encrypted profile and parses it back into a map.
    fn decrypt(key: &Bytes, data: &Bytes) -> HashMap<String, String> {
        let clear = ecb::decrypt(key, data);
        parser::parse_key_value(&clear.to_utf8())
    }
    let key = Bytes::random(16); // consistent but unknown key
    let profile = profile_for("omgitsme@gmail.com", &key);
    let dict = decrypt(&key, &profile);
    println!("{:?}", dict);
    println!("[xxxx] Challenge 13: TBD");
}