use std::collections::HashMap;

/// Lexical tokens produced by the query-string scanner.
#[derive(Debug, PartialEq, Eq)]
pub enum Token {
    Identifier(String),
    Equal,
    Ampersand,
    Semicolon,
}

pub type Tokens = Vec<Token>;

/// Non-alphanumeric ASCII characters permitted inside identifiers.
const SPECIAL_CHARS: &[char] = &['.', '@', '%'];

/// Parses `foo=bar&baz=qux;zap=zazzle` into the corresponding hash map.
///
/// Both `&` and `;` are accepted as pair separators; a trailing separator
/// is allowed. A later duplicate key overwrites an earlier one.
///
/// # Panics
/// Panics on characters outside `[A-Za-z0-9.@%&=;]` and on token sequences
/// that do not follow the `key=value` grammar (original behavior).
pub fn parse_key_value(text: &str) -> HashMap<String, String> {
    let tokens = scan(text, 0, Vec::new());
    let mut result = HashMap::new();
    // Well-formed input yields tokens in groups of four (key, '=', value,
    // separator); the final group may omit the trailing separator.
    for chunk in tokens.chunks(4) {
        match chunk {
            [Token::Identifier(key), Token::Equal, Token::Identifier(value), Token::Ampersand | Token::Semicolon]
            | [Token::Identifier(key), Token::Equal, Token::Identifier(value)] => {
                result.insert(key.clone(), value.clone());
            }
            _ => panic!("Could not parse {:?}", chunk),
        }
    }
    result
}

/// Tokenizes `code` starting at byte index `ix`, appending to `tokens`.
///
/// Iterative (the original recursed once per token, overflowing the stack
/// on long inputs). Every accepted character is one byte, so `ix` always
/// lands on a `char` boundary.
fn scan(code: &str, mut ix: usize, mut tokens: Tokens) -> Tokens {
    while ix < code.len() {
        let c: char = code[ix..].chars().next().unwrap();
        if c.is_ascii_alphanumeric() || SPECIAL_CHARS.contains(&c) {
            ix = scan_identifier(code, ix, &mut tokens);
            continue;
        }
        match c {
            '&' => tokens.push(Token::Ampersand),
            '=' => tokens.push(Token::Equal),
            ';' => tokens.push(Token::Semicolon),
            _ => panic!("Unexpected char '{}' at index {}", c, ix),
        }
        ix += 1;
    }
    tokens
}

/// Consumes the identifier beginning at byte index `ix`, pushes the resulting
/// `Token::Identifier`, and returns the index just past its end.
///
/// Identifier characters are ASCII alphanumerics plus `SPECIAL_CHARS`, all
/// single-byte, so advancing by one byte per char is valid.
fn scan_identifier(code: &str, start_ix: usize, tokens: &mut Tokens) -> usize {
    let mut ix = start_ix;
    for c in code[start_ix..].chars() {
        if c.is_ascii_alphanumeric() || SPECIAL_CHARS.contains(&c) {
            ix += 1;
        } else {
            break;
        }
    }
    tokens.push(Token::Identifier(code[start_ix..ix].to_string()));
    ix
}