Skip to content

Commit

Permalink
rename fun
Browse files Browse the repository at this point in the history
  • Loading branch information
polvorin committed Aug 30, 2024
1 parent 0ef57f0 commit fa12032
Showing 1 changed file with 28 additions and 21 deletions.
49 changes: 28 additions & 21 deletions implementations/rust/ockam/ockam_api/src/http_auth/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -122,18 +122,22 @@ fn body_state(method: &str, headers: &[Header]) -> ockam_core::Result<RequestSta
}
}

fn copy_body(
/* Parse the incoming data, attaching an Authorization header token to it.
 * Data is received in chunks, and there is no guarantee about what each chunk
 * contains: incomplete requests, multiple requests, etc.
 */
fn process_http_buffer(
state: RequestState,
buf: &[u8],
token: &str,
upstream: &str,
) -> ockam_core::Result<(RequestState, Vec<u8>)> {
let mut v = Vec::with_capacity(buf.len());
let mut acc = Vec::with_capacity(buf.len());
let mut s = state;
let mut cursor = buf;
loop {
if cursor.is_empty() {
return Ok((s, v));
return Ok((s, acc));
}
match &mut s {
RequestState::ParsingHeader(prev) => {
Expand All @@ -149,15 +153,15 @@ fn copy_body(
match req.parse(to_parse) {
Ok(httparse::Status::Partial) if prev_size == 0 => {
// Nothing previously buffered; need to copy and own the unparsed data
return Ok((RequestState::ParsingHeader(Some(cursor.to_vec())), v));
return Ok((RequestState::ParsingHeader(Some(cursor.to_vec())), acc));
}
Ok(httparse::Status::Partial) => {
// There was a previous buffer, and we already added the newly received data to it
return Ok((s, v));
return Ok((s, acc));
}
Ok(httparse::Status::Complete(body_offset)) => {
cursor = &cursor[body_offset - prev_size..];
attach_auth_token_and_serialize_into(&req, token, upstream, &mut v);
attach_auth_token_and_serialize_into(&req, token, upstream, &mut acc);
s = body_state(req.method.unwrap(), req.headers)?;
}
Err(e) => {
Expand All @@ -168,13 +172,13 @@ fn copy_body(
}
RequestState::RemainingBody(remaining) => {
if *remaining <= cursor.len() {
v.extend_from_slice(&cursor[..*remaining]);
acc.extend_from_slice(&cursor[..*remaining]);
cursor = &cursor[*remaining..];
s = RequestState::ParsingHeader(None);
} else {
v.extend_from_slice(cursor);
acc.extend_from_slice(cursor);
*remaining -= cursor.len();
return Ok((s, v));
return Ok((s, acc));
}
}
RequestState::ParsingChunkedHeader(prev) => {
Expand All @@ -189,31 +193,34 @@ fn copy_body(
Ok(Status::Complete((2, 0))) => {
// This is just a final \r\n. The spec says it should end in a 0-sized
// chunk... but this has been seen in the wild as well.
v.extend_from_slice(&to_parse[..2]);
acc.extend_from_slice(&to_parse[..2]);
cursor = &cursor[2 - prev_size..];
s = RequestState::ParsingHeader(None);
}
Ok(Status::Complete((3, 0))) => {
// this is just a proper 0\r\n final chunk.
v.extend_from_slice(&to_parse[..3]);
acc.extend_from_slice(&to_parse[..3]);
cursor = &cursor[3 - prev_size..];
// There must be a final \r\n. And no more chunks,
// so just reuse the RemainingBody state for this
s = RequestState::RemainingBody(2);
}
Ok(Status::Complete((pos, chunk_size))) => {
v.extend_from_slice(&to_parse[..pos]);
acc.extend_from_slice(&to_parse[..pos]);
cursor = &cursor[pos - prev_size..];
let complete_size = chunk_size + 2; //chunks ends in \r\n
s = RequestState::RemainingInChunk(complete_size.try_into().unwrap());
}
Ok(Status::Partial) if prev_size == 0 => {
// Nothing previously buffered; need to copy and own the unparsed data
return Ok((RequestState::ParsingChunkedHeader(Some(cursor.to_vec())), v));
return Ok((
RequestState::ParsingChunkedHeader(Some(cursor.to_vec())),
acc,
));
}
Ok(Status::Partial) => {
// There was a previous buffer, and we already added the newly received data to it
return Ok((s, v));
return Ok((s, acc));
}
Err(e) => {
error!("Error parsing chunk size: {:?}. Buffer: {:?}", e, prev);
Expand All @@ -227,13 +234,13 @@ fn copy_body(
}
RequestState::RemainingInChunk(size) => {
if cursor.len() >= *size {
v.extend_from_slice(&cursor[..*size]);
acc.extend_from_slice(&cursor[..*size]);
cursor = &cursor[*size..];
s = RequestState::ParsingChunkedHeader(None);
} else {
v.extend_from_slice(cursor);
acc.extend_from_slice(cursor);
*size -= cursor.len();
return Ok((s, v));
return Ok((s, acc));
}
}
}
Expand All @@ -257,7 +264,7 @@ impl PortalInterceptor for HttpAuthInterceptor {
if token.is_none() {
error!("No authorization token available");
}
let (new_state, out) = copy_body(
let (new_state, out) = process_http_buffer(
guard.state.clone(),
buffer,
&token.unwrap_or_default(),
Expand Down Expand Up @@ -304,7 +311,7 @@ Transfer-Encoding: gzip, chunked\r\n\r\n\
let mut request_state = RequestState::ParsingHeader(None);
for chunk in data.chunks(size) {
let (next_state, data_out) =
copy_body(request_state, chunk, TOKEN, UPSTREAM).unwrap();
process_http_buffer(request_state, chunk, TOKEN, UPSTREAM).unwrap();
request_state = next_state;
result.extend_from_slice(&data_out);
}
Expand Down Expand Up @@ -341,7 +348,7 @@ field1=value1&field2=value2",
let mut request_state = RequestState::ParsingHeader(None);
for chunk in data.chunks(size) {
let (next_state, data_out) =
copy_body(request_state, chunk, TOKEN, UPSTREAM).unwrap();
process_http_buffer(request_state, chunk, TOKEN, UPSTREAM).unwrap();
request_state = next_state;
result.extend_from_slice(&data_out);
}
Expand Down Expand Up @@ -371,7 +378,7 @@ field1=value1&field2=value2",
let mut request_state = RequestState::ParsingHeader(None);
for chunk in data.chunks(size) {
let (next_state, data_out) =
copy_body(request_state, chunk, TOKEN, UPSTREAM).unwrap();
process_http_buffer(request_state, chunk, TOKEN, UPSTREAM).unwrap();
request_state = next_state;
result.extend_from_slice(&data_out);
}
Expand Down

0 comments on commit fa12032

Please sign in to comment.