9 changes: 7 additions & 2 deletions plugins/xdebug/xdebug_transforms.cc
@@ -296,8 +296,13 @@ body_transform(TSCont contp, TSEvent event, void * /* edata ATS_UNUSED */)
   }
 
   if (TSVIONTodoGet(src_vio) > 0) {
-    TSVIOReenable(data->output_vio);
-    TSContCall(TSVIOContGet(src_vio), TS_EVENT_VCONN_WRITE_READY, src_vio);
+    if (towrite > 0) {
+      // Only reenable and notify upstream if we consumed data.
+      // If no data was available, just return and wait for the
+      // upstream to call us again when more data arrives.

Comment on lines +299 to +302 (Contributor):

Can we replace this "what" comment with a "why" comment? This comment says what the code is doing (not calling reenable unless we have consumed data), but that is rather obvious. Can we reword this to explain that the reason we do this is to address the transform loop described in the PR comment?

+      TSVIOReenable(data->output_vio);
+      TSContCall(TSVIOContGet(src_vio), TS_EVENT_VCONN_WRITE_READY, src_vio);
+    }
   } else {
     // End of src vio
     // Write post body content and update output VIO
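One possible "why" wording for this guard, in response to the review comment above on lines +299 to +302. This is a suggested sketch based on the loop described in this PR, not code from the patch:

    if (towrite > 0) {
      // Reenable and notify upstream only when bytes were actually consumed.
      // With a slow origin this transform can run while TSVIONTodoGet(src_vio) > 0
      // but nothing is readable yet; reenabling and firing
      // TS_EVENT_VCONN_WRITE_READY in that state immediately re-invokes this
      // handler on still-empty buffers, producing the transform loop this PR
      // fixes. Returning instead lets the upstream wake us when data arrives.
      TSVIOReenable(data->output_vio);
      TSContCall(TSVIOContGet(src_vio), TS_EVENT_VCONN_WRITE_READY, src_vio);
    }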
slow-body-server.sh (new file)
@@ -0,0 +1,78 @@
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# A simple server that sends headers and first chunk immediately, then delays
# before sending subsequent chunks. This triggers the xdebug transform bug
# when the transform is called but no body data is available yet.
#
# Usage: slow-body-server.sh <port> <outfile>
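# Example invocation (port and output path are illustrative):
#   ./slow-body-server.sh 8080 /tmp/server_request.txt &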

PORT=$1
OUTFILE=$2

# Create a named pipe for the response
FIFO=$(mktemp -u)
mkfifo "$FIFO"

# Start nc in background, reading from the fifo
nc -l "$PORT" > "$OUTFILE" < "$FIFO" &
NC_PID=$!

# Open the fifo for writing
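# Holding fd 3 open for the whole response keeps the fifo's write end alive
# between printf calls; closing it early would deliver EOF to nc and truncate
# the response.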
exec 3>"$FIFO"

# Wait for the request to arrive (look for empty line ending headers)
while true; do
  if [[ -f "$OUTFILE" ]]; then
    if grep -q $'^\r$' "$OUTFILE" 2>/dev/null || grep -q '^$' "$OUTFILE" 2>/dev/null; then
      break
    fi
  fi
  sleep 0.1
done

# Send headers with chunked encoding
printf "HTTP/1.1 200 OK\r\n" >&3
printf "Content-Type: text/plain\r\n" >&3
printf "Transfer-Encoding: chunked\r\n" >&3
printf "X-Response: slow-chunked\r\n" >&3
printf "\r\n" >&3

# Send first chunk immediately
printf "5\r\n" >&3
printf "hello\r\n" >&3

# Delay before next chunk - this is the key to triggering the bug
# The transform will see more data expected but buffer empty
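# (In this window ATS can invoke the transform with TSVIONTodoGet(src_vio) > 0
# while zero bytes are readable, which is the case the new towrite > 0 guard
# in xdebug_transforms.cc handles.)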
sleep 2

# Send second chunk
printf "5\r\n" >&3
printf "world\r\n" >&3

# End chunked encoding
printf "0\r\n" >&3
printf "\r\n" >&3

# Close the fifo
exec 3>&-

# Wait for nc to finish
wait $NC_PID 2>/dev/null

# Cleanup
rm -f "$FIFO"
verify_no_loop.sh (new file)
@@ -0,0 +1,63 @@
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Verify the transform completed successfully and didn't produce excessive log output.
#
# Our test sends a 2-chunk response (5 bytes each). Normal behavior:
# - expected (first chunk)
# - consumed (first chunk)
# - expected (HttpTunnel callback - no data yet)
# - expected (second chunk)
# - consumed (second chunk)
#
# That's 4 "expected" lines for 2 chunks. The bug would cause many more "expected"
# lines as the transform loops, but in our test the loop might not be tight enough
# to produce many lines. We check that we don't have an excessive number.

LOGFILE="$1"
MAX_EXPECTED_LINES=10 # Allow up to 10; our 2-chunk response should produce ~4

if [[ ! -f "$LOGFILE" ]]; then
  echo "FAIL: Log file not found: $LOGFILE"
  exit 1
fi

# Count "expected" lines
expected_count=$(grep -c "bytes of body is expected" "$LOGFILE" 2>/dev/null || echo "0")

echo "Transform log analysis:"
echo " 'expected' lines: $expected_count (max allowed: $MAX_EXPECTED_LINES)"
echo ""
echo "Log contents:"
grep -E "(bytes of body is expected|consumed.*bytes)" "$LOGFILE"
echo ""

if [[ $expected_count -gt $MAX_EXPECTED_LINES ]]; then
  echo "FAIL: Found $expected_count 'expected' lines - indicates potential loop bug"
  exit 1
fi

# Also verify we got exactly 2 consumed lines (for our 2 chunks)
consumed_count=$(grep -c "consumed.*bytes" "$LOGFILE" 2>/dev/null || true)
consumed_count=${consumed_count:-0}
echo " 'consumed' lines: $consumed_count (expected: 2)"

if [[ $consumed_count -ne 2 ]]; then
  echo "WARNING: Expected 2 consumed lines but found $consumed_count"
fi

Comment on lines +58 to +60 (Copilot AI, Dec 15, 2025):

The script issues a warning when consumed_count != 2 but still exits with success (exit 0). This could mask issues where the transform behavior deviates from expectations. Consider either failing the test when this condition occurs, or removing the check if it's not critical to the test's purpose.

echo "PASS: Transform completed normally"
exit 0
Au test file (new file)
@@ -0,0 +1,80 @@
'''
Verify xdebug plugin probe-full-json does not hang with slow origin servers.

This test reproduces a bug where the transform would enter an infinite loop
when the origin server delays sending the response body. The bug occurs because
the transform would reenable and call back upstream even when no data was
available to consume.
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

Test.Summary = '''
Test xdebug probe-full-json with slow origin to verify no infinite loop
'''

Test.SkipUnless(Condition.PluginExists('xdebug.so'))
Test.SkipUnless(Condition.HasProgram("nc", "nc (netcat) is required for custom slow server"))

# Configure ATS
ts = Test.MakeATSProcess("ts", enable_cache=False)

ts.Disk.records_config.update(
    {
        "proxy.config.diags.debug.enabled": 1,
        "proxy.config.diags.debug.tags": "xdebug_transform",
        # Set reasonable timeouts
        "proxy.config.http.transaction_no_activity_timeout_in": 10,
        "proxy.config.http.transaction_no_activity_timeout_out": 10,
    })

ts.Disk.plugin_config.AddLine('xdebug.so --enable=probe-full-json')

# Reserve a port for the custom slow server
Test.GetTcpPort("server_port")

ts.Disk.remap_config.AddLine(f"map / http://127.0.0.1:{Test.Variables.server_port}/")

# Start the custom slow-body server
server = Test.Processes.Process(
    "server",
    f"bash -c '{Test.TestDirectory}/slow-body-server.sh {Test.Variables.server_port} {Test.RunDirectory}/server_request.txt'")

# Test with probe-full-json=nobody (which triggers the bug most easily)
tr = Test.AddTestRun("Verify probe-full-json with slow body delivery")
tr.TimeOut = 15 # Should complete well under this; timeout indicates hang/loop

# Make the request - use timeout to detect if the request hangs
tr.Processes.Default.Command = (
    f'timeout 8 curl -s -o /dev/null -w "%{{http_code}}" '
    f'-H "Host: example.com" '
    f'-H "X-Debug: probe-full-json=nobody" '
    f'http://127.0.0.1:{ts.Variables.port}/test')
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.StartBefore(server)
tr.Processes.Default.StartBefore(ts)

# Should print 200, not hang; a hang would make timeout(1) kill curl and exit with 124
tr.Processes.Default.Streams.stdout = Testers.ContainsExpression("200", "Should receive 200 OK, not timeout")

# Verify no infinite loop by checking the logs
# The bug manifests as the transform being called with "bytes of body is expected"
# but no data consumed. Every "expected" should be followed by "consumed".
tr2 = Test.AddTestRun("Verify no infinite loop in transform")
tr2.Processes.Default.Command = f"bash {Test.TestDirectory}/verify_no_loop.sh {ts.Variables.LOGDIR}/traffic.out"
tr2.Processes.Default.ReturnCode = 0
tr2.Processes.Default.Streams.stdout = Testers.ContainsExpression(
"PASS", "Verification script should pass - every 'expected' followed by 'consumed'")