first commit

Muhamad Ibnu Fadhil · 2025-11-17 20:11:26 +07:00 · commit 27e101260c
6 changed files with 547 additions and 0 deletions

README.md (new file, 17 lines)

@@ -0,0 +1,17 @@
## Benchmark Latency and Throughput of Stitcher
### Benchmark Latency
0. Have Python installed, along with the dependencies `pandas` and `matplotlib`
1. Change into the latency directory: `cd latency`
2. Run the following in the terminal:
```
./request.sh -o <output_location> -u "<backend_url>" -i <iteration> --no-output
```
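
For example, to run only tests 1 through 3 against the default backend URL and discard the generated images (the output path here is illustrative):
```
./request.sh -o results/local-run -u "http://stitcher.local:5000" -i 1-3 --no-output
```
Omitting `-i` runs all 11 tests; `-i` accepts a single test number or an inclusive range such as `2-7`.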
### Benchmark Throughput
0. Have JMeter installed ([download here](https://jmeter.apache.org/download_jmeter.cgi))
1. Change into the throughput directory: `cd throughput`
2. Run the following in the terminal:
```
./request.sh -o <output_folder> -H "<backend_ip>" -t <num_of_threads> -l <num_of_loops>
```
3. Outputs will be written to the specified directory
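
For example, using the script's defaults of 10 threads and 10 loops (the host IP is the placeholder from the script's help text):
```
./request.sh -o my-app -H "10.250.22.29" -t 10 -l 10
```
This writes `my-app/my-app_benchmark.csv`, an HTML dashboard under `my-app/my-app_dashboard/`, and `my-app/my-app_jmeter.log`.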

latency/generate_graph.py (new file, 60 lines)

@@ -0,0 +1,60 @@
import argparse
import pandas as pd
import matplotlib.pyplot as plt
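
# Usage (as invoked by request.sh):
#   python3 generate_graph.py --input <out_dir>/performance.csv \
#       --output <out_dir>/latency-<name>.png --user <name>
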
def shorten_title(title, max_len=12):
    """Return a shortened title for plotting."""
    return title if len(title) <= max_len else title[:max_len] + "..."


def main():
    parser = argparse.ArgumentParser(description="Generate latency graph from CSV.")
    parser.add_argument("--input", required=True, help="Input CSV file")
    parser.add_argument("--output", required=True, help="Output PNG path")
    parser.add_argument("--user", required=True, help="Username for graph title")
    args = parser.parse_args()

    # Load CSV
    df = pd.read_csv(args.input)

    # Convert to float, dropping rows that fail to parse
    df["Latency(s)"] = pd.to_numeric(df["Latency(s)"], errors="coerce")
    df = df.dropna(subset=["Latency(s)"])

    latencies = df["Latency(s)"].values
    titles = df["Title"].values
    short_titles = [shorten_title(t) for t in titles]

    # Compute average
    avg_latency = latencies.mean()

    plt.figure(figsize=(14, 6))

    # --- Bar chart ---
    plt.bar(short_titles, latencies)

    # --- Average line ---
    plt.axhline(y=avg_latency, linestyle="--")
    plt.text(
        -0.4, avg_latency,
        f"Avg = {avg_latency:.3f}s",
        va="bottom",
        fontsize=10,
        fontweight="bold"
    )

    # --- Label each bar with its exact value ---
    for i, value in enumerate(latencies):
        plt.text(i, value, f"{value:.3f}", ha="center", va="bottom", fontsize=8)

    plt.xticks(rotation=45, ha="right")
    plt.xlabel("Name")
    plt.ylabel("Latency (seconds)")
    plt.title(f"Latency Performance of {args.user}")
    plt.tight_layout()
    plt.savefig(args.output)
    print(f"Graph saved to: {args.output}")


if __name__ == "__main__":
    main()

latency/request.sh (new executable file, 137 lines)

@@ -0,0 +1,137 @@
#!/bin/bash
# Default API URL
API_URL="http://stitcher.local:5000"
OUTPUT_DIR=""
ITERATION_RANGE=""
SAVE_OUTPUT=true
# Parse command line arguments
while [[ "$#" -gt 0 ]]; do
case $1 in
-o|--output) OUTPUT_DIR="$2"; shift ;;
-u|--url) API_URL="$2"; shift ;;
-i|--iteration) ITERATION_RANGE="$2"; shift ;;
-n|--no-output) SAVE_OUTPUT=false ;;
*) echo "Unknown parameter passed: $1"; exit 1 ;;
esac
shift
done
# Check if output directory is specified
if [ -z "$OUTPUT_DIR" ]; then
echo "Error: Output directory not specified. Use -o or --output to specify the directory."
exit 1
fi
# Create the output directory if it doesn't exist
mkdir -p "$OUTPUT_DIR"
LOG_FILE="${OUTPUT_DIR}/results.txt"
PERFORMANCE_FILE="${OUTPUT_DIR}/performance.csv"
# Add CSV header
echo "Title,Latency(s),Payload" > "$PERFORMANCE_FILE"
exec > >(tee -a "$LOG_FILE") 2>&1
# Define the tests and their corresponding payloads
titles=(
"01_L19-N21-Small_Square_Image_3x3_tiles"
"02_H27-K30-Small_Square_Image_4x4_tiles_0.75_scale"
"03_Q46-W48-Small_Rectangle_image_3x6_tiles"
"04_K22-N29-Small_Long_Rectangle_Image_8x4_tiles_0.5_scale"
"05_O30-P31-Small_Square_Cropped_to_LeftOneTile"
"06_X46-Y47-Small_Square_Cropped_to_MiddleEqually"
"07_T21-Z27-Medium_7x7_Square_Cropped_to_LeftTop4x3Tile"
"08_X14-AD20-Medium_7x7_Square_Cropped_to_MiddleEqually"
"09_A1-AE13-Entire_Left_Panel"
"10_A14-AE42-Entire_Middle_Panel"
"11_A1-AE55-Full_Image"
)
payloads=(
'{"canvas_rect":"L19:N21","crop_offset":[0,0],"crop_size":[1,1],"output_scale":1}'
'{"canvas_rect":"H27:K30","crop_offset":[0,0],"crop_size":[1,1],"output_scale":0.75}'
'{"canvas_rect":"Q46:W48","crop_offset":[0,0],"crop_size":[1,1],"output_scale":1}'
'{"canvas_rect":"K22:N29","crop_offset":[0,0],"crop_size":[1,1],"output_scale":0.5}'
'{"canvas_rect":"O30:P31","crop_offset":[0.075,0.625],"crop_size":[0.4,0.35],"output_scale":1}'
'{"canvas_rect":"X46:Y47","crop_offset":[0.25,0.25],"crop_size":[0.5,0.5],"output_scale":1}'
'{"canvas_rect":"T21:Z27","crop_offset":[0.0125,0.0125],"crop_size":[0.55,0.375],"output_scale":1}'
'{"canvas_rect":"X14:AD20","crop_offset":[0.25,0.25],"crop_size":[0.5,0.5],"output_scale":1}'
'{"canvas_rect":"A1:AE13","crop_offset":[0,0],"crop_size":[1,1],"output_scale":1}'
'{"canvas_rect":"A14:AE42","crop_offset":[0,0],"crop_size":[1,1],"output_scale":1}'
'{"canvas_rect":"A1:AE55","crop_offset":[0,0],"crop_size":[1,1],"output_scale":1}'
)
# Determine the loop range
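# Accepted -i/--iteration values: empty (run all tests), a single
# test number (e.g. 5), or an inclusive range (e.g. 2-7).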
if [ -z "$ITERATION_RANGE" ]; then
start=0
end=$((${#titles[@]} - 1))
elif [[ $ITERATION_RANGE == -* ]]; then
index=$((${ITERATION_RANGE#-} - 1));
if [ "$index" -lt 0 ] || [ "$index" -ge "${#titles[@]}" ]; then echo "Error: Invalid iteration number."; exit 1; fi
start=$index; end=$index
elif [[ $ITERATION_RANGE == *-* ]]; then
IFS='-' read -r start_range end_range <<< "$ITERATION_RANGE";
start=$((start_range - 1)); end=$((end_range - 1));
if [ "$start" -lt 0 ] || [ "$end" -ge "${#titles[@]}" ] || [ "$start" -gt "$end" ]; then echo "Error: Invalid iteration range."; exit 1; fi
else
index=$(($ITERATION_RANGE - 1));
if [ "$index" -lt 0 ] || [ "$index" -ge "${#titles[@]}" ]; then echo "Error: Invalid iteration number."; exit 1; fi
start=$index; end=$index
fi
# Loop through the tests and execute curl
for i in $(seq $start $end); do
title="${titles[$i]}"
payload="${payloads[$i]}"
echo "Running test: $title"
# Set the output target for curl. If --no-output is used, send to /dev/null
if [ "$SAVE_OUTPUT" = true ]; then
output_target="$OUTPUT_DIR/${title}.png"
else
output_target="/dev/null"
fi
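# curl writes the response body to $output_target, while -w prints the
# total request time to stdout; that time is captured as the latency.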
time_taken=$(curl -o "$output_target" -w "%{time_total}\n" -X POST "${API_URL}/api/image/generate" -H "Content-Type: application/json" -d "$payload" -s | head -n 1)
if [ "$SAVE_OUTPUT" = true ]; then
echo "Image saved to $output_target"
else
echo "Request completed (output discarded)."
fi
echo "Total time taken: ${time_taken}s"
echo "---------------------------------"
# Replace commas inside the JSON payload before logging it; literal commas
# in the field were breaking the CSV parsing downstream.
payload_sanitized="${payload//,/-}"
echo "\"$title\",$time_taken,\"$payload_sanitized\"" >> "$PERFORMANCE_FILE"
done
echo "All specified tests completed."
# --- Graph Generation Section ---
PYTHON_SCRIPT_NAME="generate_graph.py"
if [ ! -f "$PYTHON_SCRIPT_NAME" ]; then
echo "Warning: Python script '$PYTHON_SCRIPT_NAME' not found. Skipping graph generation."
exit 0
fi
if ! command -v python3 &> /dev/null; then
echo "Warning: python3 is not installed. Skipping graph generation."
exit 0
fi
FOLDER_NAME=$(basename "$OUTPUT_DIR")
GRAPH_OUTPUT_PATH="${OUTPUT_DIR}/latency-${FOLDER_NAME}.png"
echo "---------------------------------"
echo "Generating performance graph..."
python3 "$PYTHON_SCRIPT_NAME" --input "$PERFORMANCE_FILE" --output "$GRAPH_OUTPUT_PATH" --user "$FOLDER_NAME"

throughput/jmeter.log (new file, 40 lines)

@@ -0,0 +1,40 @@
2025-11-17 17:31:17,310 INFO o.a.j.u.JMeterUtils: Setting Locale to en_EN
2025-11-17 17:31:17,327 INFO o.a.j.JMeter: Loading user properties from: /opt/apache-jmeter-5.6.3/bin/user.properties
2025-11-17 17:31:17,327 INFO o.a.j.JMeter: Loading system properties from: /opt/apache-jmeter-5.6.3/bin/system.properties
2025-11-17 17:31:17,332 INFO o.a.j.JMeter: Copyright (c) 1998-2024 The Apache Software Foundation
2025-11-17 17:31:17,332 INFO o.a.j.JMeter: Version 5.6.3
2025-11-17 17:31:17,332 INFO o.a.j.JMeter: java.version=21.0.8
2025-11-17 17:31:17,332 INFO o.a.j.JMeter: java.vm.name=OpenJDK 64-Bit Server VM
2025-11-17 17:31:17,332 INFO o.a.j.JMeter: os.name=Linux
2025-11-17 17:31:17,332 INFO o.a.j.JMeter: os.arch=amd64
2025-11-17 17:31:17,332 INFO o.a.j.JMeter: os.version=6.14.0-35-generic
2025-11-17 17:31:17,332 INFO o.a.j.JMeter: file.encoding=UTF-8
2025-11-17 17:31:17,332 INFO o.a.j.JMeter: java.awt.headless=null
2025-11-17 17:31:17,332 INFO o.a.j.JMeter: Max memory =1073741824
2025-11-17 17:31:17,332 INFO o.a.j.JMeter: Available Processors =8
2025-11-17 17:31:17,340 INFO o.a.j.JMeter: Default Locale=English (EN)
2025-11-17 17:31:17,341 INFO o.a.j.JMeter: JMeter Locale=English (EN)
2025-11-17 17:31:17,341 INFO o.a.j.JMeter: JMeterHome=/opt/apache-jmeter-5.6.3
2025-11-17 17:31:17,341 INFO o.a.j.JMeter: user.dir =/home/ibnufadhil/Documents/projects/benchmark/throughput
2025-11-17 17:31:17,341 INFO o.a.j.JMeter: PWD =/home/ibnufadhil/Documents/projects/benchmark/throughput
2025-11-17 17:31:17,391 INFO o.a.j.JMeter: IP: 10.250.7.97 Name: SE-146 FullName: SE-146
2025-11-17 17:31:17,398 INFO o.a.j.JMeter: Loaded icon properties from org/apache/jmeter/images/icon.properties
2025-11-17 17:31:17,543 INFO o.a.j.JMeterGuiLauncher: Setting LAF to: com.github.weisj.darklaf.DarkLaf:com.github.weisj.darklaf.theme.DarculaTheme
2025-11-17 17:31:25,059 INFO o.a.j.s.FileServer: Default base='/home/ibnufadhil/Documents/projects/benchmark/throughput'
2025-11-17 17:31:25,061 INFO o.a.j.g.a.Load: Loading file: /home/ibnufadhil/Documents/projects/benchmark/throughput/stitcher-benchmark.jmx
2025-11-17 17:31:25,061 INFO o.a.j.s.FileServer: Set new base='/home/ibnufadhil/Documents/projects/benchmark/throughput'
2025-11-17 17:31:25,209 INFO o.a.j.s.SaveService: Testplan (JMX) version: 2.2. Testlog (JTL) version: 2.2
2025-11-17 17:31:25,236 INFO o.a.j.s.SaveService: Using SaveService properties version 5.0
2025-11-17 17:31:25,238 INFO o.a.j.s.SaveService: Using SaveService properties file encoding UTF-8
2025-11-17 17:31:25,240 INFO o.a.j.s.SaveService: Loading file: /home/ibnufadhil/Documents/projects/benchmark/throughput/stitcher-benchmark.jmx
2025-11-17 17:31:25,259 INFO o.a.j.p.h.s.HTTPSamplerBase: Parser for text/html is org.apache.jmeter.protocol.http.parser.LagartoBasedHtmlParser
2025-11-17 17:31:25,259 INFO o.a.j.p.h.s.HTTPSamplerBase: Parser for application/xhtml+xml is org.apache.jmeter.protocol.http.parser.LagartoBasedHtmlParser
2025-11-17 17:31:25,260 INFO o.a.j.p.h.s.HTTPSamplerBase: Parser for application/xml is org.apache.jmeter.protocol.http.parser.LagartoBasedHtmlParser
2025-11-17 17:31:25,260 INFO o.a.j.p.h.s.HTTPSamplerBase: Parser for text/xml is org.apache.jmeter.protocol.http.parser.LagartoBasedHtmlParser
2025-11-17 17:31:25,260 INFO o.a.j.p.h.s.HTTPSamplerBase: Parser for text/vnd.wap.wml is org.apache.jmeter.protocol.http.parser.RegexpHTMLParser
2025-11-17 17:31:25,260 INFO o.a.j.p.h.s.HTTPSamplerBase: Parser for text/css is org.apache.jmeter.protocol.http.parser.CssParser
2025-11-17 17:31:25,601 INFO o.a.j.s.SampleResult: Note: Sample TimeStamps are START times
2025-11-17 17:31:25,601 INFO o.a.j.s.SampleResult: sampleresult.default.encoding is set to UTF-8
2025-11-17 17:31:25,601 INFO o.a.j.s.SampleResult: sampleresult.useNanoTime=true
2025-11-17 17:31:25,601 INFO o.a.j.s.SampleResult: sampleresult.nanoThreadSleep=5000
2025-11-17 17:31:25,712 INFO o.a.j.s.FileServer: Set new base='/home/ibnufadhil/Documents/projects/benchmark/throughput'

throughput/request.sh (new executable file, 114 lines)

@@ -0,0 +1,114 @@
#!/bin/bash
JMX_FILE="./stitcher-benchmark.jmx"
JMETER_CMD="jmeter"
DEFAULT_THREADS=10
DEFAULT_LOOPS=10
APP_NAME=""
TARGET_URL=""
THREADS=$DEFAULT_THREADS
LOOPS=$DEFAULT_LOOPS
show_help() {
echo "Usage: $0 -u <AppName> -H <TargetURL> [-t <Threads>] [-l <Loops>]"
echo ""
echo "Runs a JMeter benchmark test with specified parameters."
echo ""
echo "Required Flags:"
echo " -o, --output The name of the app or user for organizing results."
echo " -H, --host The target URL or IP address for the test (e.g., 10.250.22.29)."
echo ""
echo "Optional Flags:"
echo " -t, --threads Number of concurrent threads (users). Default: ${DEFAULT_THREADS}."
echo " -l, --loops Number of loops each thread will execute. Default: ${DEFAULT_LOOPS}."
echo " -h, --help Display this help message and exit."
}
while [[ $# -gt 0 ]]; do
key="$1"
case $key in
-o|--output)
APP_NAME="$2"
shift
shift
;;
-H|--host)
TARGET_URL="$2"
shift
shift
;;
-t|--threads)
THREADS="$2"
shift
shift
;;
-l|--loops)
LOOPS="$2"
shift
shift
;;
-h|--help)
show_help
exit 0
;;
*)
echo "Unknown option: $1"
show_help
exit 1
;;
esac
done
if [ -z "$APP_NAME" ] || [ -z "$TARGET_URL" ]; then
echo "ERROR: Missing required arguments."
echo ""
show_help
exit 1
fi
# --- Define Output Structure ---
MAIN_OUTPUT_DIR="./${APP_NAME}"
RESULT_CSV="${MAIN_OUTPUT_DIR}/${APP_NAME}_benchmark.csv"
RESULT_DASHBOARD="${MAIN_OUTPUT_DIR}/${APP_NAME}_dashboard"
JMETER_LOG_FILE="${MAIN_OUTPUT_DIR}/${APP_NAME}_jmeter.log"
echo "--- Preparing for test run: '$APP_NAME' ---"
mkdir -p "$MAIN_OUTPUT_DIR"
echo "Output will be saved in: $MAIN_OUTPUT_DIR"
echo "Cleaning up previous results..."
rm -f "$RESULT_CSV"
rm -rf "$RESULT_DASHBOARD"
rm -f "$JMETER_LOG_FILE"
echo "Cleanup complete."
echo ""
echo "--- Starting JMeter Benchmark ---"
echo " Target Host: $TARGET_URL"
echo " Concurrency: $THREADS threads"
echo " Iterations: $LOOPS loops per thread"
echo " Total Requests: $((THREADS * LOOPS))"
echo "---------------------------------"
$JMETER_CMD -n \
-t "$JMX_FILE" \
-l "$RESULT_CSV" \
-e -o "$RESULT_DASHBOARD" \
-j "$JMETER_LOG_FILE" \
-Jthreads="$THREADS" \
-Jloops="$LOOPS" \
-Jurl="$TARGET_URL"
if [ $? -eq 0 ]; then
echo ""
echo "--- Benchmark Finished Successfully ---"
echo "The HTML report is available here:"
echo "file://$PWD/${RESULT_DASHBOARD}/index.html"
echo "---------------------------------------"
else
echo ""
echo "--- JMeter Finished with an Error ---"
echo "Check the log file for details: ${JMETER_LOG_FILE}"
echo "-------------------------------------"
fi

throughput/stitcher-benchmark.jmx (new file, 179 lines)

@@ -0,0 +1,179 @@
<?xml version="1.0" encoding="UTF-8"?>
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.6.3">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="stitcher-benchmark">
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables">
<collectionProp name="Arguments.arguments"/>
</elementProp>
</TestPlan>
<hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="Thread Group">
<stringProp name="ThreadGroup.num_threads">${__P(threads, 10)}</stringProp>
<intProp name="ThreadGroup.ramp_time">1</intProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller">
<stringProp name="LoopController.loops">${__P(loops, 50)}</stringProp>
<boolProp name="LoopController.continue_forever">false</boolProp>
</elementProp>
</ThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="HTTP Request">
<stringProp name="HTTPSampler.domain">${__P(url, stitchaton.local)}</stringProp>
<stringProp name="HTTPSampler.port">5000</stringProp>
<stringProp name="HTTPSampler.protocol">http</stringProp>
<stringProp name="HTTPSampler.path">/api/image/generate</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">{&#xd;
&quot;canvas_rect&quot;: &quot;${canvasRect}&quot;,&#xd;
&quot;crop_offset&quot;: [0.25, 0.25],&#xd;
&quot;crop_size&quot;: [0.5, 0.5],&#xd;
&quot;output_scale&quot;: 0.5&#xd;
}</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
</HTTPSamplerProxy>
<hashTree>
<JSR223PreProcessor guiclass="TestBeanGUI" testclass="JSR223PreProcessor" testname="JSR223 PreProcessor">
<stringProp name="cacheKey">true</stringProp>
<stringProp name="filename"></stringProp>
<stringProp name="parameters"></stringProp>
<stringProp name="script">// Function to convert a numeric index (1-31) to the SBS Plate Row format (A-AE)
String indexToRow(int num) {
if (num &lt;= 0) return &quot;&quot;;
if (num &lt;= 26) {
// A-Z is ASCII 65-90. &apos;A&apos; is char 65. So (num - 1) + 65.
return (char)(num + 64) as String;
} else {
// For AA-AE, the first char is &apos;A&apos;.
// 27 -&gt; AA, 28 -&gt; AB, etc.
// The second char is (num - 27) + 65 -&gt; &apos;A&apos;, &apos;B&apos;, etc.
return &quot;A&quot; + (char)(num - 26 + 64) as String;
}
}
// Define the maximum starting indices for a 2x2 grid
// Max row is AE (31), so max starting row is AD (30)
// Max col is 55, so max starting col is 54
int maxStartRowIndex = 30;
int maxStartCol = 54;
// Generate random starting coordinates
// nextInt(bound) generates 0 to bound-1, so add 1
int randomRowStartIndex = new Random().nextInt(maxStartRowIndex) + 1;
int randomColStart = new Random().nextInt(maxStartCol) + 1;
// Calculate the end coordinates for the 2x2 grid
String rowStart = indexToRow(randomRowStartIndex);
String rowEnd = indexToRow(randomRowStartIndex + 1);
int colEnd = randomColStart + 1;
// Construct the final canvas_rect string
String canvasRectValue = &quot;${rowStart}${randomColStart}:${rowEnd}${colEnd}&quot;;
// Store the generated string in a JMeter variable named &quot;canvasRect&quot;
// This makes it available to the HTTP Request sampler
vars.put(&quot;canvasRect&quot;, canvasRectValue);
// Optional: Log the generated value to see it in the JMeter log (for debugging)
log.info(&quot;Generated canvas_rect: &quot; + canvasRectValue);</stringProp>
<stringProp name="scriptLanguage">groovy</stringProp>
</JSR223PreProcessor>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Content-Type</stringProp>
<stringProp name="Header.value">application/json</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<ResultCollector guiclass="SummaryReport" testclass="ResultCollector" testname="Summary Report">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename">/home/ibnufadhil/Documents/projects/benchmark/result.csv</stringProp>
</ResultCollector>
<hashTree/>
<ResultCollector guiclass="ViewResultsFullVisualizer" testclass="ResultCollector" testname="View Results Tree">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
</hashTree>
</hashTree>
</hashTree>
</jmeterTestPlan>