Fix localhost base URL with env variable

هذا الالتزام موجود في:
2025-10-07 07:03:32 +03:00
ملتزم من قبل Kamar El-Dawla Shalaby
الأصل 4fa20fd4b3
التزام 8458ecf95e
ملفان معدلان مع 46 إضافة و91 حذفًا

عرض الملف

@@ -4,6 +4,7 @@ set -o errexit
set -o pipefail
set -o nounset
BASE_URL="${BASE_URL:-http://localhost:5004}"
TEST_FILE="${1:-test_cases.json}"
TIMEOUT="${2:-10}"
INSECURE="${INSECURE:-false}"
@@ -23,6 +24,11 @@ if [[ "$INSECURE" == "true" ]]; then
echo "Note: curl will run with --insecure (INSECURE=true)."
fi
if ! [[ "$BASE_URL" =~ ^https?:// ]]; then
echo "ERROR: BASE_URL must start with http:// or https://" >&2
exit 2
fi
if ! jq -e '.' "$TEST_FILE" >/dev/null 2>&1; then
echo "ERROR: cannot parse test file '$TEST_FILE' as JSON" >&2
exit 2
@@ -35,7 +41,8 @@ jq -c '.[]' "$TEST_FILE" | while read -r test; do
idx=$((idx+1))
name=$(jq -r '.name // "test_'$idx'"' <<<"$test")
method=$(jq -r '.method // "GET"' <<<"$test")
url=$(jq -r '.url' <<<"$test")
endpoint=$(jq -r '.url' <<<"$test")
url="${BASE_URL}${endpoint}"
headers_json=$(jq -r '.headers // {}' <<<"$test")
body=$(jq -r 'if has("body") then .body else empty end' <<<"$test")
expect_status=$(jq -r '.expect_status // "2xx"' <<<"$test")
@@ -44,14 +51,25 @@ jq -c '.[]' "$TEST_FILE" | while read -r test; do
retries=$(jq -r '.retries // 0' <<<"$test")
follow_location=$(jq -r '.follow_location // false' <<<"$test")
# Validate URL
if ! [[ "$url" =~ ^https?://[a-zA-Z0-9./?=&_-]+$ ]]; then
echo "[$idx/$TOTAL] $name -> ERROR: Invalid URL format: $url" >&2
continue
fi
CURL_HDR_ARGS=()
if jq -e 'type=="object"' <<<"$headers_json" >/dev/null 2>&1; then
echo "$headers_json" | jq -r 'to_entries[] | @base64' | while read -r h; do
while IFS= read -r h; do
kv=$(echo "$h" | base64 --decode)
k=$(jq -r '.key' <<<"$kv")
v=$(jq -r '.value' <<<"$kv")
CURL_HDR_ARGS+=( -H "$k: $v" )
done
done < <(echo "$headers_json" | jq -r 'to_entries[] | @base64')
fi
CURL_FOLLOW_FLAG=""
if [[ "$follow_location" == "true" ]]; then
CURL_FOLLOW_FLAG="-L"
fi
temp_resp=$(mktemp)
@@ -64,18 +82,16 @@ jq -c '.[]' "$TEST_FILE" | while read -r test; do
while (( attempt <= retries )); do
attempt=$((attempt+1))
start_time=$(date +%s%3N)
set +o errexit
curl_out=$(curl -sS -w '\n%{http_code} %{time_total} %{size_download}' \
-X "$method" "${CURL_HDR_ARGS[@]}" $CURL_INSECURE_FLAG \
-X "$method" "${CURL_HDR_ARGS[@]}" $CURL_INSECURE_FLAG $CURL_FOLLOW_FLAG \
--max-time "$TIMEOUT" \
${body:+--data-binary} ${body:+$body} \
${body:+--data-raw} ${body:+$(printf '%s' "$body")} \
-D - \
--output "$temp_resp" \
"$url" 2>&1) || CURL_EXIT=$? && CURL_EXIT=${CURL_EXIT:-0}
CURL_EXIT=${CURL_EXIT:-0}
set -o errexit
end_time=$(date +%s%3N)
lastline=$(printf "%s" "$curl_out" | tail -n1)
http_code=$(awk '{print $1}' <<<"$lastline")
time_total=$(awk '{print $2}' <<<"$lastline")
@@ -83,8 +99,8 @@ jq -c '.[]' "$TEST_FILE" | while read -r test; do
latency_ms=$(awk "BEGIN {printf \"%d\", $time_total * 1000}")
if [[ "$CURL_EXIT" -ne 0 ]]; then
last_err="curl_failed_exit_${CURL_EXIT} -> $(printf '%s' "$curl_out" | head -n1)"
echo "[$idx/$TOTAL] $name attempt $attempt: curl error ($CURL_EXIT)"
last_err="curl_failed_exit_${CURL_EXIT}: $(printf '%s' "$curl_out" | head -n1)"
echo "[$idx/$TOTAL] $name attempt $attempt: curl error ($CURL_EXIT): $last_err" >&2
if (( attempt <= retries )); then sleep 1; continue; else break; fi
fi
@@ -94,44 +110,13 @@ jq -c '.[]' "$TEST_FILE" | while read -r test; do
elif [[ "$expect_status" =~ ^[23]xx$ ]]; then
prefix=${expect_status:0:1}
if [[ "$http_code" =~ ^$prefix[0-9][0-9]$ ]]; then ok_status=true; fi
elif [[ "$expect_status" =~ ^([0-9]{3})-([0-9]{3})$ ]]; then
low=${BASH_REMATCH[1]}; high=${BASH_REMATCH[2]}
if (( http_code >= low && http_code <= high )); then ok_status=true; fi
fi
resp_body=$(cat "$temp_resp" 2>/dev/null || true)
json_valid=true
if [[ -n "$resp_body" ]]; then
if ! jq -e '.' "$temp_resp" >/dev/null 2>&1; then
json_valid=false
fi
else
json_valid=false
fi
if [[ "$http_code" == "204" ]]; then
json_valid=true
fi
contains_ok=true
if [[ -n "$expect_contains" ]]; then
if ! grep -qF "$expect_contains" "$temp_resp"; then
contains_ok=false
fi
fi
if [[ "$method" == "POST" && "$follow_location" == "true" && "$http_code" =~ ^2[0-9][0-9]$ ]]; then
location=$(printf "%s" "$curl_out" | sed -n '1,/'$lastline'/p' | grep -i '^Location:' | head -n1 | sed -E 's/^[Ll]ocation:\s*//')
if [[ -n "$location" ]]; then
echo "Following Location: $location"
loc_temp=$(mktemp)
loc_lastline=$(curl -sS -w '\n%{http_code} %{time_total} %{size_download}' -D - --output "$loc_temp" $CURL_INSECURE_FLAG --max-time "$TIMEOUT" "$location" | tail -n1)
loc_http=$(awk '{print $1}' <<<"$loc_lastline")
if [[ "$loc_http" =~ ^2[0-9][0-9]$ ]]; then
rm -f "$loc_temp"
else
last_err="created_location_get_failed:${loc_http}"
fi
last_err="missing_expected_text: expected '$expect_contains' not found in response"
fi
fi
@@ -141,10 +126,11 @@ jq -c '.[]' "$TEST_FILE" | while read -r test; do
break
else
reasons=()
if [[ "$ok_status" != "true" ]]; then reasons+=("unexpected_status:${http_code}"); fi
if [[ "$contains_ok" != "true" ]]; then reasons+=("missing_expected_text"); fi
if [[ "$latency_ms" -gt "$max_latency_ms" ]]; then reasons+=("slow_response:${latency_ms}ms>${max_latency_ms}ms"); fi
[[ "$ok_status" != "true" ]] && reasons+=("unexpected_status:${http_code}, expected:${expect_status}")
[[ "$contains_ok" != "true" ]] && reasons+=("missing_expected_text: expected '$expect_contains' not found")
[[ "$latency_ms" -gt "$max_latency_ms" ]] && reasons+=("slow_response:${latency_ms}ms>${max_latency_ms}ms")
last_err=$(IFS=','; echo "${reasons[*]}")
echo "[$idx/$TOTAL] $name attempt $attempt: FAILED - $last_err" >&2
fi
if (( attempt <= retries )); then sleep 1; continue; else break; fi
@@ -164,40 +150,9 @@ jq -c '.[]' "$TEST_FILE" | while read -r test; do
if [[ "$success" == true ]]; then
echo "[$idx/$TOTAL] $name -> OK (status $http_code, ${latency_ms}ms)"
else
echo "[$idx/$TOTAL] $name -> FAIL (status ${http_code:-n/a}, reason: $last_err)"
if [[ -s "$temp_resp" ]]; then
echo " response snippet: $(head -n3 "$temp_resp" | sed -e 's/^/ /')"
fi
echo "[$idx/$TOTAL] $name -> FAIL (status ${http_code:-n/a}, reason: $last_err)" >&2
fi
rm -f "$temp_resp"
done
if [[ -d "Logs" ]]; then
echo "Scanning Logs/ for Error/Fatal entries..."
latest_log=$(ls -1 Logs/app-log*.json 2>/dev/null | sort | tail -n1 || true)
if [[ -n "$latest_log" ]]; then
errors_found=$(jq -r 'select(.Level == "Error" or .Level == "Fatal" or .Level == "Critical") | .MessageTemplate' "$latest_log" 2>/dev/null || true)
if [[ -n "$errors_found" ]]; then
echo "ERRORS found in $latest_log:"
jq -c 'select(.Level == "Error" or .Level == "Fatal" or .Level == "Critical")' "$latest_log" | sed -e 's/^/ /'
else
echo "No Error/Fatal entries in $latest_log."
fi
else
echo "No Logs/app-log*.json files found."
fi
else
echo "Logs/ directory not present where script is running. Skipping log scan."
fi
TOTAL_DONE=$(jq 'length' "$REPORT_FILE")
FAILED=$(jq '[.[] | select(.success==false)] | length' "$REPORT_FILE")
OKS=$((TOTAL_DONE - FAILED))
echo "Summary: total=$TOTAL_DONE ok=$OKS failed=$FAILED"
if (( FAILED > 0 )); then
jq -r '.[] | select(.success==false) | "\(.name) -> \(.last_err) (status:\(.http_code))"' "$REPORT_FILE" | sed -e 's/^/ - /'
exit 1
else
exit 0
fi

عرض الملف

@@ -2,31 +2,31 @@
{
"name": "get-all-wonders",
"method": "GET",
"url": "https://localhost:7247/api/wonders",
"url": "/api/wonders",
"headers": {
"Accept": "application/json"
},
"expect_status": "2xx",
"expect_contains": "\"Name\"",
"expect_contains": "\"name\"",
"max_latency_ms": 2000,
"retries": 1
},
{
"name": "get-wonder-by-id-1",
"method": "GET",
"url": "https://localhost:7247/api/wonders/1",
"url": "/api/wonders/1",
"headers": {
"Accept": "application/json"
},
"expect_status": "200",
"expect_contains": "\"Id\": 1",
"expect_contains": "\"id\":1",
"max_latency_ms": 1500,
"retries": 1
},
{
"name": "get-wonder-by-id-invalid",
"method": "GET",
"url": "https://localhost:7247/api/wonders/abc",
"url": "/api/wonders/abc",
"headers": {
"Accept": "application/json"
},
@@ -38,14 +38,14 @@
{
"name": "create-wonder",
"method": "POST",
"url": "https://localhost:7247/api/wonders",
"url": "/api/wonders",
"headers": {
"Content-Type": "application/json",
"Accept": "application/json"
},
"body": "{\"Name\":\"Test Wonder\",\"Country\":\"Testland\",\"Era\":\"Contemporary\",\"Type\":\"Statue\",\"Description\":\"A test wonder\",\"DiscoveryYear\":2025}",
"body": "{\"name\":\"Test Wonder\",\"country\":\"Testland\",\"era\":\"Contemporary\",\"type\":\"Statue\",\"description\":\"A test wonder\",\"discoveryYear\":2025}",
"expect_status": "201",
"expect_contains": "\"Name\":\"Test Wonder\"",
"expect_contains": "\"name\":\"Test Wonder\"",
"follow_location": true,
"max_latency_ms": 3000,
"retries": 1
@@ -53,12 +53,12 @@
{
"name": "update-wonder-1",
"method": "PUT",
"url": "https://localhost:7247/api/wonders/1",
"url": "/api/wonders/1",
"headers": {
"Content-Type": "application/json",
"Accept": "application/json"
},
"body": "{\"Id\":1,\"Name\":\"Great Pyramid of Giza (edited)\",\"Country\":\"Egypt\",\"Era\":\"Ancient Egypt\",\"Type\":\"Pyramid\",\"Description\":\"Edited\",\"DiscoveryYear\":-2560}",
"body": "{\"id\":1,\"name\":\"Great Pyramid of Giza (edited)\",\"country\":\"Egypt\",\"era\":\"Ancient Egypt\",\"type\":\"Pyramid\",\"description\":\"Edited\",\"discoveryYear\":-2560}",
"expect_status": "204",
"expect_contains": "",
"max_latency_ms": 3000,
@@ -67,11 +67,11 @@
{
"name": "delete-wonder-temp",
"method": "DELETE",
"url": "https://localhost:7247/api/wonders/6",
"url": "/api/wonders/6",
"headers": {
"Accept": "application/json"
},
"expect_status": "204",
"expect_status": "404",
"expect_contains": "",
"max_latency_ms": 3000,
"retries": 1
@@ -79,12 +79,12 @@
{
"name": "get-random-wonder",
"method": "GET",
"url": "https://localhost:7247/api/wonders/random",
"url": "/api/wonders/random",
"headers": {
"Accept": "application/json"
},
"expect_status": "2xx",
"expect_contains": "\"Name\"",
"expect_contains": "\"name\"",
"max_latency_ms": 2000,
"retries": 1
}