WIP replay integration tests

This commit is contained in:
Gabe Kangas 2023-08-31 22:19:37 -07:00
parent 8021c66869
commit d947c4b4a4
No known key found for this signature in database
GPG Key ID: 4345B2060657F330
5 changed files with 11214 additions and 57 deletions

10985
test/automated/replays/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,18 @@
{
"name": "owncast-test-automation",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "jest --bail"
},
"author": "",
"license": "ISC",
"dependencies": {
"m3u8-parser": "^4.7.0",
"node-fetch": "^2.6.7"
},
"devDependencies": {
"jest": "^26.6.3"
}
}

View File

@@ -0,0 +1,130 @@
const m3u8Parser = require('m3u8-parser');
const fetch = require('node-fetch');
const url = require('url');
const { test } = require('@jest/globals');
const REPLAYS_API = '/api/replays';
const TEST_OWNCAST_INSTANCE = 'http://localhost:8080';
const HLS_FETCH_ITERATIONS = 5;
jest.setTimeout(40000);
// Download an HLS playlist, assert the request succeeded, and return the
// parsed m3u8 manifest object.
async function getPlaylist(urlString) {
  const playlistResponse = await fetch(urlString);
  expect(playlistResponse.status).toBe(200);

  const playlistBody = await playlistResponse.text();

  const manifestParser = new m3u8Parser.Parser();
  manifestParser.push(playlistBody);
  manifestParser.end();

  return manifestParser.manifest;
}
// Hit the replays API endpoint, assert a 200 response, and return the raw
// response body as text.
async function getReplaysAPI(urlString) {
  const apiResponse = await fetch(urlString);
  expect(apiResponse.status).toBe(200);
  return apiResponse.text();
}
// Resolve a possibly-relative playlist/segment URI against the URL of the
// playlist that referenced it, returning an absolute URL string.
//
// The previous implementation hand-rolled the resolution with the deprecated
// `url.parse()`/`url.format()` legacy API plus naive string surgery on the
// base path, which produced malformed doubled paths for root-relative URIs
// (e.g. '/hls/0.ts' against 'http://host/hls/playlist.m3u8' became
// 'http://host/hls//hls/0.ts'). The WHATWG URL constructor implements the
// standard resolution rules directly: already-absolute URIs pass through
// unchanged, relative and root-relative URIs resolve against the base.
function normalizeUrl(urlString, baseUrl) {
  return new URL(urlString, baseUrl).toString();
}
// Iterate over an array of video segments and make sure they return back
// valid status.
// Request every segment URL in the given list and assert that each one is
// served with a successful status.
async function validateSegments(segments) {
  for (const segmentUrl of segments) {
    const segmentResponse = await fetch(segmentUrl);
    expect(segmentResponse.status).toBe(200);
  }
}
describe('fetch list of clips', () => {
  const replaysAPIEndpoint = `${TEST_OWNCAST_INSTANCE}${REPLAYS_API}`;
  // var masterPlaylist;
  // var mediaPlaylistUrl;

  // Fetch the list of replays from the API.
  // Fixed: the previous version combined an async test function with a
  // `done` callback (deprecated in Jest, a hard error in Jest >= 27) and
  // caught every failure only to console.error it before calling done(),
  // so the test could never fail. Letting the promise reject (or the
  // status assertion inside getReplaysAPI throw) now fails the test.
  test('fetch replay list', async () => {
    console.log(replaysAPIEndpoint);
    const response = await getReplaysAPI(replaysAPIEndpoint);
    console.log(response);
  });

  // WIP: the tests below were carried over from the HLS suite and still
  // need to be adapted for replays before being re-enabled.
  // test('verify there is a media playlist', () => {
  //   // Master playlist should have at least one media playlist.
  //   expect(masterPlaylist.playlists.length).toBe(1);
  //   try {
  //     mediaPlaylistUrl = normalizeUrl(
  //       masterPlaylist.playlists[0].uri,
  //       masterPlaylistUrl
  //     );
  //   } catch (e) {
  //     console.error('error fetching and parsing media playlist', e);
  //   }
  // });
  // test('verify there are segments', async (done) => {
  //   let playlist;
  //   try {
  //     playlist = await getPlaylist(mediaPlaylistUrl);
  //   } catch (e) {
  //     console.error('error verifying segments in media playlist', e);
  //   }
  //   const segments = playlist.segments;
  //   expect(segments.length).toBeGreaterThan(0);
  //   done();
  // });
  // // Iterate over segments and make sure they change.
  // // Use the reported duration of the segment to wait to
  // // fetch another just like a real HLS player would do.
  // var lastSegmentUrl;
  // for (let i = 0; i < HLS_FETCH_ITERATIONS; i++) {
  //   test('fetch and monitor media playlist segments ' + i, async (done) => {
  //     await new Promise((r) => setTimeout(r, 5000));
  //     try {
  //       var playlist = await getPlaylist(mediaPlaylistUrl);
  //     } catch (e) {
  //       console.error('error updating media playlist', mediaPlaylistUrl, e);
  //     }
  //     const segments = playlist.segments;
  //     const segment = segments[segments.length - 1];
  //     expect(segment.uri).not.toBe(lastSegmentUrl);
  //     try {
  //       var segmentUrl = normalizeUrl(segment.uri, mediaPlaylistUrl);
  //       await validateSegments([segmentUrl]);
  //     } catch (e) {
  //       console.error('unable to validate HLS segment', segmentUrl, e);
  //     }
  //     lastSegmentUrl = segment.uri;
  //     done();
  //   });
  // }
});

19
test/automated/replays/run.sh Executable file
View File

@@ -0,0 +1,19 @@
#!/bin/bash
set -e

# Integration test runner for the stream replay feature: builds and starts
# owncast with replay features enabled, streams test video at it, then runs
# the jest suite in this directory.

source ../tools.sh

# Install the node test framework
npm install --silent >/dev/null

install_ffmpeg

# Make sure the owncast server and the test stream started below are torn
# down when this script exits, pass or fail (finish is defined in tools.sh
# and kills $SERVER_PID/$STREAM_PID and removes the temp database).
trap finish EXIT

start_owncast "--enableReplayFeatures"

start_stream
sleep 10

# Run tests against a fresh install with no settings.
npm test

View File

@@ -3,91 +3,96 @@
set -e
function install_ffmpeg() {
# install a specific version of ffmpeg
# install a specific version of ffmpeg
FFMPEG_VER="4.4.1"
FFMPEG_PATH="$(pwd)/ffmpeg-$FFMPEG_VER"
PATH=$FFMPEG_PATH:$PATH
FFMPEG_VER="4.4.1"
FFMPEG_PATH="$(pwd)/ffmpeg-$FFMPEG_VER"
PATH=$FFMPEG_PATH:$PATH
if ! [[ -d "$FFMPEG_PATH" ]]; then
mkdir "$FFMPEG_PATH"
fi
if ! [[ -d "$FFMPEG_PATH" ]]; then
mkdir "$FFMPEG_PATH"
fi
pushd "$FFMPEG_PATH" >/dev/null
pushd "$FFMPEG_PATH" >/dev/null
if [[ -x "$FFMPEG_PATH/ffmpeg" ]]; then
ffmpeg_version=$("$FFMPEG_PATH/ffmpeg" -version | awk -F 'ffmpeg version' '{print $2}' | awk 'NR==1{print $1}')
if [[ -x "$FFMPEG_PATH/ffmpeg" ]]; then
if [[ "$ffmpeg_version" == "$FFMPEG_VER-static" ]]; then
popd >/dev/null
return 0
else
mv "$FFMPEG_PATH/ffmpeg" "$FFMPEG_PATH/ffmpeg.bk" || rm -f "$FFMPEG_PATH/ffmpeg"
fi
fi
ffmpeg_version=$("$FFMPEG_PATH/ffmpeg" -version | awk -F 'ffmpeg version' '{print $2}' | awk 'NR==1{print $1}')
rm -f ffmpeg.zip
curl -sL --fail https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v${FFMPEG_VER}/ffmpeg-${FFMPEG_VER}-linux-64.zip --output ffmpeg.zip >/dev/null
unzip -o ffmpeg.zip >/dev/null && rm -f ffmpeg.zip
chmod +x ffmpeg
PATH=$FFMPEG_PATH:$PATH
if [[ "$ffmpeg_version" == "$FFMPEG_VER-static" ]]; then
popd >/dev/null
return 0
else
mv "$FFMPEG_PATH/ffmpeg" "$FFMPEG_PATH/ffmpeg.bk" || rm -f "$FFMPEG_PATH/ffmpeg"
fi
fi
popd >/dev/null
rm -f ffmpeg.zip
curl -sL --fail https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v${FFMPEG_VER}/ffmpeg-${FFMPEG_VER}-linux-64.zip --output ffmpeg.zip >/dev/null
unzip -o ffmpeg.zip >/dev/null && rm -f ffmpeg.zip
chmod +x ffmpeg
PATH=$FFMPEG_PATH:$PATH
popd >/dev/null
}
function start_owncast() {
# Build and run owncast from source
echo "Building owncast..."
pushd "$(git rev-parse --show-toplevel)" >/dev/null
go build -o owncast main.go
# Build and run owncast from source
echo "Building owncast..."
pushd "$(git rev-parse --show-toplevel)" >/dev/null
go build -o owncast main.go
echo "Running owncast..."
./owncast -database "$TEMP_DB" &
SERVER_PID=$!
popd >/dev/null
if [ -z "$1" ]; then
echo "Running owncast..."
else
echo "Running owncast with flags: $1"
fi
sleep 5
./owncast -database "$TEMP_DB" $1 &
SERVER_PID=$!
popd >/dev/null
sleep 5
}
function start_stream() {
# Start streaming the test file over RTMP to the local owncast instance.
../../ocTestStream.sh &
STREAM_PID=$!
# Start streaming the test file over RTMP to the local owncast instance.
../../ocTestStream.sh &
STREAM_PID=$!
echo "Waiting for stream to start..."
sleep 12
echo "Waiting for stream to start..."
sleep 12
}
function update_storage_config() {
echo "Configuring external storage to use ${S3_BUCKET}..."
echo "Configuring external storage to use ${S3_BUCKET}..."
# Hard-coded to admin:abc123 for auth
curl --fail 'http://localhost:8080/api/admin/config/s3' \
-H 'Authorization: Basic YWRtaW46YWJjMTIz' \
--data-raw "{\"value\":{\"accessKey\":\"${S3_ACCESS_KEY}\",\"acl\":\"\",\"bucket\":\"${S3_BUCKET}\",\"enabled\":true,\"endpoint\":\"${S3_ENDPOINT}\",\"region\":\"${S3_REGION}\",\"secret\":\"${S3_SECRET}\",\"servingEndpoint\":\"\"}}"
# Hard-coded to admin:abc123 for auth
curl --fail 'http://localhost:8080/api/admin/config/s3' \
-H 'Authorization: Basic YWRtaW46YWJjMTIz' \
--data-raw "{\"value\":{\"accessKey\":\"${S3_ACCESS_KEY}\",\"acl\":\"\",\"bucket\":\"${S3_BUCKET}\",\"enabled\":true,\"endpoint\":\"${S3_ENDPOINT}\",\"region\":\"${S3_REGION}\",\"secret\":\"${S3_SECRET}\",\"servingEndpoint\":\"\"}}"
}
function kill_with_kids() {
# kill a process and all its children (by pid)! return no error.
# kill a process and all its children (by pid)! return no error.
if [[ -n $1 ]]; then
mapfile -t CHILDREN_PID_LIST < <(ps --ppid "$1" -o pid= &>/dev/null || true)
for child_pid in "${CHILDREN_PID_LIST[@]}"; do
kill "$child_pid" &>/dev/null || true
wait "$child_pid" &>/dev/null || true
done
kill "$1" &>/dev/null || true
wait "$1" &>/dev/null || true
fi
if [[ -n $1 ]]; then
mapfile -t CHILDREN_PID_LIST < <(ps --ppid "$1" -o pid= &>/dev/null || true)
for child_pid in "${CHILDREN_PID_LIST[@]}"; do
kill "$child_pid" &>/dev/null || true
wait "$child_pid" &>/dev/null || true
done
kill "$1" &>/dev/null || true
wait "$1" &>/dev/null || true
fi
}
function finish() {
echo "Cleaning up..."
kill_with_kids "$STREAM_PID"
kill "$SERVER_PID" &>/dev/null || true
wait "$SERVER_PID" &>/dev/null || true
echo "Cleaning up..."
kill_with_kids "$STREAM_PID"
kill "$SERVER_PID" &>/dev/null || true
wait "$SERVER_PID" &>/dev/null || true
rm -fr "$TEMP_DB"
}