0

HLS tests for local and S3 storage. Closes #1457 (#1460)

This commit is contained in:
Gabe Kangas 2021-10-06 21:03:48 -07:00 committed by GitHub
parent fb1c919e59
commit 7b097ccdd9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 11522 additions and 0 deletions

28
.github/workflows/hls-tests.yml vendored Normal file
View File

@@ -0,0 +1,28 @@
# CI workflow: runs the automated HLS tests (test/automated/hls) on every
# push and pull request, skipping changes that only touch the web frontend.
name: Automated HLS tests
on:
  push:
    paths-ignore:
      - 'webroot/**'
      - pkged.go
  pull_request:
    paths-ignore:
      - 'webroot/**'
      - pkged.go

# S3 credentials for the optional external-storage half of run.sh.
# If S3_BUCKET is unset the script exits after the local-storage tests.
env:
  S3_BUCKET: ${{ secrets.S3_BUCKET }}
  S3_ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
  S3_ENDPOINT: ${{ secrets.S3_ENDPOINT }}
  S3_REGION: ${{ secrets.S3_REGION }}
  S3_SECRET: ${{ secrets.S3_SECRET }}

jobs:
  api:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Run HLS tests
        run: cd test/automated/hls && ./run.sh

View File

@@ -0,0 +1,122 @@
// HLS end-to-end test: fetches the master playlist from a locally running
// Owncast instance, follows it to the media playlist, and verifies that
// video segments are served and rotate over time.
const m3u8Parser = require('m3u8-parser');
const fetch = require('node-fetch');
const url = require('url');
const { test } = require('@jest/globals');

// Location of the stream on the Owncast instance under test.
const HLS_SUBDIRECTORY = '/hls/';
const PLAYLIST_NAME = 'stream.m3u8';
const TEST_OWNCAST_INSTANCE = 'http://localhost:8080';

// How many times the media playlist is re-fetched to watch segments change.
const HLS_FETCH_ITERATIONS = 5;

// Polling the playlist HLS_FETCH_ITERATIONS times with 3s waits is slow;
// raise jest's per-test timeout accordingly.
jest.setTimeout(40000);
// Fetch an HLS playlist over HTTP and return its parsed manifest.
// Fails the surrounding test if the playlist is not served with a 200.
async function getPlaylist(urlString) {
  const response = await fetch(urlString);
  expect(response.status).toBe(200);

  const playlistText = await response.text();

  const parser = new m3u8Parser.Parser();
  parser.push(playlistText);
  parser.end();

  return parser.manifest;
}
// Resolve a possibly-relative playlist/segment URI against the URL of the
// playlist it was found in, returning an absolute URL string.
//
// urlString - URI taken from a playlist (absolute, root-relative, or
//             relative to the playlist's directory).
// baseUrl   - absolute URL of the playlist that referenced urlString.
//
// Uses WHATWG URL resolution instead of the deprecated legacy
// url.parse()/url.format() API. This also fixes root-relative URIs
// (e.g. "/hls/0.ts"), which the old string-splicing logic turned into a
// doubled path like "/hls//hls/0.ts".
function normalizeUrl(urlString, baseUrl) {
  return new URL(urlString, baseUrl).toString();
}
// Request every segment URL in turn and assert that each one is served
// back with a valid 200 status.
async function validateSegments(segments) {
  for (const segmentUrl of segments) {
    const response = await fetch(segmentUrl);
    expect(response.status).toBe(200);
  }
}
describe('fetch and parse HLS', () => {
  const masterPlaylistUrl = `${TEST_OWNCAST_INSTANCE}${HLS_SUBDIRECTORY}${PLAYLIST_NAME}`;
  let masterPlaylist;
  let mediaPlaylistUrl;

  // Note: the original tests mixed `async` with the `done` callback
  // (deprecated in jest 26, an error in jest 27+) and swallowed fetch/parse
  // errors with console.error, which made later tests fail with a confusing
  // TypeError on `undefined` instead of the real cause. Letting the async
  // test function reject surfaces the actual error.
  test('fetch master playlist', async () => {
    masterPlaylist = await getPlaylist(masterPlaylistUrl);
  });

  test('verify there is a media playlist', () => {
    // Master playlist should have at least one media playlist.
    expect(masterPlaylist.playlists.length).toBe(1);
    mediaPlaylistUrl = normalizeUrl(
      masterPlaylist.playlists[0].uri,
      masterPlaylistUrl
    );
  });

  test('verify there are segments', async () => {
    const playlist = await getPlaylist(mediaPlaylistUrl);
    expect(playlist.segments.length).toBeGreaterThan(0);
  });

  // Iterate over segments and make sure they change.
  // Wait between fetches, like a real HLS player would, and verify the
  // newest segment differs from the one seen in the previous iteration.
  let lastSegmentUrl;
  for (let i = 0; i < HLS_FETCH_ITERATIONS; i++) {
    test(`fetch and monitor media playlist segments ${i}`, async () => {
      await new Promise((resolve) => setTimeout(resolve, 3000));

      const playlist = await getPlaylist(mediaPlaylistUrl);
      const segments = playlist.segments;
      const newestSegment = segments[segments.length - 1];

      // The stream is live, so the newest segment must have rotated.
      expect(newestSegment.uri).not.toBe(lastSegmentUrl);

      const segmentUrl = normalizeUrl(newestSegment.uri, mediaPlaylistUrl);
      await validateSegments([segmentUrl]);

      lastSegmentUrl = newestSegment.uri;
    });
  }
});

11273
test/automated/hls/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,18 @@
{
"name": "owncast-test-automation",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "jest --bail"
},
"author": "",
"license": "ISC",
"dependencies": {
"m3u8-parser": "^4.7.0",
"node-fetch": "^2.6.5"
},
"devDependencies": {
"jest": "^26.6.3"
}
}

81
test/automated/hls/run.sh Executable file
View File

@@ -0,0 +1,81 @@
#!/bin/bash
# End-to-end HLS test driver: builds owncast, streams a test file at it
# over RTMP, and runs the jest HLS tests against local (and optionally S3)
# storage. Aborts on any command failure.
set -e

function start_stream() {
  # Start streaming the test file over RTMP to
  # the local owncast instance.
  # -stream_loop -1 loops the source forever; -re paces it in real time.
  # The PID is kept so the stream can be killed between test phases.
  ffmpeg -hide_banner -loglevel panic -stream_loop -1 -re -i ../test.mp4 -vcodec libx264 -profile:v main -sc_threshold 0 -b:v 1300k -acodec copy -f flv rtmp://127.0.0.1/live/abc123 &
  STREAMING_CLIENT=$!
}
# Switch the running owncast instance to S3 storage via the admin API,
# using credentials from the S3_* environment variables (supplied by the
# CI workflow's secrets).
function update_storage_config() {
  echo "Configuring external storage to use ${S3_BUCKET}..."
  # Hard coded to admin:abc123 for auth
  curl 'http://localhost:8080/api/admin/config/s3' \
    -H 'Authorization: Basic YWRtaW46YWJjMTIz' \
    --data-raw "{\"value\":{\"accessKey\":\"${S3_ACCESS_KEY}\",\"acl\":\"\",\"bucket\":\"${S3_BUCKET}\",\"enabled\":true,\"endpoint\":\"${S3_ENDPOINT}\",\"region\":\"${S3_REGION}\",\"secret\":\"${S3_SECRET}\",\"servingEndpoint\":\"\"}}"
}
# Throwaway database so each run starts from a fresh install.
TEMP_DB=$(mktemp)

# Install the node test framework
npm install --silent >/dev/null

# Download a specific version of ffmpeg (skipped when already cached).
if [ ! -d "ffmpeg" ]; then
  mkdir ffmpeg
  pushd ffmpeg >/dev/null
  curl -sL https://github.com/vot/ffbinaries-prebuilt/releases/download/v4.2.1/ffmpeg-4.2.1-linux-64.zip --output ffmpeg.zip >/dev/null
  unzip -o ffmpeg.zip >/dev/null
  popd >/dev/null
fi

# Put the downloaded ffmpeg on PATH unconditionally. Previously this was
# done inside the download branch, so a cached ffmpeg/ directory left the
# binary unreachable.
PATH="$PATH:$(pwd)/ffmpeg"

pushd ../../.. >/dev/null

# Build and run owncast from source
go build -o owncast main.go pkged.go
./owncast -database "$TEMP_DB" &
SERVER_PID=$!

# Remove the temp database and stop the server and stream on exit,
# whether the tests pass or fail.
function finish {
  echo "Cleaning up..."
  rm "$TEMP_DB"
  kill "$SERVER_PID" "$STREAMING_CLIENT"
}
trap finish EXIT

popd >/dev/null

# Give owncast time to come up before streaming at it.
sleep 5

# Start the stream.
start_stream
echo "Waiting..."
sleep 13

# Run tests against a fresh install with no settings.
npm test

# Determine if we should continue testing with S3 configuration.
if [[ -z "${S3_BUCKET}" ]]; then
  echo "No S3 configuration set"
  exit 0
fi

# Kill the stream.
kill "$STREAMING_CLIENT"
sleep 5

# Update the server config to use S3 for storage.
update_storage_config

# start the stream.
start_stream
echo "Waiting..."
sleep 13

# Re-run the HLS test against the external storage configuration.
npm test