This repository was archived by the owner on Nov 11, 2024. It is now read-only.

Commit d6ad4ab

Author: Pengfei Qu

add batch service to trigger transcoding only

1 parent 4765b23 · commit d6ad4ab

24 files changed: +377 −88 lines

CMakeLists.txt

Lines changed: 10 additions & 1 deletion
@@ -14,6 +14,14 @@ if (NOT DEFINED NLIVES)
   set(NLIVES "1")
 endif()
 
+if (NOT DEFINED SCENARIO)
+  set(SCENARIO "cdn")
+endif()
+
+if (SCENARIO STREQUAL "batch")
+  set(NLIVES "0")
+endif()
+
 file(GLOB dirs "deployment" "*")
 list(REMOVE_DUPLICATES dirs)
 foreach(dir ${dirs})
@@ -24,5 +32,6 @@ endforeach()
 
 # legal message
 execute_process(COMMAND printf "\nThis script will build third party components licensed under various open source licenses into your container images. The terms under which those components may be used and distributed can be found with the license document that is provided with those components. Please familiarize yourself with those terms to ensure your distribution of those components complies with the terms of those licenses.\n\n")
-execute_process(COMMAND printf "\n-- Setting: NVODS=${NVODS}, NLIVES=${NLIVES}\n")
+execute_process(COMMAND printf "\n-- Setting: SCENARIO=${SCENARIO}\n")
+execute_process(COMMAND printf "-- Setting: NVODS=${NVODS}, NLIVES=${NLIVES}\n")
 execute_process(COMMAND printf "-- Setting: REGISTRY=${REGISTRY}\n")
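The new SCENARIO cache variable selects which service set gets built; "batch" forces NLIVES to 0 so only the transcode-trigger path is generated. A minimal sketch of passing it directly to CMake (the repository's own build scripts remain the supported entry point; the out-of-tree build directory here is illustrative):

# hypothetical direct configure; the repo's build wrappers normally drive CMake
mkdir -p build && cd build
cmake -DSCENARIO=batch ..
make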

batch/.dockerignore

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+CMakeLists.txt
+*.m4
+test/*

batch/CMakeLists.txt

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+set(service "ovc_batch_service")
+include("${CMAKE_SOURCE_DIR}/script/service.cmake")

batch/Dockerfile

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
+
+FROM openvisualcloud/xeon-ubuntu1804-media-nginx:20.7
+
+RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y -q --no-install-recommends python3-setuptools python3-redis python-celery-common python3-tornado python3-kafka python3-kazoo openssh-server && rm -rf /var/lib/apt/lists/*
+
+COPY *.py /home/
+COPY *.json /home/
+COPY *.conf /etc/nginx/
+CMD ["/bin/bash","-c","/home/main.py&/usr/local/sbin/nginx"]
+WORKDIR /home
+
+####
+ARG UID
+ARG GID
+## use ';' below so the build does not fail if the group or user already exists
+RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} docker; \
+    [ ${UID} -gt 0 ] && useradd -d /home/docker -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} docker; \
+    touch /var/run/nginx.pid && \
+    mkdir -p /var/log/nginx /var/lib/nginx /var/www/video /var/www/archive && \
+    chown -R ${UID}:${GID} /var/run/nginx.pid /var/www /var/log/nginx /var/lib/nginx
+USER ${UID}
+####
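The UID/GID build arguments let the image run nginx and the Python service as the invoking user rather than root; they are presumably supplied through the shared script/build.sh that batch/build.sh sources. A hedged sketch of an equivalent manual build (image tag and context path are illustrative, not taken from the scripts):

# hypothetical manual build; the repo's build scripts normally pass these arguments
docker build \
  --build-arg UID=$(id -u) \
  --build-arg GID=$(id -g) \
  -t ovc_batch_service:latest batch/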

batch/build.sh

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+#!/bin/bash -e
+
+IMAGE="ovc_batch_service"
+DIR=$(dirname $(readlink -f "$0"))
+
+. "${DIR}/../script/build.sh"

batch/main.py

Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
+#!/usr/bin/python3
+
+from os.path import isfile
+from messaging import Producer
+import time
+from os import listdir
+import json
+
+KAFKA_TOPIC_VODS = "content_provider_sched_vods"
+KAFKA_TOPIC_LIVES = "content_provider_sched_lives"
+DASHLS_ROOT = "/var/www"
+ARCHIVE_ROOT = "/var/www/archive"
+
+config_file="/home/transcoding.json"
+
+streams = [s for s in listdir(ARCHIVE_ROOT) if s.endswith((".mp4", ".avi"))]
+
+info={}
+with open(config_file,"rt") as fd:
+    info=json.load(fd)
+
+print(info,flush=True)
+
+producer = Producer()
+for stream in info[0]["vods"]:
+    # schedule producing the stream
+    if stream["name"] in streams:
+        msg=stream
+        print("start VOD transcoding on {} with {}: ".format(stream["name"],stream["type"]), flush=True)
+        print(msg,flush=True)
+        producer.send(KAFKA_TOPIC_VODS, json.dumps(msg))
+        # wait until the file is available
+        start_time = time.time()
+        while time.time() - start_time < 60:
+            if isfile(DASHLS_ROOT+"/"+stream["name"]): break
+            time.sleep(1)
+
+producer.close()
+
+while True:
+    print("Running...",flush=True)
+    time.sleep(30)
+

batch/messaging.py

Lines changed: 77 additions & 0 deletions
@@ -0,0 +1,77 @@
+#!/usr/bin/python3
+
+import socket
+from kafka import KafkaProducer, KafkaConsumer, TopicPartition
+import traceback
+import socket
+import time
+
+KAFKA_HOSTS = ["kafka-service:9092"]
+
+class Producer(object):
+    def __init__(self):
+        super(Producer, self).__init__()
+        self._client_id = socket.gethostname()
+        self._producer = None
+
+    def send(self, topic, message):
+        if not self._producer:
+            try:
+                self._producer = KafkaProducer(bootstrap_servers=KAFKA_HOSTS,
+                                               client_id=self._client_id,
+                                               api_version=(0, 10), acks=0)
+            except:
+                print(traceback.format_exc(), flush=True)
+                self._producer = None
+
+        try:
+            self._producer.send(topic, message.encode('utf-8'))
+        except:
+            print(traceback.format_exc(), flush=True)
+
+    def flush(self):
+        if self._producer:
+            self._producer.flush()
+
+    def close(self):
+        if self._producer:
+            self._producer.close()
+            self._producer=None
+
+class Consumer(object):
+    def __init__(self, group=None):
+        super(Consumer, self).__init__()
+        self._client_id = socket.gethostname()
+        self._group = group
+
+    def messages(self, topic, timeout=None):
+        c = KafkaConsumer(topic, bootstrap_servers=KAFKA_HOSTS, client_id=self._client_id,
+                          group_id=self._group, auto_offset_reset="earliest", api_version=(0, 10))
+
+        for msg in c:
+            yield msg.value.decode('utf-8')
+        c.close()
+
+    def debug(self, topic):
+        c = KafkaConsumer(bootstrap_servers=KAFKA_HOSTS, client_id=self._client_id,
+                          group_id=None, api_version=(0, 10))
+
+        # assign/subscribe topic
+        partitions = c.partitions_for_topic(topic)
+        if not partitions:
+            raise Exception("Topic "+topic+" does not exist")
+        c.assign([TopicPartition(topic, p) for p in partitions])
+
+        # seek to beginning if needed
+        c.seek_to_beginning()
+
+        # fetch messages
+        while True:
+            partitions = c.poll(100)
+            if partitions:
+                for p in partitions:
+                    for msg in partitions[p]:
+                        yield msg.value.decode('utf-8')
+            yield ""
+
+        c.close()

batch/nginx.conf

Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
+
+worker_processes auto;
+worker_rlimit_nofile 8192;
+daemon off;
+error_log /var/www/log/error.log warn;
+
+events {
+    worker_connections 4096;
+}
+
+rtmp {
+    server {
+        listen 1935;
+        chunk_size 4000;
+
+        application stream {
+            live on;
+        }
+
+        application hls {
+            live on;
+            hls on;
+            hls_path /var/www/video/hls;
+            hls_nested on;
+            hls_fragment 3;
+            hls_playlist_length 60;
+            hls_variant _low BANDWIDTH=2048000 RESOLUTION=854x480;
+            hls_variant _mid BANDWIDTH=4096000 RESOLUTION=1280x720;
+            hls_variant _hi BANDWIDTH=8192000 RESOLUTION=1920x1080;
+        }
+
+        application dash {
+            live on;
+            dash on;
+            dash_path /var/www/video/dash;
+            dash_fragment 4;
+            dash_playlist_length 120;
+            dash_nested on;
+            dash_repetition on;
+            dash_cleanup on;
+            dash_variant _low bandwidth="2048000" width="854" height="480";
+            dash_variant _med bandwidth="4096000" width="1280" height="720";
+            dash_variant _hi bandwidth="8192000" width="1920" height="1080" max;
+        }
+    }
+}

batch/shell.sh

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
+#!/bin/bash -e
+
+IMAGE="ovc_batch_service"
+DIR=$(dirname $(readlink -f "$0"))
+OPTIONS=("--volume=${DIR}/../../volume/video/archive:/var/www/archive:ro" "--volume=${DIR}/../../volume/video/dash:/var/www/dash:ro" "--volume=${DIR}/../../volume/video/hls:/var/www/hls:ro")
+
+. "${DIR}/../script/shell.sh"

batch/transcoding.json

Lines changed: 54 additions & 0 deletions
@@ -0,0 +1,54 @@
+[{
+    "vods": [{
+        "name": "bbb_sunflower_1080p_30fps_normal.mp4",
+        "type": "hls",
+        "parameters": [
+            [3840, 2160, 14000000, 192000],
+            [2560, 1440, 10000000, 192000],
+            [1920, 1080, 5000000, 192000],
+            [1280, 720, 2800000, 192000],
+            [842, 480, 1400000, 128000],
+            [640, 360, 800000, 128000]
+        ],
+        "codec": "AVC",
+        "loop": 0,
+        "target": "file",
+        "platform": "software"
+    },{
+        "name": "bbb_sunflower_1080p_30fps_normal.mp4",
+        "type": "dash",
+        "parameters": [
+            [3840, 2160, 14000000, 192000],
+            [2560, 1440, 10000000, 192000],
+            [1920, 1080, 5000000, 192000],
+            [1280, 720, 2800000, 192000],
+            [842, 480, 1400000, 128000],
+            [640, 360, 800000, 128000]
+        ],
+        "codec": "AVC",
+        "loop": 0,
+        "target": "file",
+        "platform": "software"
+    }],
+    "lives": [{
+        "name": "bbb_sunflower_1080p_30fps_normal.mp4",
+        "type": "hls",
+        "parameters": [
+            [1920, 1080, 5000000, 192000]
+        ],
+        "codec": "AVC",
+        "loop": 1,
+        "target": "rtmp",
+        "platform": "software"
+    },{
+        "name": "bbb_sunflower_1080p_30fps_normal.mp4",
+        "type": "dash",
+        "parameters": [
+            [1920, 1080, 5000000, 192000]
+        ],
+        "codec": "AVC",
+        "loop": 1,
+        "target": "rtmp",
+        "platform": "software"
+    }]
+}]
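Since main.py loads this file with json.load at container start, a malformed edit will crash the batch service immediately. A quick sanity check before rebuilding the image (a minimal sketch; assumes python3 is available on the build host):

python3 -m json.tool batch/transcoding.json > /dev/null && echo "transcoding.json parses cleanly"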

cdn-server/schedule.py

Lines changed: 13 additions & 3 deletions
@@ -4,19 +4,29 @@
 from tornado import web, gen
 from messaging import Producer
 import time
+import json
 
-KAFKA_TOPIC = "content_provider_sched"
+KAFKA_TOPIC_VODS = "content_provider_sched_vods"
 DASHLS_ROOT = "/var/www"
 
 class ScheduleHandler(web.RequestHandler):
     @gen.coroutine
     def get(self):
-        stream = self.request.uri.replace("/schedule/", "")
+        stream = self.request.uri.replace("/schedule/", "")
 
         # schedule producing the stream
         print("request received to process stream: "+stream, flush=True)
         producer = Producer()
-        producer.send(KAFKA_TOPIC, stream)
+        msg = {
+            "name": stream.split("/")[1],
+            "type": stream.split("/")[0],
+            "parameters": [ ],
+            "codec": "AVC",
+            "loop": 0,
+            "target": "file",
+            "platform": "software"
+        }
+        producer.send(KAFKA_TOPIC_VODS, json.dumps(msg))
         producer.close()
 
         # wait until file is available, return it
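With this change the CDN scheduler publishes the same JSON job shape the batch service emits, on the renamed content_provider_sched_vods topic, with type and name parsed from the request path. A hedged example request (the host name is a placeholder; port 8443 and the self-signed certificate come from the CDN deployment):

# hypothetical request; <cdn-host> stands in for the deployed CDN service address
curl -k "https://<cdn-host>:8443/schedule/hls/bbb_sunflower_1080p_30fps_normal.mp4"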

deployment/docker-swarm/build.sh

Lines changed: 3 additions & 2 deletions
@@ -2,9 +2,10 @@
 
 DIR=$(dirname $(readlink -f "$0"))
 NVODS="${1:-1}"
-REGISTRY="$3"
+SCENARIO="${3:-cdn}"
+REGISTRY="$4"
 
 rm -rf "$DIR/../../volume/video/cache"
 mkdir -p "$DIR/../../volume/video/cache/hls" "$DIR/../../volume/video/cache/dash"
 
-m4 -DNVODS=${NVODS} -DREGISTRY_PREFIX=${REGISTRY} -I "${DIR}" "${DIR}/docker-compose.yml.m4" > "${DIR}/docker-compose.yml"
+m4 -DNVODS=${NVODS} -DSCENARIO=${SCENARIO} -DREGISTRY_PREFIX=${REGISTRY} -I "${DIR}" "${DIR}/docker-compose.yml.m4" > "${DIR}/docker-compose.yml"
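The positional arguments shift REGISTRY to $4 to make room for SCENARIO as $3, mirroring the Kubernetes build scripts below. An illustrative invocation (argument values are examples only; the second argument is presumed to stay NLIVES as in the helm and yaml build scripts):

# example only: NVODS=1, NLIVES=1, SCENARIO=batch, no registry prefix
./deployment/docker-swarm/build.sh 1 1 batch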

deployment/kubernetes/helm/build.sh

Lines changed: 3 additions & 2 deletions
@@ -3,13 +3,14 @@
 DIR=$(dirname $(readlink -f "$0"))
 NVODS="${1:-1}"
 NLIVES="${2:-1}"
-REGISTRY="$3"
+SCENARIO="${3:-cdn}"
+REGISTRY="$4"
 HOSTIP=$(ip route get 8.8.8.8 | awk '/ src /{split(substr($0,index($0," src ")),f);print f[2];exit}')
 
 # make sure helm is functional
 helm version >/dev/null 2>/dev/null || exit 0
 
 echo "Generating helm chart"
 . "${DIR}/../volume-info.sh"
-m4 -DREGISTRY_PREFIX=${REGISTRY} -DNVODS=${NVODS} -DNLIVES=${NLIVES} -DUSERID=$(id -u) -DGROUPID=$(id -g) -DHOSTIP=${HOSTIP} $(env | grep _VOLUME_ | sed 's/^/-D/') -I "${DIR}/cdn-transcode" "$DIR/cdn-transcode/values.yaml.m4" > "$DIR/cdn-transcode/values.yaml"
+m4 -DREGISTRY_PREFIX=${REGISTRY} -DNVODS=${NVODS} -DNLIVES=${NLIVES} -DSCENARIO=${SCENARIO} -DUSERID=$(id -u) -DGROUPID=$(id -g) -DHOSTIP=${HOSTIP} $(env | grep _VOLUME_ | sed 's/^/-D/') -I "${DIR}/cdn-transcode" "$DIR/cdn-transcode/values.yaml.m4" > "$DIR/cdn-transcode/values.yaml"
 

deployment/kubernetes/yaml/build.sh

Lines changed: 4 additions & 2 deletions
@@ -3,10 +3,12 @@
 DIR=$(dirname $(readlink -f "$0"))
 NVODS="${1:-1}"
 NLIVES="${2:-1}"
-REGISTRY="$3"
+SCENARIO="${3:-cdn}"
+REGISTRY="$4"
 HOSTIP=$(ip route get 8.8.8.8 | awk '/ src /{split(substr($0,index($0," src ")),f);print f[2];exit}')
 
 . "${DIR}/../volume-info.sh"
+echo "NVODS=${NVODS} NLIVES=${NLIVES} SCENARIO=${SCENARIO}"
 for template in $(find "${DIR}" -maxdepth 1 -name "*.yaml.m4" -print); do
-m4 -DNVODS=${NVODS} -DNLIVES=${NLIVES} -DHOSTIP=${HOSTIP} -DREGISTRY_PREFIX=${REGISTRY} $(env | grep _VOLUME_ | sed 's/^/-D/') -I "${DIR}" "${template}" > "${template/.m4/}"
+m4 -DNVODS=${NVODS} -DNLIVES=${NLIVES} -DSCENARIO=${SCENARIO} -DHOSTIP=${HOSTIP} -DREGISTRY_PREFIX=${REGISTRY} $(env | grep _VOLUME_ | sed 's/^/-D/') -I "${DIR}" "${template}" > "${template/.m4/}"
 done

deployment/kubernetes/yaml/cdn-deploy.yaml.m4

Lines changed: 4 additions & 1 deletion
@@ -20,10 +20,12 @@ spec:
       enableServiceLinks: false
       containers:
         - name: cdn
-          image: defn(`REGISTRY_PREFIX')ovc_cdn_service:latest
+          image: defn(`REGISTRY_PREFIX')`ovc_'defn(`SCENARIO')_service:latest
          imagePullPolicy: IfNotPresent
           ports:
+ifelse(defn(`SCENARIO'),`cdn',`dnl
             - containerPort: 8443
+')dnl
             - containerPort: 1935
           resources:
             limits:
@@ -51,3 +53,4 @@ spec:
         secret:
           secretName: self-signed-certificate
       PLATFORM_NODE_SELECTOR(`Xeon')dnl
+
