diff --git a/carsrun/PiCamera_H264_Server.py b/carsrun/PiCamera_H264_Server.py new file mode 100644 index 0000000..3950f23 --- /dev/null +++ b/carsrun/PiCamera_H264_Server.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 +# vuquangtrong.github.io + +import io +import picamera +import time +from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer +from wsgiref.simple_server import make_server +from ws4py.websocket import WebSocket +from ws4py.server.wsgirefserver import WSGIServer, WebSocketWSGIHandler, WebSocketWSGIRequestHandler +from ws4py.server.wsgiutils import WebSocketWSGIApplication +from threading import Thread, Condition + + +class FrameBuffer(object): + def __init__(self): + self.frame = None + self.buffer = io.BytesIO() + self.condition = Condition() + + def write(self, buf): + if buf.startswith(b'\x00\x00\x00\x01'): + with self.condition: + self.buffer.seek(0) + self.buffer.write(buf) + self.buffer.truncate() + self.frame = self.buffer.getvalue() + self.condition.notify_all() + + +def stream(): + with picamera.PiCamera(resolution='640x480', framerate=24) as camera: + broadcasting = True + frame_buffer = FrameBuffer() + camera.start_recording(frame_buffer, format='h264', profile="baseline") + try: + WebSocketWSGIHandler.http_version = '1.1' + websocketd = make_server('', 9000, server_class=WSGIServer, + handler_class=WebSocketWSGIRequestHandler, + app=WebSocketWSGIApplication(handler_cls=WebSocket)) + websocketd.initialize_websockets_manager() + websocketd_thread = Thread(target=websocketd.serve_forever) + + httpd = ThreadingHTTPServer(('', 8000), SimpleHTTPRequestHandler) + httpd_thread = Thread(target=httpd.serve_forever) + + try: + websocketd_thread.start() + httpd_thread.start() + while broadcasting: + with frame_buffer.condition: + frame_buffer.condition.wait() + websocketd.manager.broadcast(frame_buffer.frame, binary=True) + except KeyboardInterrupt: + pass + finally: + websocketd.shutdown() + httpd.shutdown() + broadcasting = False + raise 
KeyboardInterrupt + except KeyboardInterrupt: + pass + finally: + camera.stop_recording() + +if __name__ == "__main__": + stream() diff --git a/carsrun/__pycache__/PiCamera_H264_Server.cpython-311.pyc b/carsrun/__pycache__/PiCamera_H264_Server.cpython-311.pyc new file mode 100644 index 0000000..204dcf4 Binary files /dev/null and b/carsrun/__pycache__/PiCamera_H264_Server.cpython-311.pyc differ diff --git a/carsrun/__pycache__/PiCamera_H264_Server.cpython-39.pyc b/carsrun/__pycache__/PiCamera_H264_Server.cpython-39.pyc new file mode 100644 index 0000000..7576683 Binary files /dev/null and b/carsrun/__pycache__/PiCamera_H264_Server.cpython-39.pyc differ diff --git a/carsrun/__pycache__/base_ctrl.cpython-311.pyc b/carsrun/__pycache__/base_ctrl.cpython-311.pyc new file mode 100644 index 0000000..e60b7c7 Binary files /dev/null and b/carsrun/__pycache__/base_ctrl.cpython-311.pyc differ diff --git a/carsrun/__pycache__/camera_pi.cpython-311.pyc b/carsrun/__pycache__/camera_pi.cpython-311.pyc new file mode 100644 index 0000000..4ee0a89 Binary files /dev/null and b/carsrun/__pycache__/camera_pi.cpython-311.pyc differ diff --git a/carsrun/__pycache__/camera_pi.cpython-39.pyc b/carsrun/__pycache__/camera_pi.cpython-39.pyc new file mode 100644 index 0000000..acb4015 Binary files /dev/null and b/carsrun/__pycache__/camera_pi.cpython-39.pyc differ diff --git a/carsrun/__pycache__/car.cpython-311.pyc b/carsrun/__pycache__/car.cpython-311.pyc new file mode 100644 index 0000000..196df95 Binary files /dev/null and b/carsrun/__pycache__/car.cpython-311.pyc differ diff --git a/carsrun/__pycache__/car.cpython-39.pyc b/carsrun/__pycache__/car.cpython-39.pyc new file mode 100644 index 0000000..7be1857 Binary files /dev/null and b/carsrun/__pycache__/car.cpython-39.pyc differ diff --git a/carsrun/appCam.py b/carsrun/appCam.py new file mode 100644 index 0000000..1568e9e --- /dev/null +++ b/carsrun/appCam.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# appCam.py 
+# based on tutorial ==> https://blog.miguelgrinberg.com/post/video-streaming-with-flask +# PiCam Local Web Server with Flask +# MJRoBot.org 19Jan18 + +from flask import Flask, render_template, Response, redirect, url_for, send_file, jsonify, request + +# Raspberry Pi camera module (requires picamera package) +#from camera_pi import Camera +from picamera2 import Picamera2 +import os +import time +from gevent import pywsgi +from car import CAR +#from PiCamera_H264_Server import stream +import threading +import cv2 +app = Flask(__name__, static_url_path='') + +def gen_frames(): # generate frame by frame from camera + picam2 = Picamera2() + picam2.configure(picam2.create_video_configuration(main={"format": 'XRGB8888', "size": (640, 480)})) + picam2.start() + while True: + # Capture frame-by-frame + frame = picam2.capture_array() # read the camera frame + frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) + ret, buffer = cv2.imencode('.jpg', frame) + frame = buffer.tobytes() + yield (b'--frame\r\n' + b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n') # concat frame one by one and show result + + +@app.route('/') +def index(): + return render_template('index-t.html') + +@app.route('/video_feed') +def video_feed(): + #Video streaming route. 
Put this in the src attribute of an img tag + return Response(gen_frames(), mimetype='multipart/x-mixed-replace; boundary=frame') + +# def gen(camera): +# """Video streaming generator function.""" +# while True: +# frame = camera.get_frame() +# yield (b'--frame\r\n' +# b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n') + +# @app.route('/capture') +# def capture(): +# pic = open("qrcode.png", "wb") +# frame = Camera().get_frame() +# pic.write(frame) +# return Response(b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n', +# mimetype='multipart/x-mixed-replace; boundary=frame') +# #return send_file("qrcode.png", mimetype='image/png') +# #return redirect(url_for('index')) + +# @app.route('/video_feed') +# def video_feed(): +# """Video streaming route. Put this in the src attribute of an img tag.""" +# return Response(gen(Camera()), +# mimetype='multipart/x-mixed-replace; boundary=frame') +car = CAR() +@app.route('/control/') +def control_index(): + word = """指令:\n + /led: 灯光闪烁\n + /led_light: 打开全部灯光\n + /led_dark: 关闭全部灯光\n + /stop(/Q): 小车停止运动\n + /forward(/W): 小车开始运动\n + /back(/S): 小车向后运动\n + /left(/A): 小车向左运动\n + /right(/D): 小车向右运动\n""" + print(word) + return word + +# def execute_forward_function(): +# url = "http://192.168.185.242:80/send_command" # 示例URL,实际使用时需要替换为正确的服务器地址 +# data = {"command": 'base -c {"T":1,"L":0.5,"R":0.5}'} # 请求体数据 + +# # 发送请求并打印返回结果 +# try: +# response = request.post(url, data=data) +# print(response.text) # 打印服务器返回的内容 +# except request.exceptions.RequestException as e: +# print(f"请求发生错误: {e}") +# print("执行前进功能") +# # 返回一些结果 +# return "前进功能已执行" + +# @app.route('/control/forward', methods=['GET']) +# def control_forward(): +# try: +# # 获取前端发送的数据 +# data = request.args + +# # 调试输出接收到的数据 +# print("接收到的数据:", data) + +# # 执行你的函数 +# result = execute_forward_function() # 确保这个函数是可以被调用的,如果有必要打印它的返回值 + +# # 返回JSON响应, result 需要确保是可json化的对象 +# return jsonify({ +# "resultCode": 200, +# "message": "请求成功", +# "data": result +# }) +# except Exception 
as e: +# print(f"发生错误: {e}") # 打印异常 +# return jsonify({"resultCode": 500, "message": "内部服务器错误"}), 500 # 返回500错误 + + + +@app.route('/control/') +def fun(info): + if hasattr(car, info): + getattr(car, info)() + return 'Run: '+info+'\n' + else: + return 'Error: '+info+' not be defined\n' + + + +if __name__ == '__main__': + app.run(host='0.0.0.0', port =80, debug=True, threaded=True) + # t = threading.Thread(target=stream) + # t.start() + # server = pywsgi.WSGIServer(('0.0.0.0', 80), app) + # server.serve_forever() diff --git a/carsrun/apptest.py b/carsrun/apptest.py new file mode 100644 index 0000000..89b9d9e --- /dev/null +++ b/carsrun/apptest.py @@ -0,0 +1,31 @@ +from flask import Flask, render_template, Response +from picamera2 import Picamera2 +import time +import cv2 + +app = Flask(__name__) + +def gen_frames(): # generate frame by frame from camera + picam2 = Picamera2() + picam2.configure(picam2.create_video_configuration(main={"format": 'XRGB8888', "size": (640, 480)})) + picam2.start() + while True: + # Capture frame-by-frame + frame = picam2.capture_array() # read the camera frame + frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) + ret, buffer = cv2.imencode('.jpg', frame) + frame = buffer.tobytes() + yield (b'--frame\r\n' + b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n') # concat frame one by one and show result + +@app.route('/') +def index(): + return render_template('index.html') + +@app.route('/video_feed') +def video_feed(): + #Video streaming route. 
Put this in the src attribute of an img tag + return Response(gen_frames(), mimetype='multipart/x-mixed-replace; boundary=frame') + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=5000, debug=True) diff --git a/carsrun/base_ctrl.py b/carsrun/base_ctrl.py new file mode 100644 index 0000000..2b25e9b --- /dev/null +++ b/carsrun/base_ctrl.py @@ -0,0 +1,286 @@ +import serial +import json +import queue +import threading +import yaml +import os +import time +import glob +import numpy as np + +curpath = os.path.realpath(__file__) +thisPath = os.path.dirname(curpath) +with open(thisPath + '/config.yaml', 'r') as yaml_file: + f = yaml.safe_load(yaml_file) + +class ReadLine: + def __init__(self, s): + self.buf = bytearray() + self.s = s + + self.sensor_data = [] + self.sensor_list = [] + try: + self.sensor_data_ser = serial.Serial(glob.glob('/dev/ttyUSB*')[0], 115200) + print("/dev/ttyUSB* connected succeed") + except: + self.sensor_data_ser = None + self.sensor_data_max_len = 51 + + try: + self.lidar_ser = serial.Serial(glob.glob('/dev/ttyACM*')[0], 230400, timeout=1) + print("/dev/ttyACM* connected succeed") + except: + self.lidar_ser = None + self.ANGLE_PER_FRAME = 12 + self.HEADER = 0x54 + self.lidar_angles = [] + self.lidar_distances = [] + self.lidar_angles_show = [] + self.lidar_distances_show = [] + self.last_start_angle = 0 + + def readline(self): + i = self.buf.find(b"\n") + if i >= 0: + r = self.buf[:i+1] + self.buf = self.buf[i+1:] + return r + while True: + i = max(1, min(512, self.s.in_waiting)) + data = self.s.read(i) + i = data.find(b"\n") + if i >= 0: + r = self.buf + data[:i+1] + self.buf[0:] = data[i+1:] + return r + else: + self.buf.extend(data) + + def clear_buffer(self): + self.s.reset_input_buffer() + + def read_sensor_data(self): + if self.sensor_data_ser == None: + return + + try: + buffer_clear = False + while self.sensor_data_ser.in_waiting > 0: + buffer_clear = True + sensor_readline = self.sensor_data_ser.readline() + if 
len(sensor_readline) <= self.sensor_data_max_len: + self.sensor_list.append(sensor_readline.decode('utf-8')[:-2]) + else: + self.sensor_list.append(sensor_readline.decode('utf-8')[:self.sensor_data_max_len]) + self.sensor_list.append(sensor_readline.decode('utf-8')[self.sensor_data_max_len:-2]) + if buffer_clear: + self.sensor_data = self.sensor_list.copy() + self.sensor_list.clear() + self.sensor_data_ser.reset_input_buffer() + except Exception as e: + print(f"[base_ctrl.read_sensor_data] error: {e}") + + def parse_lidar_frame(self, data): + # header = data[0] + # verlen = data[1] + # speed = data[3] << 8 | data[2] + start_angle = (data[5] << 8 | data[4]) * 0.01 + # print(start) + # end_angle = (data[43] << 8 | data[42]) * 0.01 + for i in range(0, self.ANGLE_PER_FRAME): + offset = 6 + i * 3 + distance = data[offset+1] << 8 | data[offset] + confidence = data[offset+2] + # lidar_angles.append(np.radians(start_angle + i * 0.167)) + self.lidar_angles.append(np.radians(start_angle + i * 0.83333 + 180)) + # lidar_angles.append(np.radians(start_angle + end_angle)) + self.lidar_distances.append(distance) + # end_angle = (data[43] << 8 | data[42]) * 0.01 + # timestamp = data[45] << 8 | data[44] + # crc = data[46] + return start_angle + + def lidar_data_recv(self): + if self.lidar_ser == None: + return + try: + while True: + self.header = self.lidar_ser.read(1) + if self.header == b'\x54': + # Read the rest of the data + data = self.header + self.lidar_ser.read(46) + hex_data = [int(hex(byte), 16) for byte in data] + start_angle = self.parse_lidar_frame(hex_data) + if self.last_start_angle > start_angle: + break + self.last_start_angle = start_angle + else: + self.lidar_ser.flushInput() + + self.last_start_angle = start_angle + self.lidar_angles_show = self.lidar_angles.copy() + self.lidar_distances_show = self.lidar_distances.copy() + self.lidar_angles.clear() + self.lidar_distances.clear() + except Exception as e: + print(f"[base_ctrl.lidar_data_recv] error: {e}") + 
self.lidar_ser = serial.Serial(glob.glob('/dev/ttyACM*')[0], 230400, timeout=1) + + +class BaseController: + + def __init__(self, uart_dev_set, buad_set): + self.ser = serial.Serial(uart_dev_set, buad_set, timeout=1) + self.rl = ReadLine(self.ser) + self.command_queue = queue.Queue() + self.command_thread = threading.Thread(target=self.process_commands, daemon=True) + self.command_thread.start() + + self.base_light_status = 0 + self.head_light_status = 0 + + self.data_buffer = None + self.base_data = None + + self.use_lidar = f['base_config']['use_lidar'] + self.extra_sensor = f['base_config']['extra_sensor'] + + + def feedback_data(self): + try: + while self.rl.s.in_waiting > 0: + self.data_buffer = json.loads(self.rl.readline().decode('utf-8')) + if 'T' in self.data_buffer: + self.base_data = self.data_buffer + self.data_buffer = None + if self.base_data["T"] == 1003: + print(self.base_data) + return self.base_data + self.rl.clear_buffer() + self.data_buffer = json.loads(self.rl.readline().decode('utf-8')) + self.base_data = self.data_buffer + return self.base_data + except Exception as e: + self.rl.clear_buffer() + print(f"[base_ctrl.feedback_data] error: {e}") + + + def on_data_received(self): + self.ser.reset_input_buffer() + data_read = json.loads(self.rl.readline().decode('utf-8')) + return data_read + + + def send_command(self, data): + self.command_queue.put(data) + + + def process_commands(self): + while True: + data = self.command_queue.get() + self.ser.write((json.dumps(data) + '\n').encode("utf-8")) + + + def base_json_ctrl(self, input_json): + self.send_command(input_json) + + + def gimbal_emergency_stop(self): + data = {"T":0} + self.send_command(data) + + + def base_speed_ctrl(self, input_left, input_right): + data = {"T":1,"L":input_left,"R":input_right} + self.send_command(data) + + + def gimbal_ctrl(self, input_x, input_y, input_speed, input_acceleration): + data = {"T":133,"X":input_x,"Y":input_y,"SPD":input_speed,"ACC":input_acceleration} + 
self.send_command(data) + + + def gimbal_base_ctrl(self, input_x, input_y, input_speed): + data = {"T":141,"X":input_x,"Y":input_y,"SPD":input_speed} + self.send_command(data) + + + def base_oled(self, input_line, input_text): + data = {"T":3,"lineNum":input_line,"Text":input_text} + self.send_command(data) + + + def base_default_oled(self): + data = {"T":-3} + self.send_command(data) + + + def bus_servo_id_set(self, old_id, new_id): + # data = {"T":54,"old":old_id,"new":new_id} + data = {"T":f['cmd_config']['cmd_set_servo_id'],"raw":old_id,"new":new_id} + self.send_command(data) + + + def bus_servo_torque_lock(self, input_id, input_status): + # data = {"T":55,"id":input_id,"status":input_status} + data = {"T":f['cmd_config']['cmd_servo_torque'],"id":input_id,"cmd":input_status} + self.send_command(data) + + + def bus_servo_mid_set(self, input_id): + # data = {"T":58,"id":input_id} + data = {"T":f['cmd_config']['cmd_set_servo_mid'],"id":input_id} + self.send_command(data) + + + def lights_ctrl(self, pwmA, pwmB): + data = {"T":132,"IO4":pwmA,"IO5":pwmB} + self.send_command(data) + self.base_light_status = pwmA + self.head_light_status = pwmB + + + def base_lights_ctrl(self): + if self.base_light_status != 0: + self.base_light_status = 0 + else: + self.base_light_status = 255 + self.lights_ctrl(self.base_light_status, self.head_light_status) + + def gimbal_dev_close(self): + self.ser.close() + + def breath_light(self, input_time): + breath_start_time = time.time() + while time.time() - breath_start_time < input_time: + for i in range(0, 128, 10): + self.lights_ctrl(i, 128-i) + time.sleep(0.1) + for i in range(0, 128, 10): + self.lights_ctrl(128-i, i) + time.sleep(0.1) + self.lights_ctrl(0, 0) + + +if __name__ == '__main__': + # RPi5 + base = BaseController('/dev/ttyAMA0', 115200) + + # RPi4B + # base = BaseController('/dev/serial0', 115200) + + # breath light for 15s + base.breath_light(15) + + # gimble ctrl, look forward + # x y spd acc + base.gimbal_ctrl(0, 0, 10, 
0) + + # x(-180 ~ 180) + # x- look left + # x+ look right + + # y(-30 ~ 90) + # y- look down + # y+ look up \ No newline at end of file diff --git a/carsrun/camera_pi.py b/carsrun/camera_pi.py new file mode 100644 index 0000000..471ee3f --- /dev/null +++ b/carsrun/camera_pi.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# camera_pi.py +# +# +# +import time +import io +import threading +import picamera + + +class Camera(object): + thread = None # background thread that reads frames from camera + frame = None # current frame is stored here by background thread + last_access = 0 # time of last client access to the camera + + def initialize(self): + if Camera.thread is None: + # start background frame thread + Camera.thread = threading.Thread(target=self._thread) + Camera.thread.start() + + # wait until frames start to be available + while self.frame is None: + time.sleep(0) + + def get_frame(self): + Camera.last_access = time.time() + self.initialize() + return self.frame + + @classmethod + def _thread(cls): + with picamera.PiCamera() as camera: + # camera setup + camera.resolution = (320, 240) + camera.hflip = True + camera.vflip = True + + # let camera warm up + camera.start_preview() + time.sleep(2) + + stream = io.BytesIO() + for foo in camera.capture_continuous(stream, 'jpeg', + use_video_port=True): + # store frame + stream.seek(0) + cls.frame = stream.read() + + # reset stream for next frame + stream.seek(0) + stream.truncate() + + # if there hasn't been any clients asking for frames in + # the last 10 seconds stop the thread + if time.time() - cls.last_access > 10: + break + cls.thread = None diff --git a/carsrun/car.py b/carsrun/car.py new file mode 100644 index 0000000..c72f49a --- /dev/null +++ b/carsrun/car.py @@ -0,0 +1,147 @@ +#coding:utf-8 +import os +import time +import RPi.GPIO as GPIO +from base_ctrl import BaseController +####################################### +#############信号引脚定义############## 
+####################################### +GPIO.setmode(GPIO.BCM) +GPIO.setwarnings(False) +def is_raspberry_pi5(): + with open('/proc/cpuinfo', 'r') as file: + for line in file: + if 'Model' in line: + if 'Raspberry Pi 5' in line: + return True + else: + return False + +base = BaseController('/dev/ttyAMA0', 115200) + +class CAR: + def __init__(self): + self.LED0 = 10 #LED0的IO口定义 + self.LED1 = 9 #LED1的IO口定义 + self.LED2 = 25 #LED2的IO口定义 + self.ENA = 13 #//L298 使能A + self.ENB = 20 #//L298 使能B + self.IN1 = 19 #//电机接口1 + self.IN2 = 16 #//电机接口2 + self.IN3 = 21 #//电机接口3 + self.IN4 = 26 #//电机接口4 + GPIO.setup(self.LED0, GPIO.OUT, initial=GPIO.HIGH) ##led0初始化为高电平 + GPIO.setup(self.LED1, GPIO.OUT, initial=GPIO.HIGH) ##led1初始化为高电平 + GPIO.setup(self.LED2, GPIO.OUT, initial=GPIO.HIGH) ##led2初始化为高电平 + GPIO.setup(self.ENA, GPIO.OUT, initial=GPIO.LOW) ##ENA初始化为低电平 + GPIO.setup(self.ENB, GPIO.OUT, initial=GPIO.LOW) ##ENB初始化为低电平 + GPIO.setup(self.IN1, GPIO.OUT, initial=GPIO.LOW) ##IN1初始化为低电平 + GPIO.setup(self.IN2, GPIO.OUT, initial=GPIO.LOW) ##IN2初始化为低电平 + GPIO.setup(self.IN3, GPIO.OUT, initial=GPIO.LOW) ##IN3初始化为低电平 + GPIO.setup(self.IN4, GPIO.OUT, initial=GPIO.LOW) ##IN4初始化为低电平 + + def led(self): + GPIO.output(self.LED0,False) + GPIO.output(self.LED1,False) + GPIO.output(self.LED2,False)###LED0,LED1,LED2 = 亮 亮 亮 + time.sleep(0.5) + GPIO.output(self.LED0,True) + GPIO.output(self.LED1,False) + GPIO.output(self.LED2,False)###LED0,LED1,LED2 = 灭 亮 亮 + time.sleep(0.5) + GPIO.output(self.LED0,False) + GPIO.output(self.LED1,True) + GPIO.output(self.LED2,False)###LED0,LED1,LED2 = 亮 灭 亮 + time.sleep(0.5) + GPIO.output(self.LED0,False) + GPIO.output(self.LED1,False) + GPIO.output(self.LED2,True)###LED0,LED1,LED2 = 亮 亮 灭 + time.sleep(0.5) + GPIO.output(self.LED0,False) + GPIO.output(self.LED1,False) + GPIO.output(self.LED2,False)###LED0,LED1,LED2 = 亮 亮 亮 + time.sleep(0.5) + GPIO.output(self.LED0,True) + GPIO.output(self.LED1,True) + GPIO.output(self.LED2,True)###LED0,LED1,LED2 = 灭 灭 灭 + 
time.sleep(0.5) + print("run: led") + + def led_light(self): + GPIO.output(self.LED0,False) + GPIO.output(self.LED1,False) + GPIO.output(self.LED2,False)###LED0,LED1,LED2 = 亮 亮 亮 + print("run: led_light") + + def led_dark(self): + GPIO.output(self.LED0,True) + GPIO.output(self.LED1,True) + GPIO.output(self.LED2,True)###LED0,LED1,LED2 = 灭 灭 灭 + print("run: led_dark") + + def stop(self): # 停止运行 + GPIO.output(self.ENA,False) + GPIO.output(self.ENB,False) + GPIO.output(self.IN1,False) + GPIO.output(self.IN2,False) + GPIO.output(self.IN3,False) + GPIO.output(self.IN4,False) + print("run: stop move") + + def Q(self): # 停止的快捷键 + self.stop() + def q(self): + self.stop() + + def forward(self): # 前进 + base.send_command({"T":1,"L":0.2,"R":0.2}) + time.sleep(2) + base.send_command({"T":1,"L":0,"R":0}) + print("run: move !!!!forward") + + def W(self): # 前进的快捷键 + self.forward() + def w(self): + self.forward() + + def back(self): # 后退 + GPIO.output(self.ENA,True) + GPIO.output(self.ENB,True) + GPIO.output(self.IN1,True) + GPIO.output(self.IN2,False) + GPIO.output(self.IN3,True) + GPIO.output(self.IN4,False) + print("run: move back") + + def S(self): # 后退的快捷键 + self.back() + def s(self): + self.back() + + def left(self): # 左转 + GPIO.output(self.ENA,True) + GPIO.output(self.ENB,True) + GPIO.output(self.IN1,False) + GPIO.output(self.IN2,True) + GPIO.output(self.IN3,True) + GPIO.output(self.IN4,False) + print("run: move left") + + def A(self): # 左转的快捷键 + self.left() + def a(self): + self.left() + + def right(self): # 右转 + GPIO.output(self.ENA,True) + GPIO.output(self.ENB,True) + GPIO.output(self.IN1,True) + GPIO.output(self.IN2,False) + GPIO.output(self.IN3,False) + GPIO.output(self.IN4,True) + print("run: move right") + + def D(self): # 右转的快捷键 + self.right() + def d(self): + self.right() \ No newline at end of file diff --git a/carsrun/config.yaml b/carsrun/config.yaml new file mode 100644 index 0000000..b5949fe --- /dev/null +++ b/carsrun/config.yaml @@ -0,0 +1,107 @@ +args_config: 
+ arm_default_e: 60 + arm_default_r: 0 + arm_default_z: 24 + max_rate: 1.0 + max_speed: 1.3 + mid_rate: 0.66 + min_rate: 0.3 + slow_speed: 0.2 +audio_config: + audio_output: true + default_volume: 1.0 + min_time_bewteen_play: 1 + speed_rate: 180 +base_config: + add_osd: false + extra_sensor: false + main_type: 2 + module_type: 0 + robot_name: UGV Rover + sbc_version: 0.93 + use_lidar: false +cmd_config: + cmd_arm_ctrl_ui: 144 + cmd_gimbal_base_ctrl: 141 + cmd_gimbal_ctrl: 133 + cmd_gimbal_steady: 137 + cmd_movition_ctrl: 1 + cmd_pwm_ctrl: 11 + cmd_servo_torque: 210 + cmd_set_servo_id: 501 + cmd_set_servo_mid: 502 +code: + base_ct: 10410 + base_of: 10407 + base_on: 10408 + cv_auto: 10307 + cv_clor: 10305 + cv_face: 10303 + cv_moti: 10302 + cv_none: 10301 + cv_objs: 10304 + head_ct: 10409 + led_aut: 10405 + led_off: 10404 + led_ton: 10406 + max_res: 10101 + mc_lock: 10501 + mc_unlo: 10502 + mid_res: 10102 + min_res: 10103 + mp_face: 10308 + mp_hand: 10306 + mp_pose: 10309 + pic_cap: 10201 + re_capt: 10402 + re_none: 10401 + re_reco: 10403 + release: 10902 + s_panid: 10901 + s_tilid: 10904 + set_mid: 10903 + vid_end: 10203 + vid_sta: 10202 + zoom_x1: 10104 + zoom_x2: 10105 + zoom_x4: 10106 +cv: + aimed_error: 8 + color_lower: + - 101 + - 50 + - 38 + color_upper: + - 110 + - 255 + - 255 + default_color: blue + min_radius: 12 + sampling_rad: 25 + track_acc_rate: 0.4 + track_color_iterate: 0.023 + track_faces_iterate: 0.045 + track_spd_rate: 60 +fb: + base_light: 115 + base_voltage: 112 + cpu_load: 106 + cpu_temp: 107 + cv_movtion_mode: 114 + detect_react: 103 + detect_type: 101 + led_mode: 102 + pan_angle: 109 + picture_size: 104 + ram_usage: 108 + tilt_angle: 110 + video_fps: 113 + video_size: 105 + wifi_rssi: 111 +sbc_config: + disabled_http_log: true + feedback_interval: 0.001 +video: + default_quality: 20 + default_res_h: 480 + default_res_w: 640 diff --git a/carsrun/qrcode.png b/carsrun/qrcode.png new file mode 100644 index 0000000..e9f5f4f Binary files /dev/null 
and b/carsrun/qrcode.png differ diff --git a/carsrun/static/Decoder.js b/carsrun/static/Decoder.js new file mode 100644 index 0000000..6f09f55 --- /dev/null +++ b/carsrun/static/Decoder.js @@ -0,0 +1,891 @@ +// universal module definition +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define([], factory); + } else if (typeof exports === 'object') { + // Node. Does not work with strict CommonJS, but + // only CommonJS-like environments that support module.exports, + // like Node. + module.exports = factory(); + } else { + // Browser globals (root is window) + root.Decoder = factory(); + } +}(this, function () { + + var global; + + function initglobal(){ + global = this; + if (!global){ + if (typeof window != "undefined"){ + global = window; + }else if (typeof self != "undefined"){ + global = self; + }; + }; + }; + initglobal(); + + + function error(message) { + console.error(message); + console.trace(); + }; + + + function assert(condition, message) { + if (!condition) { + error(message); + }; + }; + + + + + var getModule = function(par_broadwayOnHeadersDecoded, par_broadwayOnPictureDecoded){ + + + /*var ModuleX = { + 'print': function(text) { console.log('stdout: ' + text); }, + 'printErr': function(text) { console.log('stderr: ' + text); } + };*/ + + + /* + + The reason why this is all packed into one file is that this file can also function as worker. + you can integrate the file into your build system and provide the original file to be loaded into a worker. 
+ + */ + + //var Module = (function(){ + + +var Module=typeof Module!=="undefined"?Module:{};var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}Module["arguments"]=[];Module["thisProgram"]="./this.program";Module["quit"]=(function(status,toThrow){throw toThrow});Module["preRun"]=[];Module["postRun"]=[];var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;if(Module["ENVIRONMENT"]){if(Module["ENVIRONMENT"]==="WEB"){ENVIRONMENT_IS_WEB=true}else if(Module["ENVIRONMENT"]==="WORKER"){ENVIRONMENT_IS_WORKER=true}else if(Module["ENVIRONMENT"]==="NODE"){ENVIRONMENT_IS_NODE=true}else if(Module["ENVIRONMENT"]==="SHELL"){ENVIRONMENT_IS_SHELL=true}else{throw new Error("Module['ENVIRONMENT'] value is not valid. must be one of: WEB|WORKER|NODE|SHELL.")}}else{ENVIRONMENT_IS_WEB=typeof window==="object";ENVIRONMENT_IS_WORKER=typeof importScripts==="function";ENVIRONMENT_IS_NODE=typeof process==="object"&&typeof null==="function"&&!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_WORKER;ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER}if(ENVIRONMENT_IS_NODE){var nodeFS;var nodePath;Module["read"]=function shell_read(filename,binary){var ret;if(!nodeFS)nodeFS=(null)("fs");if(!nodePath)nodePath=(null)("path");filename=nodePath["normalize"](filename);ret=nodeFS["readFileSync"](filename);return binary?ret:ret.toString()};Module["readBinary"]=function readBinary(filename){var ret=Module["read"](filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process["argv"].length>1){Module["thisProgram"]=process["argv"][1].replace(/\\/g,"/")}Module["arguments"]=process["argv"].slice(2);if(typeof module!=="undefined"){module["exports"]=Module}process["on"]("uncaughtException",(function(ex){if(!(ex instanceof ExitStatus)){throw 
ex}}));process["on"]("unhandledRejection",(function(reason,p){process["exit"](1)}));Module["inspect"]=(function(){return"[Emscripten Module object]"})}else if(ENVIRONMENT_IS_SHELL){if(typeof read!="undefined"){Module["read"]=function shell_read(f){return read(f)}}Module["readBinary"]=function readBinary(f){var data;if(typeof readbuffer==="function"){return new Uint8Array(readbuffer(f))}data=read(f,"binary");assert(typeof data==="object");return data};if(typeof scriptArgs!="undefined"){Module["arguments"]=scriptArgs}else if(typeof arguments!="undefined"){Module["arguments"]=arguments}if(typeof quit==="function"){Module["quit"]=(function(status,toThrow){quit(status)})}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){Module["read"]=function shell_read(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){Module["readBinary"]=function readBinary(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.responseType="arraybuffer";xhr.send(null);return new Uint8Array(xhr.response)}}Module["readAsync"]=function readAsync(url,onload,onerror){var xhr=new XMLHttpRequest;xhr.open("GET",url,true);xhr.responseType="arraybuffer";xhr.onload=function xhr_onload(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)};Module["setWindowTitle"]=(function(title){document.title=title})}else{throw new Error("not compiled for this environment")}Module["print"]=typeof console!=="undefined"?console.log.bind(console):typeof print!=="undefined"?print:null;Module["printErr"]=typeof printErr!=="undefined"?printErr:typeof console!=="undefined"&&console.warn.bind(console)||Module["print"];Module.print=Module["print"];Module.printErr=Module["printErr"];for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=undefined;var STACK_ALIGN=16;function staticAlloc(size){assert(!staticSealed);var 
ret=STATICTOP;STATICTOP=STATICTOP+size+15&-16;return ret}function alignMemory(size,factor){if(!factor)factor=STACK_ALIGN;var ret=size=Math.ceil(size/factor)*factor;return ret}var asm2wasmImports={"f64-rem":(function(x,y){return x%y}),"debugger":(function(){debugger})};var functionPointers=new Array(0);var GLOBAL_BASE=1024;var ABORT=0;var EXITSTATUS=0;function assert(condition,text){if(!condition){abort("Assertion failed: "+text)}}function Pointer_stringify(ptr,length){if(length===0||!ptr)return"";var hasUtf=0;var t;var i=0;while(1){t=HEAPU8[ptr+i>>0];hasUtf|=t;if(t==0&&!length)break;i++;if(length&&i==length)break}if(!length)length=i;var ret="";if(hasUtf<128){var MAX_CHUNK=1024;var curr;while(length>0){curr=String.fromCharCode.apply(String,HEAPU8.subarray(ptr,ptr+Math.min(length,MAX_CHUNK)));ret=ret?ret+curr:curr;ptr+=MAX_CHUNK;length-=MAX_CHUNK}return ret}return UTF8ToString(ptr)}var UTF8Decoder=typeof TextDecoder!=="undefined"?new TextDecoder("utf8"):undefined;function UTF8ArrayToString(u8Array,idx){var endPtr=idx;while(u8Array[endPtr])++endPtr;if(endPtr-idx>16&&u8Array.subarray&&UTF8Decoder){return UTF8Decoder.decode(u8Array.subarray(idx,endPtr))}else{var u0,u1,u2,u3,u4,u5;var str="";while(1){u0=u8Array[idx++];if(!u0)return str;if(!(u0&128)){str+=String.fromCharCode(u0);continue}u1=u8Array[idx++]&63;if((u0&224)==192){str+=String.fromCharCode((u0&31)<<6|u1);continue}u2=u8Array[idx++]&63;if((u0&240)==224){u0=(u0&15)<<12|u1<<6|u2}else{u3=u8Array[idx++]&63;if((u0&248)==240){u0=(u0&7)<<18|u1<<12|u2<<6|u3}else{u4=u8Array[idx++]&63;if((u0&252)==248){u0=(u0&3)<<24|u1<<18|u2<<12|u3<<6|u4}else{u5=u8Array[idx++]&63;u0=(u0&1)<<30|u1<<24|u2<<18|u3<<12|u4<<6|u5}}}if(u0<65536){str+=String.fromCharCode(u0)}else{var ch=u0-65536;str+=String.fromCharCode(55296|ch>>10,56320|ch&1023)}}}}function UTF8ToString(ptr){return UTF8ArrayToString(HEAPU8,ptr)}var UTF16Decoder=typeof TextDecoder!=="undefined"?new TextDecoder("utf-16le"):undefined;var WASM_PAGE_SIZE=65536;var 
ASMJS_PAGE_SIZE=16777216;function alignUp(x,multiple){if(x%multiple>0){x+=multiple-x%multiple}return x}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBuffer(buf){Module["buffer"]=buffer=buf}function updateGlobalBufferViews(){Module["HEAP8"]=HEAP8=new Int8Array(buffer);Module["HEAP16"]=HEAP16=new Int16Array(buffer);Module["HEAP32"]=HEAP32=new Int32Array(buffer);Module["HEAPU8"]=HEAPU8=new Uint8Array(buffer);Module["HEAPU16"]=HEAPU16=new Uint16Array(buffer);Module["HEAPU32"]=HEAPU32=new Uint32Array(buffer);Module["HEAPF32"]=HEAPF32=new Float32Array(buffer);Module["HEAPF64"]=HEAPF64=new Float64Array(buffer)}var STATIC_BASE,STATICTOP,staticSealed;var STACK_BASE,STACKTOP,STACK_MAX;var DYNAMIC_BASE,DYNAMICTOP_PTR;STATIC_BASE=STATICTOP=STACK_BASE=STACKTOP=STACK_MAX=DYNAMIC_BASE=DYNAMICTOP_PTR=0;staticSealed=false;function abortOnCannotGrowMemory(){abort("Cannot enlarge memory arrays. Either (1) compile with -s TOTAL_MEMORY=X with X higher than the current value "+TOTAL_MEMORY+", (2) compile with -s ALLOW_MEMORY_GROWTH=1 which allows increasing the size at runtime, or (3) if you want malloc to return NULL (0) instead of this abort, compile with -s ABORTING_MALLOC=0 ")}function enlargeMemory(){abortOnCannotGrowMemory()}var TOTAL_STACK=Module["TOTAL_STACK"]||5242880;var TOTAL_MEMORY=Module["TOTAL_MEMORY"]||52428800;if(TOTAL_MEMORY0){var callback=callbacks.shift();if(typeof callback=="function"){callback();continue}var func=callback.func;if(typeof func==="number"){if(callback.arg===undefined){Module["dynCall_v"](func)}else{Module["dynCall_vi"](func,callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATEXIT__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;var runtimeExited=false;function preRun(){if(Module["preRun"]){if(typeof 
Module["preRun"]=="function")Module["preRun"]=[Module["preRun"]];while(Module["preRun"].length){addOnPreRun(Module["preRun"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function ensureInitRuntime(){if(runtimeInitialized)return;runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){callRuntimeCallbacks(__ATMAIN__)}function exitRuntime(){callRuntimeCallbacks(__ATEXIT__);runtimeExited=true}function postRun(){if(Module["postRun"]){if(typeof Module["postRun"]=="function")Module["postRun"]=[Module["postRun"]];while(Module["postRun"].length){addOnPostRun(Module["postRun"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var Math_abs=Math.abs;var Math_cos=Math.cos;var Math_sin=Math.sin;var Math_tan=Math.tan;var Math_acos=Math.acos;var Math_asin=Math.asin;var Math_atan=Math.atan;var Math_atan2=Math.atan2;var Math_exp=Math.exp;var Math_log=Math.log;var Math_sqrt=Math.sqrt;var Math_ceil=Math.ceil;var Math_floor=Math.floor;var Math_pow=Math.pow;var Math_imul=Math.imul;var Math_fround=Math.fround;var Math_round=Math.round;var Math_min=Math.min;var Math_max=Math.max;var Math_clz32=Math.clz32;var Math_trunc=Math.trunc;var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){runDependencies++;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module["preloadedImages"]={};Module["preloadedAudios"]={};var dataURIPrefix="data:application/octet-stream;base64,";function isDataURI(filename){return 
String.prototype.startsWith?filename.startsWith(dataURIPrefix):filename.indexOf(dataURIPrefix)===0}function integrateWasmJS(){var wasmTextFile="avc.wast";var wasmBinaryFile="avc.wasm";var asmjsCodeFile="avc.temp.asm.js";if(typeof Module["locateFile"]==="function"){if(!isDataURI(wasmTextFile)){wasmTextFile=Module["locateFile"](wasmTextFile)}if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=Module["locateFile"](wasmBinaryFile)}if(!isDataURI(asmjsCodeFile)){asmjsCodeFile=Module["locateFile"](asmjsCodeFile)}}var wasmPageSize=64*1024;var info={"global":null,"env":null,"asm2wasm":asm2wasmImports,"parent":Module};var exports=null;function mergeMemory(newBuffer){var oldBuffer=Module["buffer"];if(newBuffer.byteLength>2];return ret}),getStr:(function(){var ret=Pointer_stringify(SYSCALLS.get());return ret}),get64:(function(){var low=SYSCALLS.get(),high=SYSCALLS.get();if(low>=0)assert(high===0);else assert(high===-1);return low}),getZero:(function(){assert(SYSCALLS.get()===0)})};function ___syscall140(which,varargs){SYSCALLS.varargs=varargs;try{var stream=SYSCALLS.getStreamFromFD(),offset_high=SYSCALLS.get(),offset_low=SYSCALLS.get(),result=SYSCALLS.get(),whence=SYSCALLS.get();var offset=offset_low;FS.llseek(stream,offset,whence);HEAP32[result>>2]=stream.position;if(stream.getdents&&offset===0&&whence===0)stream.getdents=null;return 0}catch(e){if(typeof FS==="undefined"||!(e instanceof FS.ErrnoError))abort(e);return-e.errno}}function ___syscall146(which,varargs){SYSCALLS.varargs=varargs;try{var stream=SYSCALLS.get(),iov=SYSCALLS.get(),iovcnt=SYSCALLS.get();var ret=0;if(!___syscall146.buffers){___syscall146.buffers=[null,[],[]];___syscall146.printChar=(function(stream,curr){var buffer=___syscall146.buffers[stream];assert(buffer);if(curr===0||curr===10){(stream===1?Module["print"]:Module["printErr"])(UTF8ArrayToString(buffer,0));buffer.length=0}else{buffer.push(curr)}})}for(var i=0;i>2];var len=HEAP32[iov+(i*8+4)>>2];for(var j=0;j>2]=value;return 
value}DYNAMICTOP_PTR=staticAlloc(4);STACK_BASE=STACKTOP=alignMemory(STATICTOP);STACK_MAX=STACK_BASE+TOTAL_STACK;DYNAMIC_BASE=alignMemory(STACK_MAX);HEAP32[DYNAMICTOP_PTR>>2]=DYNAMIC_BASE;staticSealed=true;Module["wasmTableSize"]=10;Module["wasmMaxTableSize"]=10;Module.asmGlobalArg={};Module.asmLibraryArg={"abort":abort,"enlargeMemory":enlargeMemory,"getTotalMemory":getTotalMemory,"abortOnCannotGrowMemory":abortOnCannotGrowMemory,"___setErrNo":___setErrNo,"___syscall140":___syscall140,"___syscall146":___syscall146,"___syscall54":___syscall54,"___syscall6":___syscall6,"_broadwayOnHeadersDecoded":_broadwayOnHeadersDecoded,"_broadwayOnPictureDecoded":_broadwayOnPictureDecoded,"_emscripten_memcpy_big":_emscripten_memcpy_big,"DYNAMICTOP_PTR":DYNAMICTOP_PTR,"STACKTOP":STACKTOP};var asm=Module["asm"](Module.asmGlobalArg,Module.asmLibraryArg,buffer);Module["asm"]=asm;var _broadwayCreateStream=Module["_broadwayCreateStream"]=(function(){return Module["asm"]["_broadwayCreateStream"].apply(null,arguments)});var _broadwayExit=Module["_broadwayExit"]=(function(){return Module["asm"]["_broadwayExit"].apply(null,arguments)});var _broadwayGetMajorVersion=Module["_broadwayGetMajorVersion"]=(function(){return Module["asm"]["_broadwayGetMajorVersion"].apply(null,arguments)});var _broadwayGetMinorVersion=Module["_broadwayGetMinorVersion"]=(function(){return Module["asm"]["_broadwayGetMinorVersion"].apply(null,arguments)});var _broadwayInit=Module["_broadwayInit"]=(function(){return Module["asm"]["_broadwayInit"].apply(null,arguments)});var _broadwayPlayStream=Module["_broadwayPlayStream"]=(function(){return Module["asm"]["_broadwayPlayStream"].apply(null,arguments)});Module["asm"]=asm;function ExitStatus(status){this.name="ExitStatus";this.message="Program terminated with exit("+status+")";this.status=status}ExitStatus.prototype=new Error;ExitStatus.prototype.constructor=ExitStatus;var initialStackTop;dependenciesFulfilled=function 
runCaller(){if(!Module["calledRun"])run();if(!Module["calledRun"])dependenciesFulfilled=runCaller};function run(args){args=args||Module["arguments"];if(runDependencies>0){return}preRun();if(runDependencies>0)return;if(Module["calledRun"])return;function doRun(){if(Module["calledRun"])return;Module["calledRun"]=true;if(ABORT)return;ensureInitRuntime();preMain();if(Module["onRuntimeInitialized"])Module["onRuntimeInitialized"]();postRun()}if(Module["setStatus"]){Module["setStatus"]("Running...");setTimeout((function(){setTimeout((function(){Module["setStatus"]("")}),1);doRun()}),1)}else{doRun()}}Module["run"]=run;function exit(status,implicit){if(implicit&&Module["noExitRuntime"]&&status===0){return}if(Module["noExitRuntime"]){}else{ABORT=true;EXITSTATUS=status;STACKTOP=initialStackTop;exitRuntime();if(Module["onExit"])Module["onExit"](status)}if(ENVIRONMENT_IS_NODE){process["exit"](status)}Module["quit"](status,new ExitStatus(status))}Module["exit"]=exit;function abort(what){if(Module["onAbort"]){Module["onAbort"](what)}if(what!==undefined){Module.print(what);Module.printErr(what);what=JSON.stringify(what)}else{what=""}ABORT=true;EXITSTATUS=1;throw"abort("+what+"). 
Build with -s ASSERTIONS=1 for more info."}Module["abort"]=abort;if(Module["preInit"]){if(typeof Module["preInit"]=="function")Module["preInit"]=[Module["preInit"]];while(Module["preInit"].length>0){Module["preInit"].pop()()}}Module["noExitRuntime"]=true;run() + + + + // return Module; + //})(); + + var resultModule; + if (typeof global !== "undefined"){ + if (global.Module){ + resultModule = global.Module; + }; + }; + if (typeof Module != "undefined"){ + resultModule = Module; + }; + + resultModule._broadwayOnHeadersDecoded = par_broadwayOnHeadersDecoded; + resultModule._broadwayOnPictureDecoded = par_broadwayOnPictureDecoded; + + var moduleIsReady = false; + var cbFun; + var moduleReady = function(){ + moduleIsReady = true; + if (cbFun){ + cbFun(resultModule); + } + }; + + resultModule.onRuntimeInitialized = function(){ + moduleReady(resultModule); + }; + return function(callback){ + if (moduleIsReady){ + callback(resultModule); + }else{ + cbFun = callback; + }; + }; + }; + + return (function(){ + "use strict"; + + + var nowValue = function(){ + return (new Date()).getTime(); + }; + + if (typeof performance != "undefined"){ + if (performance.now){ + nowValue = function(){ + return performance.now(); + }; + }; + }; + + + var Decoder = function(parOptions){ + this.options = parOptions || {}; + + this.now = nowValue; + + var asmInstance; + + var fakeWindow = { + }; + + var toU8Array; + var toU32Array; + + var onPicFun = function ($buffer, width, height) { + var buffer = this.pictureBuffers[$buffer]; + if (!buffer) { + buffer = this.pictureBuffers[$buffer] = toU8Array($buffer, (width * height * 3) / 2); + }; + + var infos; + var doInfo = false; + if (this.infoAr.length){ + doInfo = true; + infos = this.infoAr; + }; + this.infoAr = []; + + if (this.options.rgb){ + if (!asmInstance){ + asmInstance = getAsm(width, height); + }; + asmInstance.inp.set(buffer); + asmInstance.doit(); + + var copyU8 = new Uint8Array(asmInstance.outSize); + copyU8.set( asmInstance.out ); + + 
if (doInfo){ + infos[0].finishDecoding = nowValue(); + }; + + this.onPictureDecoded(copyU8, width, height, infos); + return; + + }; + + if (doInfo){ + infos[0].finishDecoding = nowValue(); + }; + this.onPictureDecoded(buffer, width, height, infos); + }.bind(this); + + var ignore = false; + + if (this.options.sliceMode){ + onPicFun = function ($buffer, width, height, $sliceInfo) { + if (ignore){ + return; + }; + var buffer = this.pictureBuffers[$buffer]; + if (!buffer) { + buffer = this.pictureBuffers[$buffer] = toU8Array($buffer, (width * height * 3) / 2); + }; + var sliceInfo = this.pictureBuffers[$sliceInfo]; + if (!sliceInfo) { + sliceInfo = this.pictureBuffers[$sliceInfo] = toU32Array($sliceInfo, 18); + }; + + var infos; + var doInfo = false; + if (this.infoAr.length){ + doInfo = true; + infos = this.infoAr; + }; + this.infoAr = []; + + /*if (this.options.rgb){ + + no rgb in slice mode + + };*/ + + infos[0].finishDecoding = nowValue(); + var sliceInfoAr = []; + for (var i = 0; i < 20; ++i){ + sliceInfoAr.push(sliceInfo[i]); + }; + infos[0].sliceInfoAr = sliceInfoAr; + + this.onPictureDecoded(buffer, width, height, infos); + }.bind(this); + }; + + var ModuleCallback = getModule.apply(fakeWindow, [function () { + }, onPicFun]); + + + var MAX_STREAM_BUFFER_LENGTH = 1024 * 1024; + + var instance = this; + this.onPictureDecoded = function (buffer, width, height, infos) { + + }; + + this.onDecoderReady = function(){}; + + var bufferedCalls = []; + this.decode = function decode(typedAr, parInfo, copyDoneFun) { + bufferedCalls.push([typedAr, parInfo, copyDoneFun]); + }; + + ModuleCallback(function(Module){ + var HEAP8 = Module.HEAP8; + var HEAPU8 = Module.HEAPU8; + var HEAP16 = Module.HEAP16; + var HEAP32 = Module.HEAP32; + // from old constructor + Module._broadwayInit(); + + /** + * Creates a typed array from a HEAP8 pointer. 
+ */ + toU8Array = function(ptr, length) { + return HEAPU8.subarray(ptr, ptr + length); + }; + toU32Array = function(ptr, length) { + //var tmp = HEAPU8.subarray(ptr, ptr + (length * 4)); + return new Uint32Array(HEAPU8.buffer, ptr, length); + }; + instance.streamBuffer = toU8Array(Module._broadwayCreateStream(MAX_STREAM_BUFFER_LENGTH), MAX_STREAM_BUFFER_LENGTH); + instance.pictureBuffers = {}; + // collect extra infos that are provided with the nal units + instance.infoAr = []; + + /** + * Decodes a stream buffer. This may be one single (unframed) NAL unit without the + * start code, or a sequence of NAL units with framing start code prefixes. This + * function overwrites stream buffer allocated by the codec with the supplied buffer. + */ + + var sliceNum = 0; + if (instance.options.sliceMode){ + sliceNum = instance.options.sliceNum; + + instance.decode = function decode(typedAr, parInfo, copyDoneFun) { + instance.infoAr.push(parInfo); + parInfo.startDecoding = nowValue(); + var nals = parInfo.nals; + var i; + if (!nals){ + nals = []; + parInfo.nals = nals; + var l = typedAr.length; + var foundSomething = false; + var lastFound = 0; + var lastStart = 0; + for (i = 0; i < l; ++i){ + if (typedAr[i] === 1){ + if ( + typedAr[i - 1] === 0 && + typedAr[i - 2] === 0 + ){ + var startPos = i - 2; + if (typedAr[i - 3] === 0){ + startPos = i - 3; + }; + // its a nal; + if (foundSomething){ + nals.push({ + offset: lastFound, + end: startPos, + type: typedAr[lastStart] & 31 + }); + }; + lastFound = startPos; + lastStart = startPos + 3; + if (typedAr[i - 3] === 0){ + lastStart = startPos + 4; + }; + foundSomething = true; + }; + }; + }; + if (foundSomething){ + nals.push({ + offset: lastFound, + end: i, + type: typedAr[lastStart] & 31 + }); + }; + }; + + var currentSlice = 0; + var playAr; + var offset = 0; + for (i = 0; i < nals.length; ++i){ + if (nals[i].type === 1 || nals[i].type === 5){ + if (currentSlice === sliceNum){ + playAr = typedAr.subarray(nals[i].offset, 
nals[i].end); + instance.streamBuffer[offset] = 0; + offset += 1; + instance.streamBuffer.set(playAr, offset); + offset += playAr.length; + }; + currentSlice += 1; + }else{ + playAr = typedAr.subarray(nals[i].offset, nals[i].end); + instance.streamBuffer[offset] = 0; + offset += 1; + instance.streamBuffer.set(playAr, offset); + offset += playAr.length; + Module._broadwayPlayStream(offset); + offset = 0; + }; + }; + copyDoneFun(); + Module._broadwayPlayStream(offset); + }; + + }else{ + instance.decode = function decode(typedAr, parInfo) { + // console.info("Decoding: " + buffer.length); + // collect infos + if (parInfo){ + instance.infoAr.push(parInfo); + parInfo.startDecoding = nowValue(); + }; + + instance.streamBuffer.set(typedAr); + Module._broadwayPlayStream(typedAr.length); + }; + }; + + if (bufferedCalls.length){ + var bi = 0; + for (bi = 0; bi < bufferedCalls.length; ++bi){ + instance.decode(bufferedCalls[bi][0], bufferedCalls[bi][1], bufferedCalls[bi][2]); + }; + bufferedCalls = []; + }; + + instance.onDecoderReady(instance); + + }); + + + }; + + + Decoder.prototype = { + + }; + + + + + /* + + asm.js implementation of a yuv to rgb convertor + provided by @soliton4 + + based on + http://www.wordsaretoys.com/2013/10/18/making-yuv-conversion-a-little-faster/ + + */ + + + // factory to create asm.js yuv -> rgb convertor for a given resolution + var asmInstances = {}; + var getAsm = function(parWidth, parHeight){ + var idStr = "" + parWidth + "x" + parHeight; + if (asmInstances[idStr]){ + return asmInstances[idStr]; + }; + + var lumaSize = parWidth * parHeight; + var chromaSize = (lumaSize|0) >> 2; + + var inpSize = lumaSize + chromaSize + chromaSize; + var outSize = parWidth * parHeight * 4; + var cacheSize = Math.pow(2, 24) * 4; + var size = inpSize + outSize + cacheSize; + + var chunkSize = Math.pow(2, 24); + var heapSize = chunkSize; + while (heapSize < size){ + heapSize += chunkSize; + }; + var heap = new ArrayBuffer(heapSize); + + var res = 
asmFactory(global, {}, heap); + res.init(parWidth, parHeight); + asmInstances[idStr] = res; + + res.heap = heap; + res.out = new Uint8Array(heap, 0, outSize); + res.inp = new Uint8Array(heap, outSize, inpSize); + res.outSize = outSize; + + return res; + }; + + + function asmFactory(stdlib, foreign, heap) { + "use asm"; + + var imul = stdlib.Math.imul; + var min = stdlib.Math.min; + var max = stdlib.Math.max; + var pow = stdlib.Math.pow; + var out = new stdlib.Uint8Array(heap); + var out32 = new stdlib.Uint32Array(heap); + var inp = new stdlib.Uint8Array(heap); + var mem = new stdlib.Uint8Array(heap); + var mem32 = new stdlib.Uint32Array(heap); + + // for double algo + /*var vt = 1.370705; + var gt = 0.698001; + var gt2 = 0.337633; + var bt = 1.732446;*/ + + var width = 0; + var height = 0; + var lumaSize = 0; + var chromaSize = 0; + var inpSize = 0; + var outSize = 0; + + var inpStart = 0; + var outStart = 0; + + var widthFour = 0; + + var cacheStart = 0; + + + function init(parWidth, parHeight){ + parWidth = parWidth|0; + parHeight = parHeight|0; + + var i = 0; + var s = 0; + + width = parWidth; + widthFour = imul(parWidth, 4)|0; + height = parHeight; + lumaSize = imul(width|0, height|0)|0; + chromaSize = (lumaSize|0) >> 2; + outSize = imul(imul(width, height)|0, 4)|0; + inpSize = ((lumaSize + chromaSize)|0 + chromaSize)|0; + + outStart = 0; + inpStart = (outStart + outSize)|0; + cacheStart = (inpStart + inpSize)|0; + + // initializing memory (to be on the safe side) + s = ~~(+pow(+2, +24)); + s = imul(s, 4)|0; + + for (i = 0|0; ((i|0) < (s|0))|0; i = (i + 4)|0){ + mem32[((cacheStart + i)|0) >> 2] = 0; + }; + }; + + function doit(){ + var ystart = 0; + var ustart = 0; + var vstart = 0; + + var y = 0; + var yn = 0; + var u = 0; + var v = 0; + + var o = 0; + + var line = 0; + var col = 0; + + var usave = 0; + var vsave = 0; + + var ostart = 0; + var cacheAdr = 0; + + ostart = outStart|0; + + ystart = inpStart|0; + ustart = (ystart + lumaSize|0)|0; + vstart = (ustart 
+ chromaSize)|0; + + for (line = 0; (line|0) < (height|0); line = (line + 2)|0){ + usave = ustart; + vsave = vstart; + for (col = 0; (col|0) < (width|0); col = (col + 2)|0){ + y = inp[ystart >> 0]|0; + yn = inp[((ystart + width)|0) >> 0]|0; + + u = inp[ustart >> 0]|0; + v = inp[vstart >> 0]|0; + + cacheAdr = (((((y << 16)|0) + ((u << 8)|0))|0) + v)|0; + o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0; + if (o){}else{ + o = yuv2rgbcalc(y,u,v)|0; + mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0; + }; + mem32[ostart >> 2] = o; + + cacheAdr = (((((yn << 16)|0) + ((u << 8)|0))|0) + v)|0; + o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0; + if (o){}else{ + o = yuv2rgbcalc(yn,u,v)|0; + mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0; + }; + mem32[((ostart + widthFour)|0) >> 2] = o; + + //yuv2rgb5(y, u, v, ostart); + //yuv2rgb5(yn, u, v, (ostart + widthFour)|0); + ostart = (ostart + 4)|0; + + // next step only for y. u and v stay the same + ystart = (ystart + 1)|0; + y = inp[ystart >> 0]|0; + yn = inp[((ystart + width)|0) >> 0]|0; + + //yuv2rgb5(y, u, v, ostart); + cacheAdr = (((((y << 16)|0) + ((u << 8)|0))|0) + v)|0; + o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0; + if (o){}else{ + o = yuv2rgbcalc(y,u,v)|0; + mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0; + }; + mem32[ostart >> 2] = o; + + //yuv2rgb5(yn, u, v, (ostart + widthFour)|0); + cacheAdr = (((((yn << 16)|0) + ((u << 8)|0))|0) + v)|0; + o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0; + if (o){}else{ + o = yuv2rgbcalc(yn,u,v)|0; + mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0; + }; + mem32[((ostart + widthFour)|0) >> 2] = o; + ostart = (ostart + 4)|0; + + //all positions inc 1 + + ystart = (ystart + 1)|0; + ustart = (ustart + 1)|0; + vstart = (vstart + 1)|0; + }; + ostart = (ostart + widthFour)|0; + ystart = (ystart + width)|0; + + }; + + }; + + function yuv2rgbcalc(y, u, v){ + y = y|0; + u = u|0; + v = v|0; + + var r = 0; + var g = 0; + var b = 0; + + var o = 0; + + var a0 = 0; + var a1 = 0; + var a2 = 0; + var a3 = 0; + 
var a4 = 0; + + a0 = imul(1192, (y - 16)|0)|0; + a1 = imul(1634, (v - 128)|0)|0; + a2 = imul(832, (v - 128)|0)|0; + a3 = imul(400, (u - 128)|0)|0; + a4 = imul(2066, (u - 128)|0)|0; + + r = (((a0 + a1)|0) >> 10)|0; + g = (((((a0 - a2)|0) - a3)|0) >> 10)|0; + b = (((a0 + a4)|0) >> 10)|0; + + if ((((r & 255)|0) != (r|0))|0){ + r = min(255, max(0, r|0)|0)|0; + }; + if ((((g & 255)|0) != (g|0))|0){ + g = min(255, max(0, g|0)|0)|0; + }; + if ((((b & 255)|0) != (b|0))|0){ + b = min(255, max(0, b|0)|0)|0; + }; + + o = 255; + o = (o << 8)|0; + o = (o + b)|0; + o = (o << 8)|0; + o = (o + g)|0; + o = (o << 8)|0; + o = (o + r)|0; + + return o|0; + + }; + + + + return { + init: init, + doit: doit + }; + }; + + + /* + potential worker initialization + + */ + + + if (typeof self != "undefined"){ + var isWorker = false; + var decoder; + var reuseMemory = false; + var sliceMode = false; + var sliceNum = 0; + var sliceCnt = 0; + var lastSliceNum = 0; + var sliceInfoAr; + var lastBuf; + var awaiting = 0; + var pile = []; + var startDecoding; + var finishDecoding; + var timeDecoding; + + var memAr = []; + var getMem = function(length){ + if (memAr.length){ + var u = memAr.shift(); + while (u && u.byteLength !== length){ + u = memAr.shift(); + }; + if (u){ + return u; + }; + }; + return new ArrayBuffer(length); + }; + + var copySlice = function(source, target, infoAr, width, height){ + + var length = width * height; + var length4 = length / 4 + var plane2 = length; + var plane3 = length + length4; + + var copy16 = function(parBegin, parEnd){ + var i = 0; + for (i = 0; i < 16; ++i){ + var begin = parBegin + (width * i); + var end = parEnd + (width * i) + target.set(source.subarray(begin, end), begin); + }; + }; + var copy8 = function(parBegin, parEnd){ + var i = 0; + for (i = 0; i < 8; ++i){ + var begin = parBegin + ((width / 2) * i); + var end = parEnd + ((width / 2) * i) + target.set(source.subarray(begin, end), begin); + }; + }; + var copyChunk = function(begin, end){ + 
target.set(source.subarray(begin, end), begin); + }; + + var begin = infoAr[0]; + var end = infoAr[1]; + if (end > 0){ + copy16(begin, end); + copy8(infoAr[2], infoAr[3]); + copy8(infoAr[4], infoAr[5]); + }; + begin = infoAr[6]; + end = infoAr[7]; + if (end > 0){ + copy16(begin, end); + copy8(infoAr[8], infoAr[9]); + copy8(infoAr[10], infoAr[11]); + }; + + begin = infoAr[12]; + end = infoAr[15]; + if (end > 0){ + copyChunk(begin, end); + copyChunk(infoAr[13], infoAr[16]); + copyChunk(infoAr[14], infoAr[17]); + }; + + }; + + var sliceMsgFun = function(){}; + + var setSliceCnt = function(parSliceCnt){ + sliceCnt = parSliceCnt; + lastSliceNum = sliceCnt - 1; + }; + + + self.addEventListener('message', function(e) { + + if (isWorker){ + if (reuseMemory){ + if (e.data.reuse){ + memAr.push(e.data.reuse); + }; + }; + if (e.data.buf){ + if (sliceMode && awaiting !== 0){ + pile.push(e.data); + }else{ + decoder.decode( + new Uint8Array(e.data.buf, e.data.offset || 0, e.data.length), + e.data.info, + function(){ + if (sliceMode && sliceNum !== lastSliceNum){ + postMessage(e.data, [e.data.buf]); + }; + } + ); + }; + return; + }; + + if (e.data.slice){ + // update ref pic + var copyStart = nowValue(); + copySlice(new Uint8Array(e.data.slice), lastBuf, e.data.infos[0].sliceInfoAr, e.data.width, e.data.height); + // is it the one? then we need to update it + if (e.data.theOne){ + copySlice(lastBuf, new Uint8Array(e.data.slice), sliceInfoAr, e.data.width, e.data.height); + if (timeDecoding > e.data.infos[0].timeDecoding){ + e.data.infos[0].timeDecoding = timeDecoding; + }; + e.data.infos[0].timeCopy += (nowValue() - copyStart); + }; + // move on + postMessage(e.data, [e.data.slice]); + + // next frame in the pipe? 
+ awaiting -= 1; + if (awaiting === 0 && pile.length){ + var data = pile.shift(); + decoder.decode( + new Uint8Array(data.buf, data.offset || 0, data.length), + data.info, + function(){ + if (sliceMode && sliceNum !== lastSliceNum){ + postMessage(data, [data.buf]); + }; + } + ); + }; + return; + }; + + if (e.data.setSliceCnt){ + setSliceCnt(e.data.sliceCnt); + return; + }; + + }else{ + if (e.data && e.data.type === "Broadway.js - Worker init"){ + isWorker = true; + decoder = new Decoder(e.data.options); + + if (e.data.options.sliceMode){ + reuseMemory = true; + sliceMode = true; + sliceNum = e.data.options.sliceNum; + setSliceCnt(e.data.options.sliceCnt); + + decoder.onPictureDecoded = function (buffer, width, height, infos) { + + // buffer needs to be copied because we give up ownership + var copyU8 = new Uint8Array(getMem(buffer.length)); + copySlice(buffer, copyU8, infos[0].sliceInfoAr, width, height); + + startDecoding = infos[0].startDecoding; + finishDecoding = infos[0].finishDecoding; + timeDecoding = finishDecoding - startDecoding; + infos[0].timeDecoding = timeDecoding; + infos[0].timeCopy = 0; + + postMessage({ + slice: copyU8.buffer, + sliceNum: sliceNum, + width: width, + height: height, + infos: infos + }, [copyU8.buffer]); // 2nd parameter is used to indicate transfer of ownership + + awaiting = sliceCnt - 1; + + lastBuf = buffer; + sliceInfoAr = infos[0].sliceInfoAr; + + }; + + }else if (e.data.options.reuseMemory){ + reuseMemory = true; + decoder.onPictureDecoded = function (buffer, width, height, infos) { + + // buffer needs to be copied because we give up ownership + var copyU8 = new Uint8Array(getMem(buffer.length)); + copyU8.set( buffer, 0, buffer.length ); + + postMessage({ + buf: copyU8.buffer, + length: buffer.length, + width: width, + height: height, + infos: infos + }, [copyU8.buffer]); // 2nd parameter is used to indicate transfer of ownership + + }; + + }else{ + decoder.onPictureDecoded = function (buffer, width, height, infos) { + if 
(buffer) { + buffer = new Uint8Array(buffer); + }; + + // buffer needs to be copied because we give up ownership + var copyU8 = new Uint8Array(buffer.length); + copyU8.set( buffer, 0, buffer.length ); + + postMessage({ + buf: copyU8.buffer, + length: buffer.length, + width: width, + height: height, + infos: infos + }, [copyU8.buffer]); // 2nd parameter is used to indicate transfer of ownership + + }; + }; + postMessage({ consoleLog: "broadway worker initialized" }); + }; + }; + + + }, false); + }; + + Decoder.nowValue = nowValue; + + return Decoder; + + })(); + + +})); + diff --git a/carsrun/static/Player.js b/carsrun/static/Player.js new file mode 100644 index 0000000..ff2e649 --- /dev/null +++ b/carsrun/static/Player.js @@ -0,0 +1,335 @@ +/* + + +usage: + +p = new Player({ + useWorker: , + workerFile: // give path to Decoder.js + webgl: true | false | "auto" // defaults to "auto" +}); + +// canvas property represents the canvas node +// put it somewhere in the dom +p.canvas; + +p.webgl; // contains the used rendering mode. if you pass auto to webgl you can see what auto detection resulted in + +p.decode(); + + +*/ + + + +// universal module definition +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define(["./Decoder", "./YUVCanvas"], factory); + } else if (typeof exports === 'object') { + // Node. Does not work with strict CommonJS, but + // only CommonJS-like environments that support module.exports, + // like Node. 
+ module.exports = factory(require("./Decoder"), require("./YUVCanvas")); + } else { + // Browser globals (root is window) + root.Player = factory(root.Decoder, root.YUVCanvas); + } +}(this, function (Decoder, WebGLCanvas) { + "use strict"; + + + var nowValue = Decoder.nowValue; + + + var Player = function(parOptions){ + var self = this; + this._config = parOptions || {}; + + this.render = true; + if (this._config.render === false){ + this.render = false; + }; + + this.nowValue = nowValue; + + this._config.workerFile = this._config.workerFile || "Decoder.js"; + if (this._config.preserveDrawingBuffer){ + this._config.contextOptions = this._config.contextOptions || {}; + this._config.contextOptions.preserveDrawingBuffer = true; + }; + + var webgl = "auto"; + if (this._config.webgl === true){ + webgl = true; + }else if (this._config.webgl === false){ + webgl = false; + }; + + if (webgl == "auto"){ + webgl = true; + try{ + if (!window.WebGLRenderingContext) { + // the browser doesn't even know what WebGL is + webgl = false; + } else { + var canvas = document.createElement('canvas'); + var ctx = canvas.getContext("webgl"); + if (!ctx) { + // browser supports WebGL but initialization failed. 
+ webgl = false; + }; + }; + }catch(e){ + webgl = false; + }; + }; + + this.webgl = webgl; + + // choose functions + if (this.webgl){ + this.createCanvasObj = this.createCanvasWebGL; + this.renderFrame = this.renderFrameWebGL; + }else{ + this.createCanvasObj = this.createCanvasRGB; + this.renderFrame = this.renderFrameRGB; + }; + + + var lastWidth; + var lastHeight; + var onPictureDecoded = function(buffer, width, height, infos) { + self.onPictureDecoded(buffer, width, height, infos); + + var startTime = nowValue(); + + if (!buffer || !self.render) { + return; + }; + + self.renderFrame({ + canvasObj: self.canvasObj, + data: buffer, + width: width, + height: height + }); + + if (self.onRenderFrameComplete){ + self.onRenderFrameComplete({ + data: buffer, + width: width, + height: height, + infos: infos, + canvasObj: self.canvasObj + }); + }; + + }; + + // provide size + + if (!this._config.size){ + this._config.size = {}; + }; + this._config.size.width = this._config.size.width || 200; + this._config.size.height = this._config.size.height || 200; + + if (this._config.useWorker){ + var worker = new Worker(this._config.workerFile); + this.worker = worker; + worker.addEventListener('message', function(e) { + var data = e.data; + if (data.consoleLog){ + console.log(data.consoleLog); + return; + }; + + onPictureDecoded.call(self, new Uint8Array(data.buf, 0, data.length), data.width, data.height, data.infos); + + }, false); + + worker.postMessage({type: "Broadway.js - Worker init", options: { + rgb: !webgl, + memsize: this.memsize, + reuseMemory: this._config.reuseMemory ? true : false + }}); + + if (this._config.transferMemory){ + this.decode = function(parData, parInfo){ + // no copy + // instead we are transfering the ownership of the buffer + // dangerous!!! + + worker.postMessage({buf: parData.buffer, offset: parData.byteOffset, length: parData.length, info: parInfo}, [parData.buffer]); // Send data to our worker. 
+ }; + + }else{ + this.decode = function(parData, parInfo){ + // Copy the sample so that we only do a structured clone of the + // region of interest + var copyU8 = new Uint8Array(parData.length); + copyU8.set( parData, 0, parData.length ); + worker.postMessage({buf: copyU8.buffer, offset: 0, length: parData.length, info: parInfo}, [copyU8.buffer]); // Send data to our worker. + }; + + }; + + if (this._config.reuseMemory){ + this.recycleMemory = function(parArray){ + //this.beforeRecycle(); + worker.postMessage({reuse: parArray.buffer}, [parArray.buffer]); // Send data to our worker. + //this.afterRecycle(); + }; + } + + }else{ + + this.decoder = new Decoder({ + rgb: !webgl + }); + this.decoder.onPictureDecoded = onPictureDecoded; + + this.decode = function(parData, parInfo){ + self.decoder.decode(parData, parInfo); + }; + + }; + + + + if (this.render){ + this.canvasObj = this.createCanvasObj({ + contextOptions: this._config.contextOptions + }); + this.canvas = this.canvasObj.canvas; + }; + + this.domNode = this.canvas; + + lastWidth = this._config.size.width; + lastHeight = this._config.size.height; + + }; + + Player.prototype = { + + onPictureDecoded: function(buffer, width, height, infos){}, + + // call when memory of decoded frames is not used anymore + recycleMemory: function(buf){ + }, + /*beforeRecycle: function(){}, + afterRecycle: function(){},*/ + + // for both functions options is: + // + // width + // height + // enableScreenshot + // + // returns a object that has a property canvas which is a html5 canvas + createCanvasWebGL: function(options){ + var canvasObj = this._createBasicCanvasObj(options); + canvasObj.contextOptions = options.contextOptions; + return canvasObj; + }, + + createCanvasRGB: function(options){ + var canvasObj = this._createBasicCanvasObj(options); + return canvasObj; + }, + + // part that is the same for webGL and RGB + _createBasicCanvasObj: function(options){ + options = options || {}; + + var obj = {}; + var width = 
options.width; + if (!width){ + width = this._config.size.width; + }; + var height = options.height; + if (!height){ + height = this._config.size.height; + }; + obj.canvas = document.createElement('canvas'); + obj.canvas.width = width; + obj.canvas.height = height; + obj.canvas.style.backgroundColor = "#0D0E1B"; + + + return obj; + }, + + // options: + // + // canvas + // data + renderFrameWebGL: function(options){ + + var canvasObj = options.canvasObj; + + var width = options.width || canvasObj.canvas.width; + var height = options.height || canvasObj.canvas.height; + + if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height || !canvasObj.webGLCanvas){ + canvasObj.canvas.width = width; + canvasObj.canvas.height = height; + canvasObj.webGLCanvas = new WebGLCanvas({ + canvas: canvasObj.canvas, + contextOptions: canvasObj.contextOptions, + width: width, + height: height + }); + }; + + var ylen = width * height; + var uvlen = (width / 2) * (height / 2); + + canvasObj.webGLCanvas.drawNextOutputPicture({ + yData: options.data.subarray(0, ylen), + uData: options.data.subarray(ylen, ylen + uvlen), + vData: options.data.subarray(ylen + uvlen, ylen + uvlen + uvlen) + }); + + var self = this; + self.recycleMemory(options.data); + + }, + renderFrameRGB: function(options){ + var canvasObj = options.canvasObj; + + var width = options.width || canvasObj.canvas.width; + var height = options.height || canvasObj.canvas.height; + + if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height){ + canvasObj.canvas.width = width; + canvasObj.canvas.height = height; + }; + + var ctx = canvasObj.ctx; + var imgData = canvasObj.imgData; + + if (!ctx){ + canvasObj.ctx = canvasObj.canvas.getContext('2d'); + ctx = canvasObj.ctx; + + canvasObj.imgData = ctx.createImageData(width, height); + imgData = canvasObj.imgData; + }; + + imgData.data.set(options.data); + ctx.putImageData(imgData, 0, 0); + var self = this; + self.recycleMemory(options.data); + + } + + }; + 
+ return Player; + +})); + diff --git a/carsrun/static/YUVCanvas.js b/carsrun/static/YUVCanvas.js new file mode 100644 index 0000000..1bca8a3 --- /dev/null +++ b/carsrun/static/YUVCanvas.js @@ -0,0 +1,551 @@ +// +// Copyright (c) 2015 Paperspace Co. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to +// deal in the Software without restriction, including without limitation the +// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +// sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +// IN THE SOFTWARE. +// + + +// universal module definition +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define([], factory); + } else if (typeof exports === 'object') { + // Node. Does not work with strict CommonJS, but + // only CommonJS-like environments that support module.exports, + // like Node. + module.exports = factory(); + } else { + // Browser globals (root is window) + root.YUVCanvas = factory(); + } +}(this, function () { + + +/** + * This class can be used to render output pictures from an H264bsdDecoder to a canvas element. 
 * If available the content is rendered using WebGL.
 */

// Construct a YUV renderer bound to a canvas.
// Options (all optional): canvas, contextOptions (passed to getContext),
// type ("yuv420" default | "yuv422"), customYUV444, conversionType
// ("rec601" default | "rec709"), width (default 640), height (default 320),
// animationTime (ms; >0 enables the grow-in quad animation in initBuffers).
function YUVCanvas(parOptions) {

  parOptions = parOptions || {};

  this.canvasElement = parOptions.canvas || document.createElement("canvas");
  this.contextOptions = parOptions.contextOptions;

  this.type = parOptions.type || "yuv420";

  this.customYUV444 = parOptions.customYUV444;

  this.conversionType = parOptions.conversionType || "rec601";

  this.width = parOptions.width || 640;
  this.height = parOptions.height || 320;

  this.animationTime = parOptions.animationTime || 0;

  this.canvasElement.width = this.width;
  this.canvasElement.height = this.height;

  this.initContextGL();

  // Shader program, vertex/texture buffers and textures are only set up
  // when a WebGL context was obtained; otherwise callers use the 2D path.
  if(this.contextGL) {
    this.initProgram();
    this.initBuffers();
    this.initTextures();
  };


/**
 * Draw the next output picture using WebGL
 */
  if (this.type === "yuv420"){
    // Planar YUV 4:2:0: par carries yData/uData/vData plus optional per-plane
    // stride (…DataPerRow) and row counts (…RowCnt); strides default to the
    // picture width (chroma: width/2), so padded decoder output is cropped
    // via the texture coordinates below.
    this.drawNextOuptutPictureGL = function(par) {
      var gl = this.contextGL;
      var texturePosBuffer = this.texturePosBuffer;
      var uTexturePosBuffer = this.uTexturePosBuffer;
      var vTexturePosBuffer = this.vTexturePosBuffer;

      var yTextureRef = this.yTextureRef;
      var uTextureRef = this.uTextureRef;
      var vTextureRef = this.vTextureRef;

      var yData = par.yData;
      var uData = par.uData;
      var vData = par.vData;

      var width = this.width;
      var height = this.height;

      var yDataPerRow = par.yDataPerRow || width;
      var yRowCnt = par.yRowCnt || height;

      var uDataPerRow = par.uDataPerRow || (width / 2);
      var uRowCnt = par.uRowCnt || (height / 2);

      var vDataPerRow = par.vDataPerRow || uDataPerRow;
      var vRowCnt = par.vRowCnt || uRowCnt;

      gl.viewport(0, 0, width, height);

      // Texture coordinates select only the visible region of each plane:
      // when the stride exceeds the display width, tRight < 1 crops padding.
      var tTop = 0;
      var tLeft = 0;
      var tBottom = height / yRowCnt;
      var tRight = width / yDataPerRow;
      var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);

      gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
      gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW);

      // Chroma planes are half resolution unless customYUV444 is set.
      if (this.customYUV444){
        tBottom = height / uRowCnt;
        tRight = width / uDataPerRow;
      }else{
        tBottom = (height / 2) / uRowCnt;
        tRight = (width / 2) / uDataPerRow;
      };
      var uTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);

      gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer);
      gl.bufferData(gl.ARRAY_BUFFER, uTexturePosValues, gl.DYNAMIC_DRAW);


      if (this.customYUV444){
        tBottom = height / vRowCnt;
        tRight = width / vDataPerRow;
      }else{
        tBottom = (height / 2) / vRowCnt;
        tRight = (width / 2) / vDataPerRow;
      };
      var vTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);

      gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer);
      gl.bufferData(gl.ARRAY_BUFFER, vTexturePosValues, gl.DYNAMIC_DRAW);


      // Upload each plane as a single-channel LUMINANCE texture on
      // texture units 0/1/2 (matching the sampler uniforms in initTextures).
      gl.activeTexture(gl.TEXTURE0);
      gl.bindTexture(gl.TEXTURE_2D, yTextureRef);
      gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, yDataPerRow, yRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, yData);

      gl.activeTexture(gl.TEXTURE1);
      gl.bindTexture(gl.TEXTURE_2D, uTextureRef);
      gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, uDataPerRow, uRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, uData);

      gl.activeTexture(gl.TEXTURE2);
      gl.bindTexture(gl.TEXTURE_2D, vTextureRef);
      gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, vDataPerRow, vRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, vData);

      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    };

  }else if (this.type === "yuv422"){
    // Packed 4:2:2: a single interleaved buffer (par.data) is uploaded as one
    // texture; the fragment shader (see initProgram) picks Y/U/V samples out
    // of it using the 'resolution' uniform set below.
    this.drawNextOuptutPictureGL = function(par) {
      var gl = this.contextGL;
      var texturePosBuffer = this.texturePosBuffer;

      var textureRef = this.textureRef;

      var data = par.data;

      var width = this.width;
      var height = this.height;

      var dataPerRow = par.dataPerRow || (width * 2);
      var rowCnt = par.rowCnt || height;

      gl.viewport(0, 0, width, height);

      var tTop = 0;
      var tLeft = 0;
      var tBottom = height / rowCnt;
      var tRight = width / (dataPerRow / 2);
      var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);

      gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
      gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW);

      gl.uniform2f(gl.getUniformLocation(this.shaderProgram, 'resolution'), dataPerRow, height);

      gl.activeTexture(gl.TEXTURE0);
      gl.bindTexture(gl.TEXTURE_2D, textureRef);
      gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, dataPerRow, rowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data);

      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    };
  };

};

/**
 * Returns true if the canvas supports WebGL
 * (actually returns the context itself, which is truthy when available).
 */
YUVCanvas.prototype.isWebGL = function() {
  return this.contextGL;
};

/**
 * Create the GL context from the canvas element.
 * Tries the standard and legacy vendor-prefixed context names in order and
 * sanity-checks the result via getParameter; stores null when unavailable.
 */
YUVCanvas.prototype.initContextGL = function() {
  var canvas = this.canvasElement;
  var gl = null;

  var validContextNames = ["webgl", "experimental-webgl", "moz-webgl", "webkit-3d"];
  var nameIndex = 0;

  while(!gl && nameIndex < validContextNames.length) {
    var contextName = validContextNames[nameIndex];

    try {
      if (this.contextOptions){
        gl = canvas.getContext(contextName, this.contextOptions);
      }else{
        gl = canvas.getContext(contextName);
      };
    } catch (e) {
      // Some browsers throw instead of returning null for unsupported names.
      gl = null;
    }

    if(!gl || typeof gl.getParameter !== "function") {
      // reject objects that are not real GL contexts
      gl = null;
    }

    ++nameIndex;
  };

  this.contextGL = gl;
};

/**
 * Initialize GL shader program.
 * Builds a vertex/fragment shader pair depending on this.type:
 * yuv420 samples three planar LUMINANCE textures, yuv422 decodes the
 * interleaved packed layout inside the fragment shader.
 */
YUVCanvas.prototype.initProgram = function() {
  var gl = this.contextGL;

  // vertex shader is the same for all types
  var vertexShaderScript;
  var fragmentShaderScript;

  if (this.type === "yuv420"){

    vertexShaderScript = [
      'attribute vec4 vertexPos;',
      'attribute vec4 texturePos;',
      'attribute vec4 uTexturePos;',
      'attribute vec4 vTexturePos;',
      'varying vec2 textureCoord;',
      'varying vec2 uTextureCoord;',
      'varying vec2 vTextureCoord;',

      'void main()',
      '{',
      ' gl_Position = vertexPos;',
      ' textureCoord = texturePos.xy;',
      ' uTextureCoord = uTexturePos.xy;',
      ' vTextureCoord = vTexturePos.xy;',
      '}'
    ].join('\n');

    fragmentShaderScript = [
      'precision highp float;',
      'varying highp vec2 textureCoord;',
      'varying highp vec2 uTextureCoord;',
      'varying highp vec2 vTextureCoord;',
      'uniform sampler2D ySampler;',
      'uniform sampler2D uSampler;',
      'uniform sampler2D vSampler;',
      'uniform mat4 YUV2RGB;',

      'void main(void) {',
      ' highp float y = texture2D(ySampler, textureCoord).r;',
      ' highp float u = texture2D(uSampler, uTextureCoord).r;',
      ' highp float v = texture2D(vSampler, vTextureCoord).r;',
      ' gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;',
      '}'
    ].join('\n');

  }else if (this.type === "yuv422"){
    vertexShaderScript = [
      'attribute vec4 vertexPos;',
      'attribute vec4 texturePos;',
      'varying vec2 textureCoord;',

      'void main()',
      '{',
      ' gl_Position = vertexPos;',
      ' textureCoord = texturePos.xy;',
      '}'
    ].join('\n');

    fragmentShaderScript = [
      'precision highp float;',
      'varying highp vec2 textureCoord;',
      'uniform sampler2D sampler;',
      'uniform highp vec2 resolution;',
      'uniform mat4 YUV2RGB;',

      'void main(void) {',

      ' highp float texPixX = 1.0 / resolution.x;',
      ' highp float logPixX = 2.0 / resolution.x;', // half the resolution of the texture
      ' highp float logHalfPixX = 4.0 / resolution.x;', // half of the logical resolution so every 4th pixel
      ' highp float steps = floor(textureCoord.x / logPixX);',
      ' highp float uvSteps = floor(textureCoord.x / logHalfPixX);',
      ' highp float y = texture2D(sampler, vec2((logPixX * steps) + texPixX, textureCoord.y)).r;',
      ' highp float u = texture2D(sampler, vec2((logHalfPixX * uvSteps), textureCoord.y)).r;',
      ' highp float v = texture2D(sampler, vec2((logHalfPixX * uvSteps) + texPixX + texPixX, textureCoord.y)).r;',

      //' highp float y = texture2D(sampler, textureCoord).r;',
      //' gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;',
      ' gl_FragColor = vec4(y, u, v, 1.0) * YUV2RGB;',
      '}'
    ].join('\n');
  };

  // YUV -> RGB conversion matrix, chosen by conversionType and uploaded as a
  // mat4 uniform (the fragment shader computes vec4(y,u,v,1) * YUV2RGB).
  var YUV2RGB = [];

  if (this.conversionType == "rec709") {
    // ITU-T Rec. 709
    YUV2RGB = [
      1.16438, 0.00000, 1.79274, -0.97295,
      1.16438, -0.21325, -0.53291, 0.30148,
      1.16438, 2.11240, 0.00000, -1.13340,
      0, 0, 0, 1,
    ];
  } else {
    // assume ITU-T Rec. 601
    YUV2RGB = [
      1.16438, 0.00000, 1.59603, -0.87079,
      1.16438, -0.39176, -0.81297, 0.52959,
      1.16438, 2.01723, 0.00000, -1.08139,
      0, 0, 0, 1
    ];
  };

  // Compile both shaders; failures are logged but not fatal here.
  var vertexShader = gl.createShader(gl.VERTEX_SHADER);
  gl.shaderSource(vertexShader, vertexShaderScript);
  gl.compileShader(vertexShader);
  if(!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
    console.log('Vertex shader failed to compile: ' + gl.getShaderInfoLog(vertexShader));
  }

  var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
  gl.shaderSource(fragmentShader, fragmentShaderScript);
  gl.compileShader(fragmentShader);
  if(!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
    console.log('Fragment shader failed to compile: ' + gl.getShaderInfoLog(fragmentShader));
  }

  // Link and immediately activate the program; the YUV2RGB uniform is set
  // once here and never changes for the lifetime of this canvas.
  var program = gl.createProgram();
  gl.attachShader(program, vertexShader);
  gl.attachShader(program, fragmentShader);
  gl.linkProgram(program);
  if(!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    console.log('Program failed to compile: ' + gl.getProgramInfoLog(program));
  }

  gl.useProgram(program);

  var YUV2RGBRef = gl.getUniformLocation(program, 'YUV2RGB');
  gl.uniformMatrix4fv(YUV2RGBRef, false, YUV2RGB);

  this.shaderProgram = program;
};

/**
 * Initialize vertex buffers and attach to shader program.
 * Sets up the full-screen quad (as a 4-vertex triangle strip) and the
 * per-plane texture coordinate buffers used by drawNextOuptutPictureGL.
 */
YUVCanvas.prototype.initBuffers = function() {
  var gl = this.contextGL;
  var program = this.shaderProgram;

  // Quad covering the whole clip space; drawn with gl.TRIANGLE_STRIP.
  var vertexPosBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 1, -1, 1, 1, -1, -1, -1]), gl.STATIC_DRAW);

  var vertexPosRef = gl.getAttribLocation(program, 'vertexPos');
  gl.enableVertexAttribArray(vertexPosRef);
  gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0);

  // Optional grow-in animation: over animationTime ms the quad is rebuilt
  // every stepTime ms, scaling from the center out to full size.
  // NOTE(review): each tick allocates a fresh GL buffer that is never
  // deleted, and draws outside the normal frame path — presumably a
  // one-shot intro effect; confirm before reuse.
  if (this.animationTime){

    var animationTime = this.animationTime;
    var timePassed = 0;
    var stepTime = 15;

    var aniFun = function(){

      timePassed += stepTime;
      var mul = ( 1 * timePassed ) / animationTime;

      if (timePassed >= animationTime){
        mul = 1;
      }else{
        setTimeout(aniFun, stepTime);
      };

      var neg = -1 * mul;
      var pos = 1 * mul;

      var vertexPosBuffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer);
      gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([pos, pos, neg, pos, pos, neg, neg, neg]), gl.STATIC_DRAW);

      var vertexPosRef = gl.getAttribLocation(program, 'vertexPos');
      gl.enableVertexAttribArray(vertexPosRef);
      gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0);

      try{
        gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
      }catch(e){};

    };
    aniFun();

  };



  // Texture coordinate buffer for the luma plane; overwritten with
  // DYNAMIC_DRAW data on every frame in drawNextOuptutPictureGL.
  var texturePosBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);

  var texturePosRef = gl.getAttribLocation(program, 'texturePos');
  gl.enableVertexAttribArray(texturePosRef);
  gl.vertexAttribPointer(texturePosRef, 2, gl.FLOAT, false, 0, 0);

  this.texturePosBuffer = texturePosBuffer;

  // Planar 4:2:0 needs separate texture coordinate buffers for U and V.
  if (this.type === "yuv420"){
    var uTexturePosBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);

    var uTexturePosRef = gl.getAttribLocation(program, 'uTexturePos');
    gl.enableVertexAttribArray(uTexturePosRef);
    gl.vertexAttribPointer(uTexturePosRef, 2, gl.FLOAT, false, 0, 0);

    this.uTexturePosBuffer = uTexturePosBuffer;


    var vTexturePosBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);

    var vTexturePosRef = gl.getAttribLocation(program, 'vTexturePos');
    gl.enableVertexAttribArray(vTexturePosRef);
    gl.vertexAttribPointer(vTexturePosRef, 2, gl.FLOAT, false, 0, 0);

    this.vTexturePosBuffer = vTexturePosBuffer;
  };

};

/**
 * Initialize GL textures and attach to shader program.
 * yuv420 binds three samplers to texture units 0/1/2 (Y/U/V);
 * yuv422 binds one sampler on unit 0 for the packed buffer.
 */
YUVCanvas.prototype.initTextures = function() {
  var gl = this.contextGL;
  var program = this.shaderProgram;

  if (this.type === "yuv420"){

    var yTextureRef = this.initTexture();
    var ySamplerRef = gl.getUniformLocation(program, 'ySampler');
    gl.uniform1i(ySamplerRef, 0);
    this.yTextureRef = yTextureRef;

    var uTextureRef = this.initTexture();
    var uSamplerRef = gl.getUniformLocation(program, 'uSampler');
    gl.uniform1i(uSamplerRef, 1);
    this.uTextureRef = uTextureRef;

    var vTextureRef = this.initTexture();
    var vSamplerRef = gl.getUniformLocation(program, 'vSampler');
    gl.uniform1i(vSamplerRef, 2);
    this.vTextureRef = vTextureRef;

  }else if (this.type === "yuv422"){
    // only one texture for 422
    var textureRef = this.initTexture();
    var samplerRef = gl.getUniformLocation(program, 'sampler');
    gl.uniform1i(samplerRef, 0);
    this.textureRef = textureRef;

  };
};

/**
 * Create and configure a single texture.
 * NEAREST filtering and CLAMP_TO_EDGE wrapping: no interpolation between
 * samples, and non-power-of-two plane sizes remain legal in WebGL 1.
 */
YUVCanvas.prototype.initTexture = function() {
  var gl = this.contextGL;

  var textureRef = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, textureRef);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  gl.bindTexture(gl.TEXTURE_2D, null);

  return textureRef;
};

/**
 * Draw picture data to the canvas.
 * If this object is using WebGL, the data must be an I420 formatted ArrayBuffer,
 * Otherwise, data must be an RGBA formatted ArrayBuffer.
 */
// Dispatch to the GL or 2D rendering path depending on context availability.
// NOTE(review): the GL path forwards these positional arguments to
// drawNextOuptutPictureGL, which is defined to take a single options object
// (par) — in practice callers appear to pass one object as the first
// argument, so it arrives as `par`; confirm against callers before relying
// on the named parameters here.
YUVCanvas.prototype.drawNextOutputPicture = function(width, height, croppingParams, data) {
  var gl = this.contextGL;

  if(gl) {
    this.drawNextOuptutPictureGL(width, height, croppingParams, data);
  } else {
    this.drawNextOuptutPictureRGBA(width, height, croppingParams, data);
  }
};



/**
 * Draw next output picture using ARGB data on a 2d canvas.
 */
YUVCanvas.prototype.drawNextOuptutPictureRGBA = function(width, height, croppingParams, data) {
  var canvas = this.canvasElement;

  // NOTE(review): this local shadows the croppingParams parameter, so the
  // cropped putImageData branch below is unreachable — looks deliberate
  // (cropping disabled) but should be confirmed upstream.
  var croppingParams = null;

  var argbData = data;

  var ctx = canvas.getContext('2d');
  var imageData = ctx.getImageData(0, 0, width, height);
  imageData.data.set(argbData);

  if(croppingParams === null) {
    ctx.putImageData(imageData, 0, 0);
  } else {
    ctx.putImageData(imageData, -croppingParams.left, -croppingParams.top, 0, 0, croppingParams.width, croppingParams.height);
  }
};

  return YUVCanvas;

}));
diff --git a/carsrun/static/avc.wasm b/carsrun/static/avc.wasm
new file mode 100644
index 0000000..378ac32
Binary files /dev/null and b/carsrun/static/avc.wasm differ
diff --git a/carsrun/static/style.css b/carsrun/static/style.css
new file mode 100644
index 0000000..6030dec
--- /dev/null
+++ b/carsrun/static/style.css
@@ -0,0 +1,9 @@


body{
    background: white;
    color: black;
    padding:1%;
    text-align: center;
}

diff --git a/carsrun/templates/Decoder.js b/carsrun/templates/Decoder.js
new file mode 100644
index 0000000..6f09f55
--- /dev/null
+++ b/carsrun/templates/Decoder.js
@@ -0,0 +1,891 @@
// universal module definition
(function (root, factory) {
  if (typeof define === 'function' && define.amd) {
    // AMD. Register as an anonymous module.
    define([], factory);
  } else if (typeof exports === 'object') {
    // Node. Does not work with strict CommonJS, but
    // only CommonJS-like environments that support module.exports,
    // like Node.
+ module.exports = factory(); + } else { + // Browser globals (root is window) + root.Decoder = factory(); + } +}(this, function () { + + var global; + + function initglobal(){ + global = this; + if (!global){ + if (typeof window != "undefined"){ + global = window; + }else if (typeof self != "undefined"){ + global = self; + }; + }; + }; + initglobal(); + + + function error(message) { + console.error(message); + console.trace(); + }; + + + function assert(condition, message) { + if (!condition) { + error(message); + }; + }; + + + + + var getModule = function(par_broadwayOnHeadersDecoded, par_broadwayOnPictureDecoded){ + + + /*var ModuleX = { + 'print': function(text) { console.log('stdout: ' + text); }, + 'printErr': function(text) { console.log('stderr: ' + text); } + };*/ + + + /* + + The reason why this is all packed into one file is that this file can also function as worker. + you can integrate the file into your build system and provide the original file to be loaded into a worker. + + */ + + //var Module = (function(){ + + +var Module=typeof Module!=="undefined"?Module:{};var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}Module["arguments"]=[];Module["thisProgram"]="./this.program";Module["quit"]=(function(status,toThrow){throw toThrow});Module["preRun"]=[];Module["postRun"]=[];var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;if(Module["ENVIRONMENT"]){if(Module["ENVIRONMENT"]==="WEB"){ENVIRONMENT_IS_WEB=true}else if(Module["ENVIRONMENT"]==="WORKER"){ENVIRONMENT_IS_WORKER=true}else if(Module["ENVIRONMENT"]==="NODE"){ENVIRONMENT_IS_NODE=true}else if(Module["ENVIRONMENT"]==="SHELL"){ENVIRONMENT_IS_SHELL=true}else{throw new Error("Module['ENVIRONMENT'] value is not valid. 
must be one of: WEB|WORKER|NODE|SHELL.")}}else{ENVIRONMENT_IS_WEB=typeof window==="object";ENVIRONMENT_IS_WORKER=typeof importScripts==="function";ENVIRONMENT_IS_NODE=typeof process==="object"&&typeof null==="function"&&!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_WORKER;ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER}if(ENVIRONMENT_IS_NODE){var nodeFS;var nodePath;Module["read"]=function shell_read(filename,binary){var ret;if(!nodeFS)nodeFS=(null)("fs");if(!nodePath)nodePath=(null)("path");filename=nodePath["normalize"](filename);ret=nodeFS["readFileSync"](filename);return binary?ret:ret.toString()};Module["readBinary"]=function readBinary(filename){var ret=Module["read"](filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process["argv"].length>1){Module["thisProgram"]=process["argv"][1].replace(/\\/g,"/")}Module["arguments"]=process["argv"].slice(2);if(typeof module!=="undefined"){module["exports"]=Module}process["on"]("uncaughtException",(function(ex){if(!(ex instanceof ExitStatus)){throw ex}}));process["on"]("unhandledRejection",(function(reason,p){process["exit"](1)}));Module["inspect"]=(function(){return"[Emscripten Module object]"})}else if(ENVIRONMENT_IS_SHELL){if(typeof read!="undefined"){Module["read"]=function shell_read(f){return read(f)}}Module["readBinary"]=function readBinary(f){var data;if(typeof readbuffer==="function"){return new Uint8Array(readbuffer(f))}data=read(f,"binary");assert(typeof data==="object");return data};if(typeof scriptArgs!="undefined"){Module["arguments"]=scriptArgs}else if(typeof arguments!="undefined"){Module["arguments"]=arguments}if(typeof quit==="function"){Module["quit"]=(function(status,toThrow){quit(status)})}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){Module["read"]=function shell_read(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){Module["readBinary"]=function 
readBinary(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.responseType="arraybuffer";xhr.send(null);return new Uint8Array(xhr.response)}}Module["readAsync"]=function readAsync(url,onload,onerror){var xhr=new XMLHttpRequest;xhr.open("GET",url,true);xhr.responseType="arraybuffer";xhr.onload=function xhr_onload(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)};Module["setWindowTitle"]=(function(title){document.title=title})}else{throw new Error("not compiled for this environment")}Module["print"]=typeof console!=="undefined"?console.log.bind(console):typeof print!=="undefined"?print:null;Module["printErr"]=typeof printErr!=="undefined"?printErr:typeof console!=="undefined"&&console.warn.bind(console)||Module["print"];Module.print=Module["print"];Module.printErr=Module["printErr"];for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=undefined;var STACK_ALIGN=16;function staticAlloc(size){assert(!staticSealed);var ret=STATICTOP;STATICTOP=STATICTOP+size+15&-16;return ret}function alignMemory(size,factor){if(!factor)factor=STACK_ALIGN;var ret=size=Math.ceil(size/factor)*factor;return ret}var asm2wasmImports={"f64-rem":(function(x,y){return x%y}),"debugger":(function(){debugger})};var functionPointers=new Array(0);var GLOBAL_BASE=1024;var ABORT=0;var EXITSTATUS=0;function assert(condition,text){if(!condition){abort("Assertion failed: "+text)}}function Pointer_stringify(ptr,length){if(length===0||!ptr)return"";var hasUtf=0;var t;var i=0;while(1){t=HEAPU8[ptr+i>>0];hasUtf|=t;if(t==0&&!length)break;i++;if(length&&i==length)break}if(!length)length=i;var ret="";if(hasUtf<128){var MAX_CHUNK=1024;var curr;while(length>0){curr=String.fromCharCode.apply(String,HEAPU8.subarray(ptr,ptr+Math.min(length,MAX_CHUNK)));ret=ret?ret+curr:curr;ptr+=MAX_CHUNK;length-=MAX_CHUNK}return ret}return UTF8ToString(ptr)}var UTF8Decoder=typeof 
TextDecoder!=="undefined"?new TextDecoder("utf8"):undefined;function UTF8ArrayToString(u8Array,idx){var endPtr=idx;while(u8Array[endPtr])++endPtr;if(endPtr-idx>16&&u8Array.subarray&&UTF8Decoder){return UTF8Decoder.decode(u8Array.subarray(idx,endPtr))}else{var u0,u1,u2,u3,u4,u5;var str="";while(1){u0=u8Array[idx++];if(!u0)return str;if(!(u0&128)){str+=String.fromCharCode(u0);continue}u1=u8Array[idx++]&63;if((u0&224)==192){str+=String.fromCharCode((u0&31)<<6|u1);continue}u2=u8Array[idx++]&63;if((u0&240)==224){u0=(u0&15)<<12|u1<<6|u2}else{u3=u8Array[idx++]&63;if((u0&248)==240){u0=(u0&7)<<18|u1<<12|u2<<6|u3}else{u4=u8Array[idx++]&63;if((u0&252)==248){u0=(u0&3)<<24|u1<<18|u2<<12|u3<<6|u4}else{u5=u8Array[idx++]&63;u0=(u0&1)<<30|u1<<24|u2<<18|u3<<12|u4<<6|u5}}}if(u0<65536){str+=String.fromCharCode(u0)}else{var ch=u0-65536;str+=String.fromCharCode(55296|ch>>10,56320|ch&1023)}}}}function UTF8ToString(ptr){return UTF8ArrayToString(HEAPU8,ptr)}var UTF16Decoder=typeof TextDecoder!=="undefined"?new TextDecoder("utf-16le"):undefined;var WASM_PAGE_SIZE=65536;var ASMJS_PAGE_SIZE=16777216;function alignUp(x,multiple){if(x%multiple>0){x+=multiple-x%multiple}return x}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBuffer(buf){Module["buffer"]=buffer=buf}function updateGlobalBufferViews(){Module["HEAP8"]=HEAP8=new Int8Array(buffer);Module["HEAP16"]=HEAP16=new Int16Array(buffer);Module["HEAP32"]=HEAP32=new Int32Array(buffer);Module["HEAPU8"]=HEAPU8=new Uint8Array(buffer);Module["HEAPU16"]=HEAPU16=new Uint16Array(buffer);Module["HEAPU32"]=HEAPU32=new Uint32Array(buffer);Module["HEAPF32"]=HEAPF32=new Float32Array(buffer);Module["HEAPF64"]=HEAPF64=new Float64Array(buffer)}var STATIC_BASE,STATICTOP,staticSealed;var STACK_BASE,STACKTOP,STACK_MAX;var DYNAMIC_BASE,DYNAMICTOP_PTR;STATIC_BASE=STATICTOP=STACK_BASE=STACKTOP=STACK_MAX=DYNAMIC_BASE=DYNAMICTOP_PTR=0;staticSealed=false;function abortOnCannotGrowMemory(){abort("Cannot enlarge memory arrays. 
Either (1) compile with -s TOTAL_MEMORY=X with X higher than the current value "+TOTAL_MEMORY+", (2) compile with -s ALLOW_MEMORY_GROWTH=1 which allows increasing the size at runtime, or (3) if you want malloc to return NULL (0) instead of this abort, compile with -s ABORTING_MALLOC=0 ")}function enlargeMemory(){abortOnCannotGrowMemory()}var TOTAL_STACK=Module["TOTAL_STACK"]||5242880;var TOTAL_MEMORY=Module["TOTAL_MEMORY"]||52428800;if(TOTAL_MEMORY0){var callback=callbacks.shift();if(typeof callback=="function"){callback();continue}var func=callback.func;if(typeof func==="number"){if(callback.arg===undefined){Module["dynCall_v"](func)}else{Module["dynCall_vi"](func,callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATEXIT__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;var runtimeExited=false;function preRun(){if(Module["preRun"]){if(typeof Module["preRun"]=="function")Module["preRun"]=[Module["preRun"]];while(Module["preRun"].length){addOnPreRun(Module["preRun"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function ensureInitRuntime(){if(runtimeInitialized)return;runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){callRuntimeCallbacks(__ATMAIN__)}function exitRuntime(){callRuntimeCallbacks(__ATEXIT__);runtimeExited=true}function postRun(){if(Module["postRun"]){if(typeof Module["postRun"]=="function")Module["postRun"]=[Module["postRun"]];while(Module["postRun"].length){addOnPostRun(Module["postRun"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var Math_abs=Math.abs;var Math_cos=Math.cos;var Math_sin=Math.sin;var Math_tan=Math.tan;var Math_acos=Math.acos;var Math_asin=Math.asin;var Math_atan=Math.atan;var Math_atan2=Math.atan2;var Math_exp=Math.exp;var Math_log=Math.log;var Math_sqrt=Math.sqrt;var Math_ceil=Math.ceil;var Math_floor=Math.floor;var 
Math_pow=Math.pow;var Math_imul=Math.imul;var Math_fround=Math.fround;var Math_round=Math.round;var Math_min=Math.min;var Math_max=Math.max;var Math_clz32=Math.clz32;var Math_trunc=Math.trunc;var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){runDependencies++;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module["preloadedImages"]={};Module["preloadedAudios"]={};var dataURIPrefix="data:application/octet-stream;base64,";function isDataURI(filename){return String.prototype.startsWith?filename.startsWith(dataURIPrefix):filename.indexOf(dataURIPrefix)===0}function integrateWasmJS(){var wasmTextFile="avc.wast";var wasmBinaryFile="avc.wasm";var asmjsCodeFile="avc.temp.asm.js";if(typeof Module["locateFile"]==="function"){if(!isDataURI(wasmTextFile)){wasmTextFile=Module["locateFile"](wasmTextFile)}if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=Module["locateFile"](wasmBinaryFile)}if(!isDataURI(asmjsCodeFile)){asmjsCodeFile=Module["locateFile"](asmjsCodeFile)}}var wasmPageSize=64*1024;var info={"global":null,"env":null,"asm2wasm":asm2wasmImports,"parent":Module};var exports=null;function mergeMemory(newBuffer){var oldBuffer=Module["buffer"];if(newBuffer.byteLength>2];return ret}),getStr:(function(){var ret=Pointer_stringify(SYSCALLS.get());return ret}),get64:(function(){var low=SYSCALLS.get(),high=SYSCALLS.get();if(low>=0)assert(high===0);else assert(high===-1);return low}),getZero:(function(){assert(SYSCALLS.get()===0)})};function ___syscall140(which,varargs){SYSCALLS.varargs=varargs;try{var 
stream=SYSCALLS.getStreamFromFD(),offset_high=SYSCALLS.get(),offset_low=SYSCALLS.get(),result=SYSCALLS.get(),whence=SYSCALLS.get();var offset=offset_low;FS.llseek(stream,offset,whence);HEAP32[result>>2]=stream.position;if(stream.getdents&&offset===0&&whence===0)stream.getdents=null;return 0}catch(e){if(typeof FS==="undefined"||!(e instanceof FS.ErrnoError))abort(e);return-e.errno}}function ___syscall146(which,varargs){SYSCALLS.varargs=varargs;try{var stream=SYSCALLS.get(),iov=SYSCALLS.get(),iovcnt=SYSCALLS.get();var ret=0;if(!___syscall146.buffers){___syscall146.buffers=[null,[],[]];___syscall146.printChar=(function(stream,curr){var buffer=___syscall146.buffers[stream];assert(buffer);if(curr===0||curr===10){(stream===1?Module["print"]:Module["printErr"])(UTF8ArrayToString(buffer,0));buffer.length=0}else{buffer.push(curr)}})}for(var i=0;i>2];var len=HEAP32[iov+(i*8+4)>>2];for(var j=0;j>2]=value;return value}DYNAMICTOP_PTR=staticAlloc(4);STACK_BASE=STACKTOP=alignMemory(STATICTOP);STACK_MAX=STACK_BASE+TOTAL_STACK;DYNAMIC_BASE=alignMemory(STACK_MAX);HEAP32[DYNAMICTOP_PTR>>2]=DYNAMIC_BASE;staticSealed=true;Module["wasmTableSize"]=10;Module["wasmMaxTableSize"]=10;Module.asmGlobalArg={};Module.asmLibraryArg={"abort":abort,"enlargeMemory":enlargeMemory,"getTotalMemory":getTotalMemory,"abortOnCannotGrowMemory":abortOnCannotGrowMemory,"___setErrNo":___setErrNo,"___syscall140":___syscall140,"___syscall146":___syscall146,"___syscall54":___syscall54,"___syscall6":___syscall6,"_broadwayOnHeadersDecoded":_broadwayOnHeadersDecoded,"_broadwayOnPictureDecoded":_broadwayOnPictureDecoded,"_emscripten_memcpy_big":_emscripten_memcpy_big,"DYNAMICTOP_PTR":DYNAMICTOP_PTR,"STACKTOP":STACKTOP};var asm=Module["asm"](Module.asmGlobalArg,Module.asmLibraryArg,buffer);Module["asm"]=asm;var _broadwayCreateStream=Module["_broadwayCreateStream"]=(function(){return Module["asm"]["_broadwayCreateStream"].apply(null,arguments)});var _broadwayExit=Module["_broadwayExit"]=(function(){return 
Module["asm"]["_broadwayExit"].apply(null,arguments)});var _broadwayGetMajorVersion=Module["_broadwayGetMajorVersion"]=(function(){return Module["asm"]["_broadwayGetMajorVersion"].apply(null,arguments)});var _broadwayGetMinorVersion=Module["_broadwayGetMinorVersion"]=(function(){return Module["asm"]["_broadwayGetMinorVersion"].apply(null,arguments)});var _broadwayInit=Module["_broadwayInit"]=(function(){return Module["asm"]["_broadwayInit"].apply(null,arguments)});var _broadwayPlayStream=Module["_broadwayPlayStream"]=(function(){return Module["asm"]["_broadwayPlayStream"].apply(null,arguments)});Module["asm"]=asm;function ExitStatus(status){this.name="ExitStatus";this.message="Program terminated with exit("+status+")";this.status=status}ExitStatus.prototype=new Error;ExitStatus.prototype.constructor=ExitStatus;var initialStackTop;dependenciesFulfilled=function runCaller(){if(!Module["calledRun"])run();if(!Module["calledRun"])dependenciesFulfilled=runCaller};function run(args){args=args||Module["arguments"];if(runDependencies>0){return}preRun();if(runDependencies>0)return;if(Module["calledRun"])return;function doRun(){if(Module["calledRun"])return;Module["calledRun"]=true;if(ABORT)return;ensureInitRuntime();preMain();if(Module["onRuntimeInitialized"])Module["onRuntimeInitialized"]();postRun()}if(Module["setStatus"]){Module["setStatus"]("Running...");setTimeout((function(){setTimeout((function(){Module["setStatus"]("")}),1);doRun()}),1)}else{doRun()}}Module["run"]=run;function exit(status,implicit){if(implicit&&Module["noExitRuntime"]&&status===0){return}if(Module["noExitRuntime"]){}else{ABORT=true;EXITSTATUS=status;STACKTOP=initialStackTop;exitRuntime();if(Module["onExit"])Module["onExit"](status)}if(ENVIRONMENT_IS_NODE){process["exit"](status)}Module["quit"](status,new ExitStatus(status))}Module["exit"]=exit;function 
abort(what){if(Module["onAbort"]){Module["onAbort"](what)}if(what!==undefined){Module.print(what);Module.printErr(what);what=JSON.stringify(what)}else{what=""}ABORT=true;EXITSTATUS=1;throw"abort("+what+"). Build with -s ASSERTIONS=1 for more info."}Module["abort"]=abort;if(Module["preInit"]){if(typeof Module["preInit"]=="function")Module["preInit"]=[Module["preInit"]];while(Module["preInit"].length>0){Module["preInit"].pop()()}}Module["noExitRuntime"]=true;run() + + + + // return Module; + //})(); + + var resultModule; + if (typeof global !== "undefined"){ + if (global.Module){ + resultModule = global.Module; + }; + }; + if (typeof Module != "undefined"){ + resultModule = Module; + }; + + resultModule._broadwayOnHeadersDecoded = par_broadwayOnHeadersDecoded; + resultModule._broadwayOnPictureDecoded = par_broadwayOnPictureDecoded; + + var moduleIsReady = false; + var cbFun; + var moduleReady = function(){ + moduleIsReady = true; + if (cbFun){ + cbFun(resultModule); + } + }; + + resultModule.onRuntimeInitialized = function(){ + moduleReady(resultModule); + }; + return function(callback){ + if (moduleIsReady){ + callback(resultModule); + }else{ + cbFun = callback; + }; + }; + }; + + return (function(){ + "use strict"; + + + var nowValue = function(){ + return (new Date()).getTime(); + }; + + if (typeof performance != "undefined"){ + if (performance.now){ + nowValue = function(){ + return performance.now(); + }; + }; + }; + + + var Decoder = function(parOptions){ + this.options = parOptions || {}; + + this.now = nowValue; + + var asmInstance; + + var fakeWindow = { + }; + + var toU8Array; + var toU32Array; + + var onPicFun = function ($buffer, width, height) { + var buffer = this.pictureBuffers[$buffer]; + if (!buffer) { + buffer = this.pictureBuffers[$buffer] = toU8Array($buffer, (width * height * 3) / 2); + }; + + var infos; + var doInfo = false; + if (this.infoAr.length){ + doInfo = true; + infos = this.infoAr; + }; + this.infoAr = []; + + if (this.options.rgb){ + 
if (!asmInstance){ + asmInstance = getAsm(width, height); + }; + asmInstance.inp.set(buffer); + asmInstance.doit(); + + var copyU8 = new Uint8Array(asmInstance.outSize); + copyU8.set( asmInstance.out ); + + if (doInfo){ + infos[0].finishDecoding = nowValue(); + }; + + this.onPictureDecoded(copyU8, width, height, infos); + return; + + }; + + if (doInfo){ + infos[0].finishDecoding = nowValue(); + }; + this.onPictureDecoded(buffer, width, height, infos); + }.bind(this); + + var ignore = false; + + if (this.options.sliceMode){ + onPicFun = function ($buffer, width, height, $sliceInfo) { + if (ignore){ + return; + }; + var buffer = this.pictureBuffers[$buffer]; + if (!buffer) { + buffer = this.pictureBuffers[$buffer] = toU8Array($buffer, (width * height * 3) / 2); + }; + var sliceInfo = this.pictureBuffers[$sliceInfo]; + if (!sliceInfo) { + sliceInfo = this.pictureBuffers[$sliceInfo] = toU32Array($sliceInfo, 18); + }; + + var infos; + var doInfo = false; + if (this.infoAr.length){ + doInfo = true; + infos = this.infoAr; + }; + this.infoAr = []; + + /*if (this.options.rgb){ + + no rgb in slice mode + + };*/ + + infos[0].finishDecoding = nowValue(); + var sliceInfoAr = []; + for (var i = 0; i < 20; ++i){ + sliceInfoAr.push(sliceInfo[i]); + }; + infos[0].sliceInfoAr = sliceInfoAr; + + this.onPictureDecoded(buffer, width, height, infos); + }.bind(this); + }; + + var ModuleCallback = getModule.apply(fakeWindow, [function () { + }, onPicFun]); + + + var MAX_STREAM_BUFFER_LENGTH = 1024 * 1024; + + var instance = this; + this.onPictureDecoded = function (buffer, width, height, infos) { + + }; + + this.onDecoderReady = function(){}; + + var bufferedCalls = []; + this.decode = function decode(typedAr, parInfo, copyDoneFun) { + bufferedCalls.push([typedAr, parInfo, copyDoneFun]); + }; + + ModuleCallback(function(Module){ + var HEAP8 = Module.HEAP8; + var HEAPU8 = Module.HEAPU8; + var HEAP16 = Module.HEAP16; + var HEAP32 = Module.HEAP32; + // from old constructor + 
Module._broadwayInit(); + + /** + * Creates a typed array from a HEAP8 pointer. + */ + toU8Array = function(ptr, length) { + return HEAPU8.subarray(ptr, ptr + length); + }; + toU32Array = function(ptr, length) { + //var tmp = HEAPU8.subarray(ptr, ptr + (length * 4)); + return new Uint32Array(HEAPU8.buffer, ptr, length); + }; + instance.streamBuffer = toU8Array(Module._broadwayCreateStream(MAX_STREAM_BUFFER_LENGTH), MAX_STREAM_BUFFER_LENGTH); + instance.pictureBuffers = {}; + // collect extra infos that are provided with the nal units + instance.infoAr = []; + + /** + * Decodes a stream buffer. This may be one single (unframed) NAL unit without the + * start code, or a sequence of NAL units with framing start code prefixes. This + * function overwrites stream buffer allocated by the codec with the supplied buffer. + */ + + var sliceNum = 0; + if (instance.options.sliceMode){ + sliceNum = instance.options.sliceNum; + + instance.decode = function decode(typedAr, parInfo, copyDoneFun) { + instance.infoAr.push(parInfo); + parInfo.startDecoding = nowValue(); + var nals = parInfo.nals; + var i; + if (!nals){ + nals = []; + parInfo.nals = nals; + var l = typedAr.length; + var foundSomething = false; + var lastFound = 0; + var lastStart = 0; + for (i = 0; i < l; ++i){ + if (typedAr[i] === 1){ + if ( + typedAr[i - 1] === 0 && + typedAr[i - 2] === 0 + ){ + var startPos = i - 2; + if (typedAr[i - 3] === 0){ + startPos = i - 3; + }; + // its a nal; + if (foundSomething){ + nals.push({ + offset: lastFound, + end: startPos, + type: typedAr[lastStart] & 31 + }); + }; + lastFound = startPos; + lastStart = startPos + 3; + if (typedAr[i - 3] === 0){ + lastStart = startPos + 4; + }; + foundSomething = true; + }; + }; + }; + if (foundSomething){ + nals.push({ + offset: lastFound, + end: i, + type: typedAr[lastStart] & 31 + }); + }; + }; + + var currentSlice = 0; + var playAr; + var offset = 0; + for (i = 0; i < nals.length; ++i){ + if (nals[i].type === 1 || nals[i].type === 5){ + if 
(currentSlice === sliceNum){ + playAr = typedAr.subarray(nals[i].offset, nals[i].end); + instance.streamBuffer[offset] = 0; + offset += 1; + instance.streamBuffer.set(playAr, offset); + offset += playAr.length; + }; + currentSlice += 1; + }else{ + playAr = typedAr.subarray(nals[i].offset, nals[i].end); + instance.streamBuffer[offset] = 0; + offset += 1; + instance.streamBuffer.set(playAr, offset); + offset += playAr.length; + Module._broadwayPlayStream(offset); + offset = 0; + }; + }; + copyDoneFun(); + Module._broadwayPlayStream(offset); + }; + + }else{ + instance.decode = function decode(typedAr, parInfo) { + // console.info("Decoding: " + buffer.length); + // collect infos + if (parInfo){ + instance.infoAr.push(parInfo); + parInfo.startDecoding = nowValue(); + }; + + instance.streamBuffer.set(typedAr); + Module._broadwayPlayStream(typedAr.length); + }; + }; + + if (bufferedCalls.length){ + var bi = 0; + for (bi = 0; bi < bufferedCalls.length; ++bi){ + instance.decode(bufferedCalls[bi][0], bufferedCalls[bi][1], bufferedCalls[bi][2]); + }; + bufferedCalls = []; + }; + + instance.onDecoderReady(instance); + + }); + + + }; + + + Decoder.prototype = { + + }; + + + + + /* + + asm.js implementation of a yuv to rgb convertor + provided by @soliton4 + + based on + http://www.wordsaretoys.com/2013/10/18/making-yuv-conversion-a-little-faster/ + + */ + + + // factory to create asm.js yuv -> rgb convertor for a given resolution + var asmInstances = {}; + var getAsm = function(parWidth, parHeight){ + var idStr = "" + parWidth + "x" + parHeight; + if (asmInstances[idStr]){ + return asmInstances[idStr]; + }; + + var lumaSize = parWidth * parHeight; + var chromaSize = (lumaSize|0) >> 2; + + var inpSize = lumaSize + chromaSize + chromaSize; + var outSize = parWidth * parHeight * 4; + var cacheSize = Math.pow(2, 24) * 4; + var size = inpSize + outSize + cacheSize; + + var chunkSize = Math.pow(2, 24); + var heapSize = chunkSize; + while (heapSize < size){ + heapSize += chunkSize; + 
}; + var heap = new ArrayBuffer(heapSize); + + var res = asmFactory(global, {}, heap); + res.init(parWidth, parHeight); + asmInstances[idStr] = res; + + res.heap = heap; + res.out = new Uint8Array(heap, 0, outSize); + res.inp = new Uint8Array(heap, outSize, inpSize); + res.outSize = outSize; + + return res; + }; + + + function asmFactory(stdlib, foreign, heap) { + "use asm"; + + var imul = stdlib.Math.imul; + var min = stdlib.Math.min; + var max = stdlib.Math.max; + var pow = stdlib.Math.pow; + var out = new stdlib.Uint8Array(heap); + var out32 = new stdlib.Uint32Array(heap); + var inp = new stdlib.Uint8Array(heap); + var mem = new stdlib.Uint8Array(heap); + var mem32 = new stdlib.Uint32Array(heap); + + // for double algo + /*var vt = 1.370705; + var gt = 0.698001; + var gt2 = 0.337633; + var bt = 1.732446;*/ + + var width = 0; + var height = 0; + var lumaSize = 0; + var chromaSize = 0; + var inpSize = 0; + var outSize = 0; + + var inpStart = 0; + var outStart = 0; + + var widthFour = 0; + + var cacheStart = 0; + + + function init(parWidth, parHeight){ + parWidth = parWidth|0; + parHeight = parHeight|0; + + var i = 0; + var s = 0; + + width = parWidth; + widthFour = imul(parWidth, 4)|0; + height = parHeight; + lumaSize = imul(width|0, height|0)|0; + chromaSize = (lumaSize|0) >> 2; + outSize = imul(imul(width, height)|0, 4)|0; + inpSize = ((lumaSize + chromaSize)|0 + chromaSize)|0; + + outStart = 0; + inpStart = (outStart + outSize)|0; + cacheStart = (inpStart + inpSize)|0; + + // initializing memory (to be on the safe side) + s = ~~(+pow(+2, +24)); + s = imul(s, 4)|0; + + for (i = 0|0; ((i|0) < (s|0))|0; i = (i + 4)|0){ + mem32[((cacheStart + i)|0) >> 2] = 0; + }; + }; + + function doit(){ + var ystart = 0; + var ustart = 0; + var vstart = 0; + + var y = 0; + var yn = 0; + var u = 0; + var v = 0; + + var o = 0; + + var line = 0; + var col = 0; + + var usave = 0; + var vsave = 0; + + var ostart = 0; + var cacheAdr = 0; + + ostart = outStart|0; + + ystart = 
inpStart|0; + ustart = (ystart + lumaSize|0)|0; + vstart = (ustart + chromaSize)|0; + + for (line = 0; (line|0) < (height|0); line = (line + 2)|0){ + usave = ustart; + vsave = vstart; + for (col = 0; (col|0) < (width|0); col = (col + 2)|0){ + y = inp[ystart >> 0]|0; + yn = inp[((ystart + width)|0) >> 0]|0; + + u = inp[ustart >> 0]|0; + v = inp[vstart >> 0]|0; + + cacheAdr = (((((y << 16)|0) + ((u << 8)|0))|0) + v)|0; + o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0; + if (o){}else{ + o = yuv2rgbcalc(y,u,v)|0; + mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0; + }; + mem32[ostart >> 2] = o; + + cacheAdr = (((((yn << 16)|0) + ((u << 8)|0))|0) + v)|0; + o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0; + if (o){}else{ + o = yuv2rgbcalc(yn,u,v)|0; + mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0; + }; + mem32[((ostart + widthFour)|0) >> 2] = o; + + //yuv2rgb5(y, u, v, ostart); + //yuv2rgb5(yn, u, v, (ostart + widthFour)|0); + ostart = (ostart + 4)|0; + + // next step only for y. u and v stay the same + ystart = (ystart + 1)|0; + y = inp[ystart >> 0]|0; + yn = inp[((ystart + width)|0) >> 0]|0; + + //yuv2rgb5(y, u, v, ostart); + cacheAdr = (((((y << 16)|0) + ((u << 8)|0))|0) + v)|0; + o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0; + if (o){}else{ + o = yuv2rgbcalc(y,u,v)|0; + mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0; + }; + mem32[ostart >> 2] = o; + + //yuv2rgb5(yn, u, v, (ostart + widthFour)|0); + cacheAdr = (((((yn << 16)|0) + ((u << 8)|0))|0) + v)|0; + o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0; + if (o){}else{ + o = yuv2rgbcalc(yn,u,v)|0; + mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0; + }; + mem32[((ostart + widthFour)|0) >> 2] = o; + ostart = (ostart + 4)|0; + + //all positions inc 1 + + ystart = (ystart + 1)|0; + ustart = (ustart + 1)|0; + vstart = (vstart + 1)|0; + }; + ostart = (ostart + widthFour)|0; + ystart = (ystart + width)|0; + + }; + + }; + + function yuv2rgbcalc(y, u, v){ + y = y|0; + u = u|0; + v = v|0; + + var r = 0; + var g = 0; + var b = 0; + + var 
o = 0; + + var a0 = 0; + var a1 = 0; + var a2 = 0; + var a3 = 0; + var a4 = 0; + + a0 = imul(1192, (y - 16)|0)|0; + a1 = imul(1634, (v - 128)|0)|0; + a2 = imul(832, (v - 128)|0)|0; + a3 = imul(400, (u - 128)|0)|0; + a4 = imul(2066, (u - 128)|0)|0; + + r = (((a0 + a1)|0) >> 10)|0; + g = (((((a0 - a2)|0) - a3)|0) >> 10)|0; + b = (((a0 + a4)|0) >> 10)|0; + + if ((((r & 255)|0) != (r|0))|0){ + r = min(255, max(0, r|0)|0)|0; + }; + if ((((g & 255)|0) != (g|0))|0){ + g = min(255, max(0, g|0)|0)|0; + }; + if ((((b & 255)|0) != (b|0))|0){ + b = min(255, max(0, b|0)|0)|0; + }; + + o = 255; + o = (o << 8)|0; + o = (o + b)|0; + o = (o << 8)|0; + o = (o + g)|0; + o = (o << 8)|0; + o = (o + r)|0; + + return o|0; + + }; + + + + return { + init: init, + doit: doit + }; + }; + + + /* + potential worker initialization + + */ + + + if (typeof self != "undefined"){ + var isWorker = false; + var decoder; + var reuseMemory = false; + var sliceMode = false; + var sliceNum = 0; + var sliceCnt = 0; + var lastSliceNum = 0; + var sliceInfoAr; + var lastBuf; + var awaiting = 0; + var pile = []; + var startDecoding; + var finishDecoding; + var timeDecoding; + + var memAr = []; + var getMem = function(length){ + if (memAr.length){ + var u = memAr.shift(); + while (u && u.byteLength !== length){ + u = memAr.shift(); + }; + if (u){ + return u; + }; + }; + return new ArrayBuffer(length); + }; + + var copySlice = function(source, target, infoAr, width, height){ + + var length = width * height; + var length4 = length / 4 + var plane2 = length; + var plane3 = length + length4; + + var copy16 = function(parBegin, parEnd){ + var i = 0; + for (i = 0; i < 16; ++i){ + var begin = parBegin + (width * i); + var end = parEnd + (width * i) + target.set(source.subarray(begin, end), begin); + }; + }; + var copy8 = function(parBegin, parEnd){ + var i = 0; + for (i = 0; i < 8; ++i){ + var begin = parBegin + ((width / 2) * i); + var end = parEnd + ((width / 2) * i) + target.set(source.subarray(begin, end), 
begin); + }; + }; + var copyChunk = function(begin, end){ + target.set(source.subarray(begin, end), begin); + }; + + var begin = infoAr[0]; + var end = infoAr[1]; + if (end > 0){ + copy16(begin, end); + copy8(infoAr[2], infoAr[3]); + copy8(infoAr[4], infoAr[5]); + }; + begin = infoAr[6]; + end = infoAr[7]; + if (end > 0){ + copy16(begin, end); + copy8(infoAr[8], infoAr[9]); + copy8(infoAr[10], infoAr[11]); + }; + + begin = infoAr[12]; + end = infoAr[15]; + if (end > 0){ + copyChunk(begin, end); + copyChunk(infoAr[13], infoAr[16]); + copyChunk(infoAr[14], infoAr[17]); + }; + + }; + + var sliceMsgFun = function(){}; + + var setSliceCnt = function(parSliceCnt){ + sliceCnt = parSliceCnt; + lastSliceNum = sliceCnt - 1; + }; + + + self.addEventListener('message', function(e) { + + if (isWorker){ + if (reuseMemory){ + if (e.data.reuse){ + memAr.push(e.data.reuse); + }; + }; + if (e.data.buf){ + if (sliceMode && awaiting !== 0){ + pile.push(e.data); + }else{ + decoder.decode( + new Uint8Array(e.data.buf, e.data.offset || 0, e.data.length), + e.data.info, + function(){ + if (sliceMode && sliceNum !== lastSliceNum){ + postMessage(e.data, [e.data.buf]); + }; + } + ); + }; + return; + }; + + if (e.data.slice){ + // update ref pic + var copyStart = nowValue(); + copySlice(new Uint8Array(e.data.slice), lastBuf, e.data.infos[0].sliceInfoAr, e.data.width, e.data.height); + // is it the one? then we need to update it + if (e.data.theOne){ + copySlice(lastBuf, new Uint8Array(e.data.slice), sliceInfoAr, e.data.width, e.data.height); + if (timeDecoding > e.data.infos[0].timeDecoding){ + e.data.infos[0].timeDecoding = timeDecoding; + }; + e.data.infos[0].timeCopy += (nowValue() - copyStart); + }; + // move on + postMessage(e.data, [e.data.slice]); + + // next frame in the pipe? 
+ awaiting -= 1; + if (awaiting === 0 && pile.length){ + var data = pile.shift(); + decoder.decode( + new Uint8Array(data.buf, data.offset || 0, data.length), + data.info, + function(){ + if (sliceMode && sliceNum !== lastSliceNum){ + postMessage(data, [data.buf]); + }; + } + ); + }; + return; + }; + + if (e.data.setSliceCnt){ + setSliceCnt(e.data.sliceCnt); + return; + }; + + }else{ + if (e.data && e.data.type === "Broadway.js - Worker init"){ + isWorker = true; + decoder = new Decoder(e.data.options); + + if (e.data.options.sliceMode){ + reuseMemory = true; + sliceMode = true; + sliceNum = e.data.options.sliceNum; + setSliceCnt(e.data.options.sliceCnt); + + decoder.onPictureDecoded = function (buffer, width, height, infos) { + + // buffer needs to be copied because we give up ownership + var copyU8 = new Uint8Array(getMem(buffer.length)); + copySlice(buffer, copyU8, infos[0].sliceInfoAr, width, height); + + startDecoding = infos[0].startDecoding; + finishDecoding = infos[0].finishDecoding; + timeDecoding = finishDecoding - startDecoding; + infos[0].timeDecoding = timeDecoding; + infos[0].timeCopy = 0; + + postMessage({ + slice: copyU8.buffer, + sliceNum: sliceNum, + width: width, + height: height, + infos: infos + }, [copyU8.buffer]); // 2nd parameter is used to indicate transfer of ownership + + awaiting = sliceCnt - 1; + + lastBuf = buffer; + sliceInfoAr = infos[0].sliceInfoAr; + + }; + + }else if (e.data.options.reuseMemory){ + reuseMemory = true; + decoder.onPictureDecoded = function (buffer, width, height, infos) { + + // buffer needs to be copied because we give up ownership + var copyU8 = new Uint8Array(getMem(buffer.length)); + copyU8.set( buffer, 0, buffer.length ); + + postMessage({ + buf: copyU8.buffer, + length: buffer.length, + width: width, + height: height, + infos: infos + }, [copyU8.buffer]); // 2nd parameter is used to indicate transfer of ownership + + }; + + }else{ + decoder.onPictureDecoded = function (buffer, width, height, infos) { + if 
(buffer) { + buffer = new Uint8Array(buffer); + }; + + // buffer needs to be copied because we give up ownership + var copyU8 = new Uint8Array(buffer.length); + copyU8.set( buffer, 0, buffer.length ); + + postMessage({ + buf: copyU8.buffer, + length: buffer.length, + width: width, + height: height, + infos: infos + }, [copyU8.buffer]); // 2nd parameter is used to indicate transfer of ownership + + }; + }; + postMessage({ consoleLog: "broadway worker initialized" }); + }; + }; + + + }, false); + }; + + Decoder.nowValue = nowValue; + + return Decoder; + + })(); + + +})); + diff --git a/carsrun/templates/Player.js b/carsrun/templates/Player.js new file mode 100644 index 0000000..ff2e649 --- /dev/null +++ b/carsrun/templates/Player.js @@ -0,0 +1,335 @@ +/* + + +usage: + +p = new Player({ + useWorker: , + workerFile: // give path to Decoder.js + webgl: true | false | "auto" // defaults to "auto" +}); + +// canvas property represents the canvas node +// put it somewhere in the dom +p.canvas; + +p.webgl; // contains the used rendering mode. if you pass auto to webgl you can see what auto detection resulted in + +p.decode(); + + +*/ + + + +// universal module definition +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define(["./Decoder", "./YUVCanvas"], factory); + } else if (typeof exports === 'object') { + // Node. Does not work with strict CommonJS, but + // only CommonJS-like environments that support module.exports, + // like Node. 
+ module.exports = factory(require("./Decoder"), require("./YUVCanvas")); + } else { + // Browser globals (root is window) + root.Player = factory(root.Decoder, root.YUVCanvas); + } +}(this, function (Decoder, WebGLCanvas) { + "use strict"; + + + var nowValue = Decoder.nowValue; + + + var Player = function(parOptions){ + var self = this; + this._config = parOptions || {}; + + this.render = true; + if (this._config.render === false){ + this.render = false; + }; + + this.nowValue = nowValue; + + this._config.workerFile = this._config.workerFile || "Decoder.js"; + if (this._config.preserveDrawingBuffer){ + this._config.contextOptions = this._config.contextOptions || {}; + this._config.contextOptions.preserveDrawingBuffer = true; + }; + + var webgl = "auto"; + if (this._config.webgl === true){ + webgl = true; + }else if (this._config.webgl === false){ + webgl = false; + }; + + if (webgl == "auto"){ + webgl = true; + try{ + if (!window.WebGLRenderingContext) { + // the browser doesn't even know what WebGL is + webgl = false; + } else { + var canvas = document.createElement('canvas'); + var ctx = canvas.getContext("webgl"); + if (!ctx) { + // browser supports WebGL but initialization failed. 
+ webgl = false; + }; + }; + }catch(e){ + webgl = false; + }; + }; + + this.webgl = webgl; + + // choose functions + if (this.webgl){ + this.createCanvasObj = this.createCanvasWebGL; + this.renderFrame = this.renderFrameWebGL; + }else{ + this.createCanvasObj = this.createCanvasRGB; + this.renderFrame = this.renderFrameRGB; + }; + + + var lastWidth; + var lastHeight; + var onPictureDecoded = function(buffer, width, height, infos) { + self.onPictureDecoded(buffer, width, height, infos); + + var startTime = nowValue(); + + if (!buffer || !self.render) { + return; + }; + + self.renderFrame({ + canvasObj: self.canvasObj, + data: buffer, + width: width, + height: height + }); + + if (self.onRenderFrameComplete){ + self.onRenderFrameComplete({ + data: buffer, + width: width, + height: height, + infos: infos, + canvasObj: self.canvasObj + }); + }; + + }; + + // provide size + + if (!this._config.size){ + this._config.size = {}; + }; + this._config.size.width = this._config.size.width || 200; + this._config.size.height = this._config.size.height || 200; + + if (this._config.useWorker){ + var worker = new Worker(this._config.workerFile); + this.worker = worker; + worker.addEventListener('message', function(e) { + var data = e.data; + if (data.consoleLog){ + console.log(data.consoleLog); + return; + }; + + onPictureDecoded.call(self, new Uint8Array(data.buf, 0, data.length), data.width, data.height, data.infos); + + }, false); + + worker.postMessage({type: "Broadway.js - Worker init", options: { + rgb: !webgl, + memsize: this.memsize, + reuseMemory: this._config.reuseMemory ? true : false + }}); + + if (this._config.transferMemory){ + this.decode = function(parData, parInfo){ + // no copy + // instead we are transfering the ownership of the buffer + // dangerous!!! + + worker.postMessage({buf: parData.buffer, offset: parData.byteOffset, length: parData.length, info: parInfo}, [parData.buffer]); // Send data to our worker. 
+ }; + + }else{ + this.decode = function(parData, parInfo){ + // Copy the sample so that we only do a structured clone of the + // region of interest + var copyU8 = new Uint8Array(parData.length); + copyU8.set( parData, 0, parData.length ); + worker.postMessage({buf: copyU8.buffer, offset: 0, length: parData.length, info: parInfo}, [copyU8.buffer]); // Send data to our worker. + }; + + }; + + if (this._config.reuseMemory){ + this.recycleMemory = function(parArray){ + //this.beforeRecycle(); + worker.postMessage({reuse: parArray.buffer}, [parArray.buffer]); // Send data to our worker. + //this.afterRecycle(); + }; + } + + }else{ + + this.decoder = new Decoder({ + rgb: !webgl + }); + this.decoder.onPictureDecoded = onPictureDecoded; + + this.decode = function(parData, parInfo){ + self.decoder.decode(parData, parInfo); + }; + + }; + + + + if (this.render){ + this.canvasObj = this.createCanvasObj({ + contextOptions: this._config.contextOptions + }); + this.canvas = this.canvasObj.canvas; + }; + + this.domNode = this.canvas; + + lastWidth = this._config.size.width; + lastHeight = this._config.size.height; + + }; + + Player.prototype = { + + onPictureDecoded: function(buffer, width, height, infos){}, + + // call when memory of decoded frames is not used anymore + recycleMemory: function(buf){ + }, + /*beforeRecycle: function(){}, + afterRecycle: function(){},*/ + + // for both functions options is: + // + // width + // height + // enableScreenshot + // + // returns a object that has a property canvas which is a html5 canvas + createCanvasWebGL: function(options){ + var canvasObj = this._createBasicCanvasObj(options); + canvasObj.contextOptions = options.contextOptions; + return canvasObj; + }, + + createCanvasRGB: function(options){ + var canvasObj = this._createBasicCanvasObj(options); + return canvasObj; + }, + + // part that is the same for webGL and RGB + _createBasicCanvasObj: function(options){ + options = options || {}; + + var obj = {}; + var width = 
options.width; + if (!width){ + width = this._config.size.width; + }; + var height = options.height; + if (!height){ + height = this._config.size.height; + }; + obj.canvas = document.createElement('canvas'); + obj.canvas.width = width; + obj.canvas.height = height; + obj.canvas.style.backgroundColor = "#0D0E1B"; + + + return obj; + }, + + // options: + // + // canvas + // data + renderFrameWebGL: function(options){ + + var canvasObj = options.canvasObj; + + var width = options.width || canvasObj.canvas.width; + var height = options.height || canvasObj.canvas.height; + + if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height || !canvasObj.webGLCanvas){ + canvasObj.canvas.width = width; + canvasObj.canvas.height = height; + canvasObj.webGLCanvas = new WebGLCanvas({ + canvas: canvasObj.canvas, + contextOptions: canvasObj.contextOptions, + width: width, + height: height + }); + }; + + var ylen = width * height; + var uvlen = (width / 2) * (height / 2); + + canvasObj.webGLCanvas.drawNextOutputPicture({ + yData: options.data.subarray(0, ylen), + uData: options.data.subarray(ylen, ylen + uvlen), + vData: options.data.subarray(ylen + uvlen, ylen + uvlen + uvlen) + }); + + var self = this; + self.recycleMemory(options.data); + + }, + renderFrameRGB: function(options){ + var canvasObj = options.canvasObj; + + var width = options.width || canvasObj.canvas.width; + var height = options.height || canvasObj.canvas.height; + + if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height){ + canvasObj.canvas.width = width; + canvasObj.canvas.height = height; + }; + + var ctx = canvasObj.ctx; + var imgData = canvasObj.imgData; + + if (!ctx){ + canvasObj.ctx = canvasObj.canvas.getContext('2d'); + ctx = canvasObj.ctx; + + canvasObj.imgData = ctx.createImageData(width, height); + imgData = canvasObj.imgData; + }; + + imgData.data.set(options.data); + ctx.putImageData(imgData, 0, 0); + var self = this; + self.recycleMemory(options.data); + + } + + }; + 
+ return Player; + +})); + diff --git a/carsrun/templates/YUVCanvas.js b/carsrun/templates/YUVCanvas.js new file mode 100644 index 0000000..1bca8a3 --- /dev/null +++ b/carsrun/templates/YUVCanvas.js @@ -0,0 +1,551 @@ +// +// Copyright (c) 2015 Paperspace Co. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to +// deal in the Software without restriction, including without limitation the +// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +// sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +// IN THE SOFTWARE. +// + + +// universal module definition +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define([], factory); + } else if (typeof exports === 'object') { + // Node. Does not work with strict CommonJS, but + // only CommonJS-like environments that support module.exports, + // like Node. + module.exports = factory(); + } else { + // Browser globals (root is window) + root.YUVCanvas = factory(); + } +}(this, function () { + + +/** + * This class can be used to render output pictures from an H264bsdDecoder to a canvas element. 
+ * If available the content is rendered using WebGL. + */ + function YUVCanvas(parOptions) { + + parOptions = parOptions || {}; + + this.canvasElement = parOptions.canvas || document.createElement("canvas"); + this.contextOptions = parOptions.contextOptions; + + this.type = parOptions.type || "yuv420"; + + this.customYUV444 = parOptions.customYUV444; + + this.conversionType = parOptions.conversionType || "rec601"; + + this.width = parOptions.width || 640; + this.height = parOptions.height || 320; + + this.animationTime = parOptions.animationTime || 0; + + this.canvasElement.width = this.width; + this.canvasElement.height = this.height; + + this.initContextGL(); + + if(this.contextGL) { + this.initProgram(); + this.initBuffers(); + this.initTextures(); + }; + + +/** + * Draw the next output picture using WebGL + */ + if (this.type === "yuv420"){ + this.drawNextOuptutPictureGL = function(par) { + var gl = this.contextGL; + var texturePosBuffer = this.texturePosBuffer; + var uTexturePosBuffer = this.uTexturePosBuffer; + var vTexturePosBuffer = this.vTexturePosBuffer; + + var yTextureRef = this.yTextureRef; + var uTextureRef = this.uTextureRef; + var vTextureRef = this.vTextureRef; + + var yData = par.yData; + var uData = par.uData; + var vData = par.vData; + + var width = this.width; + var height = this.height; + + var yDataPerRow = par.yDataPerRow || width; + var yRowCnt = par.yRowCnt || height; + + var uDataPerRow = par.uDataPerRow || (width / 2); + var uRowCnt = par.uRowCnt || (height / 2); + + var vDataPerRow = par.vDataPerRow || uDataPerRow; + var vRowCnt = par.vRowCnt || uRowCnt; + + gl.viewport(0, 0, width, height); + + var tTop = 0; + var tLeft = 0; + var tBottom = height / yRowCnt; + var tRight = width / yDataPerRow; + var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); + + gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer); + gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW); + + if 
(this.customYUV444){ + tBottom = height / uRowCnt; + tRight = width / uDataPerRow; + }else{ + tBottom = (height / 2) / uRowCnt; + tRight = (width / 2) / uDataPerRow; + }; + var uTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); + + gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer); + gl.bufferData(gl.ARRAY_BUFFER, uTexturePosValues, gl.DYNAMIC_DRAW); + + + if (this.customYUV444){ + tBottom = height / vRowCnt; + tRight = width / vDataPerRow; + }else{ + tBottom = (height / 2) / vRowCnt; + tRight = (width / 2) / vDataPerRow; + }; + var vTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); + + gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer); + gl.bufferData(gl.ARRAY_BUFFER, vTexturePosValues, gl.DYNAMIC_DRAW); + + + gl.activeTexture(gl.TEXTURE0); + gl.bindTexture(gl.TEXTURE_2D, yTextureRef); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, yDataPerRow, yRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, yData); + + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, uTextureRef); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, uDataPerRow, uRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, uData); + + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, vTextureRef); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, vDataPerRow, vRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, vData); + + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + }; + + }else if (this.type === "yuv422"){ + this.drawNextOuptutPictureGL = function(par) { + var gl = this.contextGL; + var texturePosBuffer = this.texturePosBuffer; + + var textureRef = this.textureRef; + + var data = par.data; + + var width = this.width; + var height = this.height; + + var dataPerRow = par.dataPerRow || (width * 2); + var rowCnt = par.rowCnt || height; + + gl.viewport(0, 0, width, height); + + var tTop = 0; + var tLeft = 0; + var tBottom = height / rowCnt; + var tRight = width / (dataPerRow / 2); + var texturePosValues = new 
Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); + + gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer); + gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW); + + gl.uniform2f(gl.getUniformLocation(this.shaderProgram, 'resolution'), dataPerRow, height); + + gl.activeTexture(gl.TEXTURE0); + gl.bindTexture(gl.TEXTURE_2D, textureRef); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, dataPerRow, rowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data); + + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + }; + }; + + }; + + /** + * Returns true if the canvas supports WebGL + */ + YUVCanvas.prototype.isWebGL = function() { + return this.contextGL; + }; + + /** + * Create the GL context from the canvas element + */ + YUVCanvas.prototype.initContextGL = function() { + var canvas = this.canvasElement; + var gl = null; + + var validContextNames = ["webgl", "experimental-webgl", "moz-webgl", "webkit-3d"]; + var nameIndex = 0; + + while(!gl && nameIndex < validContextNames.length) { + var contextName = validContextNames[nameIndex]; + + try { + if (this.contextOptions){ + gl = canvas.getContext(contextName, this.contextOptions); + }else{ + gl = canvas.getContext(contextName); + }; + } catch (e) { + gl = null; + } + + if(!gl || typeof gl.getParameter !== "function") { + gl = null; + } + + ++nameIndex; + }; + + this.contextGL = gl; + }; + +/** + * Initialize GL shader program + */ +YUVCanvas.prototype.initProgram = function() { + var gl = this.contextGL; + + // vertex shader is the same for all types + var vertexShaderScript; + var fragmentShaderScript; + + if (this.type === "yuv420"){ + + vertexShaderScript = [ + 'attribute vec4 vertexPos;', + 'attribute vec4 texturePos;', + 'attribute vec4 uTexturePos;', + 'attribute vec4 vTexturePos;', + 'varying vec2 textureCoord;', + 'varying vec2 uTextureCoord;', + 'varying vec2 vTextureCoord;', + + 'void main()', + '{', + ' gl_Position = vertexPos;', + ' textureCoord = texturePos.xy;', + ' uTextureCoord = 
uTexturePos.xy;', + ' vTextureCoord = vTexturePos.xy;', + '}' + ].join('\n'); + + fragmentShaderScript = [ + 'precision highp float;', + 'varying highp vec2 textureCoord;', + 'varying highp vec2 uTextureCoord;', + 'varying highp vec2 vTextureCoord;', + 'uniform sampler2D ySampler;', + 'uniform sampler2D uSampler;', + 'uniform sampler2D vSampler;', + 'uniform mat4 YUV2RGB;', + + 'void main(void) {', + ' highp float y = texture2D(ySampler, textureCoord).r;', + ' highp float u = texture2D(uSampler, uTextureCoord).r;', + ' highp float v = texture2D(vSampler, vTextureCoord).r;', + ' gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;', + '}' + ].join('\n'); + + }else if (this.type === "yuv422"){ + vertexShaderScript = [ + 'attribute vec4 vertexPos;', + 'attribute vec4 texturePos;', + 'varying vec2 textureCoord;', + + 'void main()', + '{', + ' gl_Position = vertexPos;', + ' textureCoord = texturePos.xy;', + '}' + ].join('\n'); + + fragmentShaderScript = [ + 'precision highp float;', + 'varying highp vec2 textureCoord;', + 'uniform sampler2D sampler;', + 'uniform highp vec2 resolution;', + 'uniform mat4 YUV2RGB;', + + 'void main(void) {', + + ' highp float texPixX = 1.0 / resolution.x;', + ' highp float logPixX = 2.0 / resolution.x;', // half the resolution of the texture + ' highp float logHalfPixX = 4.0 / resolution.x;', // half of the logical resolution so every 4th pixel + ' highp float steps = floor(textureCoord.x / logPixX);', + ' highp float uvSteps = floor(textureCoord.x / logHalfPixX);', + ' highp float y = texture2D(sampler, vec2((logPixX * steps) + texPixX, textureCoord.y)).r;', + ' highp float u = texture2D(sampler, vec2((logHalfPixX * uvSteps), textureCoord.y)).r;', + ' highp float v = texture2D(sampler, vec2((logHalfPixX * uvSteps) + texPixX + texPixX, textureCoord.y)).r;', + + //' highp float y = texture2D(sampler, textureCoord).r;', + //' gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;', + ' gl_FragColor = vec4(y, u, v, 1.0) * YUV2RGB;', + '}' + ].join('\n'); + }; + + 
var YUV2RGB = []; + + if (this.conversionType == "rec709") { + // ITU-T Rec. 709 + YUV2RGB = [ + 1.16438, 0.00000, 1.79274, -0.97295, + 1.16438, -0.21325, -0.53291, 0.30148, + 1.16438, 2.11240, 0.00000, -1.13340, + 0, 0, 0, 1, + ]; + } else { + // assume ITU-T Rec. 601 + YUV2RGB = [ + 1.16438, 0.00000, 1.59603, -0.87079, + 1.16438, -0.39176, -0.81297, 0.52959, + 1.16438, 2.01723, 0.00000, -1.08139, + 0, 0, 0, 1 + ]; + }; + + var vertexShader = gl.createShader(gl.VERTEX_SHADER); + gl.shaderSource(vertexShader, vertexShaderScript); + gl.compileShader(vertexShader); + if(!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) { + console.log('Vertex shader failed to compile: ' + gl.getShaderInfoLog(vertexShader)); + } + + var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER); + gl.shaderSource(fragmentShader, fragmentShaderScript); + gl.compileShader(fragmentShader); + if(!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) { + console.log('Fragment shader failed to compile: ' + gl.getShaderInfoLog(fragmentShader)); + } + + var program = gl.createProgram(); + gl.attachShader(program, vertexShader); + gl.attachShader(program, fragmentShader); + gl.linkProgram(program); + if(!gl.getProgramParameter(program, gl.LINK_STATUS)) { + console.log('Program failed to compile: ' + gl.getProgramInfoLog(program)); + } + + gl.useProgram(program); + + var YUV2RGBRef = gl.getUniformLocation(program, 'YUV2RGB'); + gl.uniformMatrix4fv(YUV2RGBRef, false, YUV2RGB); + + this.shaderProgram = program; +}; + +/** + * Initialize vertex buffers and attach to shader program + */ +YUVCanvas.prototype.initBuffers = function() { + var gl = this.contextGL; + var program = this.shaderProgram; + + var vertexPosBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 1, -1, 1, 1, -1, -1, -1]), gl.STATIC_DRAW); + + var vertexPosRef = gl.getAttribLocation(program, 'vertexPos'); + 
gl.enableVertexAttribArray(vertexPosRef); + gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0); + + if (this.animationTime){ + + var animationTime = this.animationTime; + var timePassed = 0; + var stepTime = 15; + + var aniFun = function(){ + + timePassed += stepTime; + var mul = ( 1 * timePassed ) / animationTime; + + if (timePassed >= animationTime){ + mul = 1; + }else{ + setTimeout(aniFun, stepTime); + }; + + var neg = -1 * mul; + var pos = 1 * mul; + + var vertexPosBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([pos, pos, neg, pos, pos, neg, neg, neg]), gl.STATIC_DRAW); + + var vertexPosRef = gl.getAttribLocation(program, 'vertexPos'); + gl.enableVertexAttribArray(vertexPosRef); + gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0); + + try{ + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + }catch(e){}; + + }; + aniFun(); + + }; + + + + var texturePosBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW); + + var texturePosRef = gl.getAttribLocation(program, 'texturePos'); + gl.enableVertexAttribArray(texturePosRef); + gl.vertexAttribPointer(texturePosRef, 2, gl.FLOAT, false, 0, 0); + + this.texturePosBuffer = texturePosBuffer; + + if (this.type === "yuv420"){ + var uTexturePosBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW); + + var uTexturePosRef = gl.getAttribLocation(program, 'uTexturePos'); + gl.enableVertexAttribArray(uTexturePosRef); + gl.vertexAttribPointer(uTexturePosRef, 2, gl.FLOAT, false, 0, 0); + + this.uTexturePosBuffer = uTexturePosBuffer; + + + var vTexturePosBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 
1]), gl.STATIC_DRAW); + + var vTexturePosRef = gl.getAttribLocation(program, 'vTexturePos'); + gl.enableVertexAttribArray(vTexturePosRef); + gl.vertexAttribPointer(vTexturePosRef, 2, gl.FLOAT, false, 0, 0); + + this.vTexturePosBuffer = vTexturePosBuffer; + }; + +}; + +/** + * Initialize GL textures and attach to shader program + */ +YUVCanvas.prototype.initTextures = function() { + var gl = this.contextGL; + var program = this.shaderProgram; + + if (this.type === "yuv420"){ + + var yTextureRef = this.initTexture(); + var ySamplerRef = gl.getUniformLocation(program, 'ySampler'); + gl.uniform1i(ySamplerRef, 0); + this.yTextureRef = yTextureRef; + + var uTextureRef = this.initTexture(); + var uSamplerRef = gl.getUniformLocation(program, 'uSampler'); + gl.uniform1i(uSamplerRef, 1); + this.uTextureRef = uTextureRef; + + var vTextureRef = this.initTexture(); + var vSamplerRef = gl.getUniformLocation(program, 'vSampler'); + gl.uniform1i(vSamplerRef, 2); + this.vTextureRef = vTextureRef; + + }else if (this.type === "yuv422"){ + // only one texture for 422 + var textureRef = this.initTexture(); + var samplerRef = gl.getUniformLocation(program, 'sampler'); + gl.uniform1i(samplerRef, 0); + this.textureRef = textureRef; + + }; +}; + +/** + * Create and configure a single texture + */ +YUVCanvas.prototype.initTexture = function() { + var gl = this.contextGL; + + var textureRef = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, textureRef); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.bindTexture(gl.TEXTURE_2D, null); + + return textureRef; +}; + +/** + * Draw picture data to the canvas. + * If this object is using WebGL, the data must be an I420 formatted ArrayBuffer, + * Otherwise, data must be an RGBA formatted ArrayBuffer. 
+ */ +YUVCanvas.prototype.drawNextOutputPicture = function(width, height, croppingParams, data) { + var gl = this.contextGL; + + if(gl) { + this.drawNextOuptutPictureGL(width, height, croppingParams, data); + } else { + this.drawNextOuptutPictureRGBA(width, height, croppingParams, data); + } +}; + + + +/** + * Draw next output picture using ARGB data on a 2d canvas. + */ +YUVCanvas.prototype.drawNextOuptutPictureRGBA = function(width, height, croppingParams, data) { + var canvas = this.canvasElement; + + var croppingParams = null; + + var argbData = data; + + var ctx = canvas.getContext('2d'); + var imageData = ctx.getImageData(0, 0, width, height); + imageData.data.set(argbData); + + if(croppingParams === null) { + ctx.putImageData(imageData, 0, 0); + } else { + ctx.putImageData(imageData, -croppingParams.left, -croppingParams.top, 0, 0, croppingParams.width, croppingParams.height); + } +}; + + return YUVCanvas; + +})); diff --git a/carsrun/templates/avc.wasm b/carsrun/templates/avc.wasm new file mode 100644 index 0000000..378ac32 Binary files /dev/null and b/carsrun/templates/avc.wasm differ diff --git a/carsrun/templates/index-t.html b/carsrun/templates/index-t.html new file mode 100644 index 0000000..b0afa2a --- /dev/null +++ b/carsrun/templates/index-t.html @@ -0,0 +1,174 @@ + + + + + + + + Live Video Based on Flask + + + + + +

小车控制界面

+ + 小车控制 +
+ +
+ +
+
+ +
+ +
+ + + +
+
+ +
+
+ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/carsrun/templates/index.html b/carsrun/templates/index.html new file mode 100644 index 0000000..385df2a --- /dev/null +++ b/carsrun/templates/index.html @@ -0,0 +1,187 @@ + + + + + 小车拍摄画面 + + + + + + +

MJRoBot Lab Live Streaming

+
+ + +
+
+

小车控制界面

+ + 小车控制 +
+ +
+ +
+
+ + + +
+
+ +
+
+ +
+
+ + + + + + + + + + + + + + + diff --git a/carsrun/templates/index2.html b/carsrun/templates/index2.html new file mode 100644 index 0000000..1694c9f --- /dev/null +++ b/carsrun/templates/index2.html @@ -0,0 +1,19 @@ + + + + + 小车拍摄画面 + + + +

MJRoBot Lab Live Streaming

+

+
+

小车控制界面

+ + 小车控制 + + diff --git a/carsrun/templates/index3.html b/carsrun/templates/index3.html new file mode 100644 index 0000000..bcb2b84 --- /dev/null +++ b/carsrun/templates/index3.html @@ -0,0 +1,34 @@ + + + + +PiCamera H264 Streaming + + +

PiCamera H264 Streaming

+
+ + + + + + diff --git a/carsrun/templates/style.css b/carsrun/templates/style.css new file mode 100644 index 0000000..6030dec --- /dev/null +++ b/carsrun/templates/style.css @@ -0,0 +1,9 @@ + + +body{ + background: white; + color: black; + padding:1%; + text-align: center; +} +