jk_20241024-17:58

chw
陈@jkccchen123 4 weeks ago
parent 1453c4cfce
commit c337aebae6

@@ -0,0 +1,67 @@
#!/usr/bin/env python3
# vuquangtrong.github.io
import io
import picamera
import time
from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer
from wsgiref.simple_server import make_server
from ws4py.websocket import WebSocket
from ws4py.server.wsgirefserver import WSGIServer, WebSocketWSGIHandler, WebSocketWSGIRequestHandler
from ws4py.server.wsgiutils import WebSocketWSGIApplication
from threading import Thread, Condition
class FrameBuffer(object):
def __init__(self):
self.frame = None
self.buffer = io.BytesIO()
self.condition = Condition()
def write(self, buf):
if buf.startswith(b'\x00\x00\x00\x01'):
with self.condition:
self.buffer.seek(0)
self.buffer.write(buf)
self.buffer.truncate()
self.frame = self.buffer.getvalue()
self.condition.notify_all()
def stream():
with picamera.PiCamera(resolution='640x480', framerate=24) as camera:
broadcasting = True
frame_buffer = FrameBuffer()
camera.start_recording(frame_buffer, format='h264', profile="baseline")
try:
WebSocketWSGIHandler.http_version = '1.1'
websocketd = make_server('', 9000, server_class=WSGIServer,
handler_class=WebSocketWSGIRequestHandler,
app=WebSocketWSGIApplication(handler_cls=WebSocket))
websocketd.initialize_websockets_manager()
websocketd_thread = Thread(target=websocketd.serve_forever)
httpd = ThreadingHTTPServer(('', 8000), SimpleHTTPRequestHandler)
httpd_thread = Thread(target=httpd.serve_forever)
try:
websocketd_thread.start()
httpd_thread.start()
while broadcasting:
with frame_buffer.condition:
frame_buffer.condition.wait()
websocketd.manager.broadcast(frame_buffer.frame, binary=True)
except KeyboardInterrupt:
pass
finally:
websocketd.shutdown()
httpd.shutdown()
broadcasting = False
raise KeyboardInterrupt
except KeyboardInterrupt:
pass
finally:
camera.stop_recording()
if __name__ == "__main__":
stream()
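
For reference, a minimal client-side check (hypothetical, not part of this commit): it assumes the third-party `websockets` package (pip install websockets) and a reachable Pi, and simply confirms that each broadcast unit starts with the Annex B start code that FrameBuffer.write() keys on.

# verify_ws_stream.py -- hypothetical helper, a sketch only.
import asyncio
import websockets

PI_HOST = "raspberrypi.local"  # assumption: replace with your Pi's address

async def main():
    async with websockets.connect(f"ws://{PI_HOST}:9000") as ws:
        for _ in range(10):
            frame = await ws.recv()  # one H.264 unit per broadcast (binary=True on the server)
            # FrameBuffer.write() only flushes buffers that begin with the Annex B start code
            print(len(frame), frame[:4] == b"\x00\x00\x00\x01")

asyncio.run(main())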

@@ -0,0 +1,136 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# appCam.py
# based on tutorial ==> https://blog.miguelgrinberg.com/post/video-streaming-with-flask
# PiCam Local Web Server with Flask
# MJRoBot.org 19Jan18
from flask import Flask, render_template, Response, redirect, url_for, send_file, jsonify, request
# Raspberry Pi camera module (requires picamera package)
#from camera_pi import Camera
from picamera2 import Picamera2
import os
import time
from gevent import pywsgi
from car import CAR
#from PiCamera_H264_Server import stream
import threading
import cv2
app = Flask(__name__, static_url_path='')
def gen_frames(): # generate frame by frame from camera
picam2 = Picamera2()
picam2.configure(picam2.create_video_configuration(main={"format": 'XRGB8888', "size": (640, 480)}))
picam2.start()
while True:
# Capture frame-by-frame
frame = picam2.capture_array() # read the camera frame
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
ret, buffer = cv2.imencode('.jpg', frame)
frame = buffer.tobytes()
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n') # emit each JPEG as one part of the multipart stream
@app.route('/')
def index():
return render_template('index-t.html')
@app.route('/video_feed')
def video_feed():
#Video streaming route. Put this in the src attribute of an img tag
return Response(gen_frames(), mimetype='multipart/x-mixed-replace; boundary=frame')
# def gen(camera):
# """Video streaming generator function."""
# while True:
# frame = camera.get_frame()
# yield (b'--frame\r\n'
# b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
# @app.route('/capture')
# def capture():
# pic = open("qrcode.png", "wb")
# frame = Camera().get_frame()
# pic.write(frame)
# return Response(b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n',
# mimetype='multipart/x-mixed-replace; boundary=frame')
# #return send_file("qrcode.png", mimetype='image/png')
# #return redirect(url_for('index'))
# @app.route('/video_feed')
# def video_feed():
# """Video streaming route. Put this in the src attribute of an img tag."""
# return Response(gen(Camera()),
# mimetype='multipart/x-mixed-replace; boundary=frame')
car = CAR()
@app.route('/control/')
def control_index():
word = """指令:\n
/led: 灯光闪烁\n
/led_light: 打开全部灯光\n
/led_dark: 关闭全部灯光\n
/stop(/Q): 小车停止运动\n
/forward(/W): 小车开始运动\n
/back(/S): 小车向后运动\n
/left(/A): 小车向左运动\n
/right(/D): 小车向右运动\n"""
print(word)
return word
# def execute_forward_function():
# url = "http://192.168.185.242:80/send_command" # example URL; replace with the real server address
# data = {"command": 'base -c {"T":1,"L":0.5,"R":0.5}'} # request body
# # send the request and print the response (needs the `requests` package, not Flask's request)
# try:
# response = requests.post(url, data=data)
# print(response.text) # print what the server returned
# except requests.exceptions.RequestException as e:
# print(f"request failed: {e}")
# print("executing forward function")
# # return some result
# return "forward command executed"
# @app.route('/control/forward', methods=['GET'])
# def control_forward():
# try:
# # read the data sent by the front end
# data = request.args
# # debug: print the received data
# print("received data:", data)
# # run the handler
# result = execute_forward_function() # make sure this is callable; print its return value if needed
# # return a JSON response; result must be JSON-serializable
# return jsonify({
# "resultCode": 200,
# "message": "request succeeded",
# "data": result
# })
# except Exception as e:
# print(f"error occurred: {e}") # log the exception
# return jsonify({"resultCode": 500, "message": "internal server error"}), 500 # return HTTP 500
@app.route('/control/<path:info>')
def fun(info):
if hasattr(car, info):
getattr(car, info)()
return 'Run: '+info+'\n'
else:
return 'Error: '+info+' is not defined\n'
if __name__ == '__main__':
app.run(host='0.0.0.0', port=80, debug=True, threaded=True)
# t = threading.Thread(target=stream)
# t.start()
# server = pywsgi.WSGIServer(('0.0.0.0', 80), app)
# server.serve_forever()

@@ -0,0 +1,31 @@
from flask import Flask, render_template, Response
from picamera2 import Picamera2
import time
import cv2
app = Flask(__name__)
def gen_frames(): # generate frame by frame from camera
picam2 = Picamera2()
picam2.configure(picam2.create_video_configuration(main={"format": 'XRGB8888', "size": (640, 480)}))
picam2.start()
while True:
# Capture frame-by-frame
frame = picam2.capture_array() # read the camera frame
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
ret, buffer = cv2.imencode('.jpg', frame)
frame = buffer.tobytes()
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n') # emit each JPEG as one part of the multipart stream
@app.route('/')
def index():
return render_template('index.html')
@app.route('/video_feed')
def video_feed():
#Video streaming route. Put this in the src attribute of an img tag
return Response(gen_frames(), mimetype='multipart/x-mixed-replace; boundary=frame')
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000, debug=True)
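
A hedged way to sanity-check the /video_feed endpoint from a desktop, assuming an OpenCV build with FFmpeg so that VideoCapture can demux the multipart MJPEG stream served above:

# mjpeg_grab.py -- hypothetical viewer, not part of this commit.
import cv2

PI_HOST = "raspberrypi.local"  # assumption: replace with your Pi's address
cap = cv2.VideoCapture(f"http://{PI_HOST}:5000/video_feed")

ok, frame = cap.read()  # one decoded JPEG part from the stream
if ok:
    cv2.imwrite("snapshot.jpg", frame)
cap.release()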

@@ -0,0 +1,286 @@
import serial
import json
import queue
import threading
import yaml
import os
import time
import glob
import numpy as np
curpath = os.path.realpath(__file__)
thisPath = os.path.dirname(curpath)
with open(thisPath + '/config.yaml', 'r') as yaml_file:
f = yaml.safe_load(yaml_file)
class ReadLine:
def __init__(self, s):
self.buf = bytearray()
self.s = s
self.sensor_data = []
self.sensor_list = []
try:
self.sensor_data_ser = serial.Serial(glob.glob('/dev/ttyUSB*')[0], 115200)
print("/dev/ttyUSB* connected succeed")
except:
self.sensor_data_ser = None
self.sensor_data_max_len = 51
try:
self.lidar_ser = serial.Serial(glob.glob('/dev/ttyACM*')[0], 230400, timeout=1)
print("/dev/ttyACM* connected succeed")
except:
self.lidar_ser = None
self.ANGLE_PER_FRAME = 12
self.HEADER = 0x54
self.lidar_angles = []
self.lidar_distances = []
self.lidar_angles_show = []
self.lidar_distances_show = []
self.last_start_angle = 0
def readline(self):
i = self.buf.find(b"\n")
if i >= 0:
r = self.buf[:i+1]
self.buf = self.buf[i+1:]
return r
while True:
i = max(1, min(512, self.s.in_waiting))
data = self.s.read(i)
i = data.find(b"\n")
if i >= 0:
r = self.buf + data[:i+1]
self.buf[0:] = data[i+1:]
return r
else:
self.buf.extend(data)
def clear_buffer(self):
self.s.reset_input_buffer()
def read_sensor_data(self):
if self.sensor_data_ser is None:
return
try:
buffer_clear = False
while self.sensor_data_ser.in_waiting > 0:
buffer_clear = True
sensor_readline = self.sensor_data_ser.readline()
if len(sensor_readline) <= self.sensor_data_max_len:
self.sensor_list.append(sensor_readline.decode('utf-8')[:-2])
else:
self.sensor_list.append(sensor_readline.decode('utf-8')[:self.sensor_data_max_len])
self.sensor_list.append(sensor_readline.decode('utf-8')[self.sensor_data_max_len:-2])
if buffer_clear:
self.sensor_data = self.sensor_list.copy()
self.sensor_list.clear()
self.sensor_data_ser.reset_input_buffer()
except Exception as e:
print(f"[base_ctrl.read_sensor_data] error: {e}")
def parse_lidar_frame(self, data):
# header = data[0]
# verlen = data[1]
# speed = data[3] << 8 | data[2]
start_angle = (data[5] << 8 | data[4]) * 0.01
# print(start)
# end_angle = (data[43] << 8 | data[42]) * 0.01
for i in range(0, self.ANGLE_PER_FRAME):
offset = 6 + i * 3
distance = data[offset+1] << 8 | data[offset]
confidence = data[offset+2]
# lidar_angles.append(np.radians(start_angle + i * 0.167))
self.lidar_angles.append(np.radians(start_angle + i * 0.83333 + 180))
# lidar_angles.append(np.radians(start_angle + end_angle))
self.lidar_distances.append(distance)
# end_angle = (data[43] << 8 | data[42]) * 0.01
# timestamp = data[45] << 8 | data[44]
# crc = data[46]
return start_angle
def lidar_data_recv(self):
if self.lidar_ser is None:
return
try:
while True:
self.header = self.lidar_ser.read(1)
if self.header == b'\x54':
# Read the rest of the data
data = self.header + self.lidar_ser.read(46)
hex_data = list(data)  # bytes already iterate as ints; no hex round-trip needed
start_angle = self.parse_lidar_frame(hex_data)
if self.last_start_angle > start_angle:
break
self.last_start_angle = start_angle
else:
self.lidar_ser.flushInput()
self.last_start_angle = start_angle
self.lidar_angles_show = self.lidar_angles.copy()
self.lidar_distances_show = self.lidar_distances.copy()
self.lidar_angles.clear()
self.lidar_distances.clear()
except Exception as e:
print(f"[base_ctrl.lidar_data_recv] error: {e}")
self.lidar_ser = serial.Serial(glob.glob('/dev/ttyACM*')[0], 230400, timeout=1)
class BaseController:
def __init__(self, uart_dev_set, baud_set):
self.ser = serial.Serial(uart_dev_set, baud_set, timeout=1)
self.rl = ReadLine(self.ser)
self.command_queue = queue.Queue()
self.command_thread = threading.Thread(target=self.process_commands, daemon=True)
self.command_thread.start()
self.base_light_status = 0
self.head_light_status = 0
self.data_buffer = None
self.base_data = None
self.use_lidar = f['base_config']['use_lidar']
self.extra_sensor = f['base_config']['extra_sensor']
def feedback_data(self):
try:
while self.rl.s.in_waiting > 0:
self.data_buffer = json.loads(self.rl.readline().decode('utf-8'))
if 'T' in self.data_buffer:
self.base_data = self.data_buffer
self.data_buffer = None
if self.base_data["T"] == 1003:
print(self.base_data)
return self.base_data
self.rl.clear_buffer()
self.data_buffer = json.loads(self.rl.readline().decode('utf-8'))
self.base_data = self.data_buffer
return self.base_data
except Exception as e:
self.rl.clear_buffer()
print(f"[base_ctrl.feedback_data] error: {e}")
def on_data_received(self):
self.ser.reset_input_buffer()
data_read = json.loads(self.rl.readline().decode('utf-8'))
return data_read
def send_command(self, data):
self.command_queue.put(data)
def process_commands(self):
while True:
data = self.command_queue.get()
self.ser.write((json.dumps(data) + '\n').encode("utf-8"))
def base_json_ctrl(self, input_json):
self.send_command(input_json)
def gimbal_emergency_stop(self):
data = {"T":0}
self.send_command(data)
def base_speed_ctrl(self, input_left, input_right):
data = {"T":1,"L":input_left,"R":input_right}
self.send_command(data)
def gimbal_ctrl(self, input_x, input_y, input_speed, input_acceleration):
data = {"T":133,"X":input_x,"Y":input_y,"SPD":input_speed,"ACC":input_acceleration}
self.send_command(data)
def gimbal_base_ctrl(self, input_x, input_y, input_speed):
data = {"T":141,"X":input_x,"Y":input_y,"SPD":input_speed}
self.send_command(data)
def base_oled(self, input_line, input_text):
data = {"T":3,"lineNum":input_line,"Text":input_text}
self.send_command(data)
def base_default_oled(self):
data = {"T":-3}
self.send_command(data)
def bus_servo_id_set(self, old_id, new_id):
# data = {"T":54,"old":old_id,"new":new_id}
data = {"T":f['cmd_config']['cmd_set_servo_id'],"raw":old_id,"new":new_id}
self.send_command(data)
def bus_servo_torque_lock(self, input_id, input_status):
# data = {"T":55,"id":input_id,"status":input_status}
data = {"T":f['cmd_config']['cmd_servo_torque'],"id":input_id,"cmd":input_status}
self.send_command(data)
def bus_servo_mid_set(self, input_id):
# data = {"T":58,"id":input_id}
data = {"T":f['cmd_config']['cmd_set_servo_mid'],"id":input_id}
self.send_command(data)
def lights_ctrl(self, pwmA, pwmB):
data = {"T":132,"IO4":pwmA,"IO5":pwmB}
self.send_command(data)
self.base_light_status = pwmA
self.head_light_status = pwmB
def base_lights_ctrl(self):
if self.base_light_status != 0:
self.base_light_status = 0
else:
self.base_light_status = 255
self.lights_ctrl(self.base_light_status, self.head_light_status)
def gimbal_dev_close(self):
self.ser.close()
def breath_light(self, input_time):
breath_start_time = time.time()
while time.time() - breath_start_time < input_time:
for i in range(0, 128, 10):
self.lights_ctrl(i, 128-i)
time.sleep(0.1)
for i in range(0, 128, 10):
self.lights_ctrl(128-i, i)
time.sleep(0.1)
self.lights_ctrl(0, 0)
if __name__ == '__main__':
# RPi5
base = BaseController('/dev/ttyAMA0', 115200)
# RPi4B
# base = BaseController('/dev/serial0', 115200)
# breath light for 15s
base.breath_light(15)
# gimbal ctrl, look forward
# x y spd acc
base.gimbal_ctrl(0, 0, 10, 0)
# x(-180 ~ 180)
# x- look left
# x+ look right
# y(-30 ~ 90)
# y- look down
# y+ look up
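
A sketch of the newline-delimited JSON framing that BaseController.process_commands() writes to the UART, exercised against pySerial's loop:// port so it runs without rover hardware:

# protocol_demo.py -- hypothetical, not part of this commit.
import json
import serial

ser = serial.serial_for_url("loop://", timeout=1)  # loopback instead of /dev/ttyAMA0
cmd = {"T": 1, "L": 0.2, "R": 0.2}  # same motion command CAR.forward() sends
ser.write((json.dumps(cmd) + "\n").encode("utf-8"))
echoed = json.loads(ser.readline().decode("utf-8"))
print(echoed)  # {'T': 1, 'L': 0.2, 'R': 0.2}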

@@ -0,0 +1,61 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# camera_pi.py
#
#
#
import time
import io
import threading
import picamera
class Camera(object):
thread = None # background thread that reads frames from camera
frame = None # current frame is stored here by background thread
last_access = 0 # time of last client access to the camera
def initialize(self):
if Camera.thread is None:
# start background frame thread
Camera.thread = threading.Thread(target=self._thread)
Camera.thread.start()
# wait until frames start to be available
while self.frame is None:
time.sleep(0)
def get_frame(self):
Camera.last_access = time.time()
self.initialize()
return self.frame
@classmethod
def _thread(cls):
with picamera.PiCamera() as camera:
# camera setup
camera.resolution = (320, 240)
camera.hflip = True
camera.vflip = True
# let camera warm up
camera.start_preview()
time.sleep(2)
stream = io.BytesIO()
for foo in camera.capture_continuous(stream, 'jpeg',
use_video_port=True):
# store frame
stream.seek(0)
cls.frame = stream.read()
# reset stream for next frame
stream.seek(0)
stream.truncate()
# if there hasn't been any clients asking for frames in
# the last 10 seconds stop the thread
if time.time() - cls.last_access > 10:
break
cls.thread = None
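
A usage sketch for the Camera class above (hypothetical, not part of this commit; it needs a Pi with the legacy picamera stack, since Camera drives picamera directly):

# camera_pi_demo.py -- hypothetical, a sketch only.
from camera_pi import Camera

cam = Camera()
jpeg = cam.get_frame()  # first access starts the background capture thread
with open("frame.jpg", "wb") as fh:
    fh.write(jpeg)      # raw JPEG bytes produced by capture_continuous()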

@@ -0,0 +1,147 @@
#coding:utf-8
import os
import time
import RPi.GPIO as GPIO
from base_ctrl import BaseController
#######################################
######### signal pin definitions ######
#######################################
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
def is_raspberry_pi5():
with open('/proc/cpuinfo', 'r') as file:
for line in file:
if 'Model' in line:
return 'Raspberry Pi 5' in line
return False
base = BaseController('/dev/ttyAMA0', 115200)
class CAR:
def __init__(self):
self.LED0 = 10 # GPIO pin for LED0
self.LED1 = 9 # GPIO pin for LED1
self.LED2 = 25 # GPIO pin for LED2
self.ENA = 13 # L298 enable A
self.ENB = 20 # L298 enable B
self.IN1 = 19 # motor input 1
self.IN2 = 16 # motor input 2
self.IN3 = 21 # motor input 3
self.IN4 = 26 # motor input 4
GPIO.setup(self.LED0, GPIO.OUT, initial=GPIO.HIGH) ## LED0 initialized high (off; the LEDs are active-low)
GPIO.setup(self.LED1, GPIO.OUT, initial=GPIO.HIGH) ## LED1 initialized high
GPIO.setup(self.LED2, GPIO.OUT, initial=GPIO.HIGH) ## LED2 initialized high
GPIO.setup(self.ENA, GPIO.OUT, initial=GPIO.LOW) ## ENA initialized low
GPIO.setup(self.ENB, GPIO.OUT, initial=GPIO.LOW) ## ENB initialized low
GPIO.setup(self.IN1, GPIO.OUT, initial=GPIO.LOW) ## IN1 initialized low
GPIO.setup(self.IN2, GPIO.OUT, initial=GPIO.LOW) ## IN2 initialized low
GPIO.setup(self.IN3, GPIO.OUT, initial=GPIO.LOW) ## IN3 initialized low
GPIO.setup(self.IN4, GPIO.OUT, initial=GPIO.LOW) ## IN4 initialized low
def led(self):
GPIO.output(self.LED0,False)
GPIO.output(self.LED1,False)
GPIO.output(self.LED2,False) ### LED0,LED1,LED2 = on on on
time.sleep(0.5)
GPIO.output(self.LED0,True)
GPIO.output(self.LED1,False)
GPIO.output(self.LED2,False) ### LED0,LED1,LED2 = off on on
time.sleep(0.5)
GPIO.output(self.LED0,False)
GPIO.output(self.LED1,True)
GPIO.output(self.LED2,False) ### LED0,LED1,LED2 = on off on
time.sleep(0.5)
GPIO.output(self.LED0,False)
GPIO.output(self.LED1,False)
GPIO.output(self.LED2,True) ### LED0,LED1,LED2 = on on off
time.sleep(0.5)
GPIO.output(self.LED0,False)
GPIO.output(self.LED1,False)
GPIO.output(self.LED2,False) ### LED0,LED1,LED2 = on on on
time.sleep(0.5)
GPIO.output(self.LED0,True)
GPIO.output(self.LED1,True)
GPIO.output(self.LED2,True) ### LED0,LED1,LED2 = off off off
time.sleep(0.5)
print("run: led")
def led_light(self):
GPIO.output(self.LED0,False)
GPIO.output(self.LED1,False)
GPIO.output(self.LED2,False) ### LED0,LED1,LED2 = on on on
print("run: led_light")
def led_dark(self):
GPIO.output(self.LED0,True)
GPIO.output(self.LED1,True)
GPIO.output(self.LED2,True) ### LED0,LED1,LED2 = off off off
print("run: led_dark")
def stop(self): # stop moving
GPIO.output(self.ENA,False)
GPIO.output(self.ENB,False)
GPIO.output(self.IN1,False)
GPIO.output(self.IN2,False)
GPIO.output(self.IN3,False)
GPIO.output(self.IN4,False)
print("run: stop move")
def Q(self): # shortcut for stop
self.stop()
def q(self):
self.stop()
def forward(self): # move forward
base.send_command({"T":1,"L":0.2,"R":0.2})
time.sleep(2)
base.send_command({"T":1,"L":0,"R":0})
print("run: move !!!!forward")
def W(self): # shortcut for forward
self.forward()
def w(self):
self.forward()
def back(self): # move backward
GPIO.output(self.ENA,True)
GPIO.output(self.ENB,True)
GPIO.output(self.IN1,True)
GPIO.output(self.IN2,False)
GPIO.output(self.IN3,True)
GPIO.output(self.IN4,False)
print("run: move back")
def S(self): # shortcut for back
self.back()
def s(self):
self.back()
def left(self): # turn left
GPIO.output(self.ENA,True)
GPIO.output(self.ENB,True)
GPIO.output(self.IN1,False)
GPIO.output(self.IN2,True)
GPIO.output(self.IN3,True)
GPIO.output(self.IN4,False)
print("run: move left")
def A(self): # shortcut for left
self.left()
def a(self):
self.left()
def right(self): # turn right
GPIO.output(self.ENA,True)
GPIO.output(self.ENB,True)
GPIO.output(self.IN1,True)
GPIO.output(self.IN2,False)
GPIO.output(self.IN3,False)
GPIO.output(self.IN4,True)
print("run: move right")
def D(self): # shortcut for right
self.right()
def d(self):
self.right()
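
A hedged local-teleop sketch that reuses the same getattr() dispatch as the Flask /control/<info> route. It has to run on the rover itself, since importing car touches RPi.GPIO and opens the UART:

# teleop_demo.py -- hypothetical, not part of this commit.
from car import CAR

car = CAR()
while True:
    key = input("w/a/s/d/q (x to exit): ").strip()
    if key == "x":
        car.stop()
        break
    if hasattr(car, key):
        getattr(car, key)()  # w/a/s/d/q are defined above as method aliases
    else:
        print(f"{key} is not defined")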

@@ -0,0 +1,107 @@
args_config:
arm_default_e: 60
arm_default_r: 0
arm_default_z: 24
max_rate: 1.0
max_speed: 1.3
mid_rate: 0.66
min_rate: 0.3
slow_speed: 0.2
audio_config:
audio_output: true
default_volume: 1.0
min_time_bewteen_play: 1
speed_rate: 180
base_config:
add_osd: false
extra_sensor: false
main_type: 2
module_type: 0
robot_name: UGV Rover
sbc_version: 0.93
use_lidar: false
cmd_config:
cmd_arm_ctrl_ui: 144
cmd_gimbal_base_ctrl: 141
cmd_gimbal_ctrl: 133
cmd_gimbal_steady: 137
cmd_movition_ctrl: 1
cmd_pwm_ctrl: 11
cmd_servo_torque: 210
cmd_set_servo_id: 501
cmd_set_servo_mid: 502
code:
base_ct: 10410
base_of: 10407
base_on: 10408
cv_auto: 10307
cv_clor: 10305
cv_face: 10303
cv_moti: 10302
cv_none: 10301
cv_objs: 10304
head_ct: 10409
led_aut: 10405
led_off: 10404
led_ton: 10406
max_res: 10101
mc_lock: 10501
mc_unlo: 10502
mid_res: 10102
min_res: 10103
mp_face: 10308
mp_hand: 10306
mp_pose: 10309
pic_cap: 10201
re_capt: 10402
re_none: 10401
re_reco: 10403
release: 10902
s_panid: 10901
s_tilid: 10904
set_mid: 10903
vid_end: 10203
vid_sta: 10202
zoom_x1: 10104
zoom_x2: 10105
zoom_x4: 10106
cv:
aimed_error: 8
color_lower:
- 101
- 50
- 38
color_upper:
- 110
- 255
- 255
default_color: blue
min_radius: 12
sampling_rad: 25
track_acc_rate: 0.4
track_color_iterate: 0.023
track_faces_iterate: 0.045
track_spd_rate: 60
fb:
base_light: 115
base_voltage: 112
cpu_load: 106
cpu_temp: 107
cv_movtion_mode: 114
detect_react: 103
detect_type: 101
led_mode: 102
pan_angle: 109
picture_size: 104
ram_usage: 108
tilt_angle: 110
video_fps: 113
video_size: 105
wifi_rssi: 111
sbc_config:
disabled_http_log: true
feedback_interval: 0.001
video:
default_quality: 20
default_res_h: 480
default_res_w: 640
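
For reference, this is how base_ctrl.py consumes the file: yaml.safe_load() returns nested dicts keyed exactly as above (a minimal sketch):

# config_demo.py -- hypothetical, not part of this commit.
import yaml

with open("config.yaml", "r") as yaml_file:
    f = yaml.safe_load(yaml_file)

print(f["base_config"]["robot_name"])       # UGV Rover
print(f["cmd_config"]["cmd_set_servo_id"])  # 501, used by bus_servo_id_set()
print(f["video"]["default_res_w"], "x", f["video"]["default_res_h"])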

Binary file not shown.

File diff suppressed because one or more lines are too long

@@ -0,0 +1,335 @@
/*
usage:
p = new Player({
useWorker: <bool>,
workerFile: <defaults to "Decoder.js"> // give path to Decoder.js
webgl: true | false | "auto" // defaults to "auto"
});
// canvas property represents the canvas node
// put it somewhere in the dom
p.canvas;
p.webgl; // contains the used rendering mode. if you pass auto to webgl you can see what auto detection resulted in
p.decode(<binary>);
*/
// universal module definition
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(["./Decoder", "./YUVCanvas"], factory);
} else if (typeof exports === 'object') {
// Node. Does not work with strict CommonJS, but
// only CommonJS-like environments that support module.exports,
// like Node.
module.exports = factory(require("./Decoder"), require("./YUVCanvas"));
} else {
// Browser globals (root is window)
root.Player = factory(root.Decoder, root.YUVCanvas);
}
}(this, function (Decoder, WebGLCanvas) {
"use strict";
var nowValue = Decoder.nowValue;
var Player = function(parOptions){
var self = this;
this._config = parOptions || {};
this.render = true;
if (this._config.render === false){
this.render = false;
};
this.nowValue = nowValue;
this._config.workerFile = this._config.workerFile || "Decoder.js";
if (this._config.preserveDrawingBuffer){
this._config.contextOptions = this._config.contextOptions || {};
this._config.contextOptions.preserveDrawingBuffer = true;
};
var webgl = "auto";
if (this._config.webgl === true){
webgl = true;
}else if (this._config.webgl === false){
webgl = false;
};
if (webgl == "auto"){
webgl = true;
try{
if (!window.WebGLRenderingContext) {
// the browser doesn't even know what WebGL is
webgl = false;
} else {
var canvas = document.createElement('canvas');
var ctx = canvas.getContext("webgl");
if (!ctx) {
// browser supports WebGL but initialization failed.
webgl = false;
};
};
}catch(e){
webgl = false;
};
};
this.webgl = webgl;
// choose functions
if (this.webgl){
this.createCanvasObj = this.createCanvasWebGL;
this.renderFrame = this.renderFrameWebGL;
}else{
this.createCanvasObj = this.createCanvasRGB;
this.renderFrame = this.renderFrameRGB;
};
var lastWidth;
var lastHeight;
var onPictureDecoded = function(buffer, width, height, infos) {
self.onPictureDecoded(buffer, width, height, infos);
var startTime = nowValue();
if (!buffer || !self.render) {
return;
};
self.renderFrame({
canvasObj: self.canvasObj,
data: buffer,
width: width,
height: height
});
if (self.onRenderFrameComplete){
self.onRenderFrameComplete({
data: buffer,
width: width,
height: height,
infos: infos,
canvasObj: self.canvasObj
});
};
};
// provide size
if (!this._config.size){
this._config.size = {};
};
this._config.size.width = this._config.size.width || 200;
this._config.size.height = this._config.size.height || 200;
if (this._config.useWorker){
var worker = new Worker(this._config.workerFile);
this.worker = worker;
worker.addEventListener('message', function(e) {
var data = e.data;
if (data.consoleLog){
console.log(data.consoleLog);
return;
};
onPictureDecoded.call(self, new Uint8Array(data.buf, 0, data.length), data.width, data.height, data.infos);
}, false);
worker.postMessage({type: "Broadway.js - Worker init", options: {
rgb: !webgl,
memsize: this.memsize,
reuseMemory: this._config.reuseMemory ? true : false
}});
if (this._config.transferMemory){
this.decode = function(parData, parInfo){
// no copy
// instead we are transferring the ownership of the buffer
// dangerous!!!
worker.postMessage({buf: parData.buffer, offset: parData.byteOffset, length: parData.length, info: parInfo}, [parData.buffer]); // Send data to our worker.
};
}else{
this.decode = function(parData, parInfo){
// Copy the sample so that we only do a structured clone of the
// region of interest
var copyU8 = new Uint8Array(parData.length);
copyU8.set( parData, 0, parData.length );
worker.postMessage({buf: copyU8.buffer, offset: 0, length: parData.length, info: parInfo}, [copyU8.buffer]); // Send data to our worker.
};
};
if (this._config.reuseMemory){
this.recycleMemory = function(parArray){
//this.beforeRecycle();
worker.postMessage({reuse: parArray.buffer}, [parArray.buffer]); // Send data to our worker.
//this.afterRecycle();
};
}
}else{
this.decoder = new Decoder({
rgb: !webgl
});
this.decoder.onPictureDecoded = onPictureDecoded;
this.decode = function(parData, parInfo){
self.decoder.decode(parData, parInfo);
};
};
if (this.render){
this.canvasObj = this.createCanvasObj({
contextOptions: this._config.contextOptions
});
this.canvas = this.canvasObj.canvas;
};
this.domNode = this.canvas;
lastWidth = this._config.size.width;
lastHeight = this._config.size.height;
};
Player.prototype = {
onPictureDecoded: function(buffer, width, height, infos){},
// call when memory of decoded frames is not used anymore
recycleMemory: function(buf){
},
/*beforeRecycle: function(){},
afterRecycle: function(){},*/
// for both functions options is:
//
// width
// height
// enableScreenshot
//
// returns an object that has a property canvas which is an html5 canvas
createCanvasWebGL: function(options){
var canvasObj = this._createBasicCanvasObj(options);
canvasObj.contextOptions = options.contextOptions;
return canvasObj;
},
createCanvasRGB: function(options){
var canvasObj = this._createBasicCanvasObj(options);
return canvasObj;
},
// part that is the same for webGL and RGB
_createBasicCanvasObj: function(options){
options = options || {};
var obj = {};
var width = options.width;
if (!width){
width = this._config.size.width;
};
var height = options.height;
if (!height){
height = this._config.size.height;
};
obj.canvas = document.createElement('canvas');
obj.canvas.width = width;
obj.canvas.height = height;
obj.canvas.style.backgroundColor = "#0D0E1B";
return obj;
},
// options:
//
// canvas
// data
renderFrameWebGL: function(options){
var canvasObj = options.canvasObj;
var width = options.width || canvasObj.canvas.width;
var height = options.height || canvasObj.canvas.height;
if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height || !canvasObj.webGLCanvas){
canvasObj.canvas.width = width;
canvasObj.canvas.height = height;
canvasObj.webGLCanvas = new WebGLCanvas({
canvas: canvasObj.canvas,
contextOptions: canvasObj.contextOptions,
width: width,
height: height
});
};
var ylen = width * height;
var uvlen = (width / 2) * (height / 2);
canvasObj.webGLCanvas.drawNextOutputPicture({
yData: options.data.subarray(0, ylen),
uData: options.data.subarray(ylen, ylen + uvlen),
vData: options.data.subarray(ylen + uvlen, ylen + uvlen + uvlen)
});
var self = this;
self.recycleMemory(options.data);
},
renderFrameRGB: function(options){
var canvasObj = options.canvasObj;
var width = options.width || canvasObj.canvas.width;
var height = options.height || canvasObj.canvas.height;
if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height){
canvasObj.canvas.width = width;
canvasObj.canvas.height = height;
};
var ctx = canvasObj.ctx;
var imgData = canvasObj.imgData;
if (!ctx){
canvasObj.ctx = canvasObj.canvas.getContext('2d');
ctx = canvasObj.ctx;
canvasObj.imgData = ctx.createImageData(width, height);
imgData = canvasObj.imgData;
};
imgData.data.set(options.data);
ctx.putImageData(imgData, 0, 0);
var self = this;
self.recycleMemory(options.data);
}
};
return Player;
}));

@@ -0,0 +1,551 @@
//
// Copyright (c) 2015 Paperspace Co. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
// universal module definition
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define([], factory);
} else if (typeof exports === 'object') {
// Node. Does not work with strict CommonJS, but
// only CommonJS-like environments that support module.exports,
// like Node.
module.exports = factory();
} else {
// Browser globals (root is window)
root.YUVCanvas = factory();
}
}(this, function () {
/**
* This class can be used to render output pictures from an H264bsdDecoder to a canvas element.
* If available the content is rendered using WebGL.
*/
function YUVCanvas(parOptions) {
parOptions = parOptions || {};
this.canvasElement = parOptions.canvas || document.createElement("canvas");
this.contextOptions = parOptions.contextOptions;
this.type = parOptions.type || "yuv420";
this.customYUV444 = parOptions.customYUV444;
this.conversionType = parOptions.conversionType || "rec601";
this.width = parOptions.width || 640;
this.height = parOptions.height || 320;
this.animationTime = parOptions.animationTime || 0;
this.canvasElement.width = this.width;
this.canvasElement.height = this.height;
this.initContextGL();
if(this.contextGL) {
this.initProgram();
this.initBuffers();
this.initTextures();
};
/**
* Draw the next output picture using WebGL
*/
if (this.type === "yuv420"){
this.drawNextOuptutPictureGL = function(par) {
var gl = this.contextGL;
var texturePosBuffer = this.texturePosBuffer;
var uTexturePosBuffer = this.uTexturePosBuffer;
var vTexturePosBuffer = this.vTexturePosBuffer;
var yTextureRef = this.yTextureRef;
var uTextureRef = this.uTextureRef;
var vTextureRef = this.vTextureRef;
var yData = par.yData;
var uData = par.uData;
var vData = par.vData;
var width = this.width;
var height = this.height;
var yDataPerRow = par.yDataPerRow || width;
var yRowCnt = par.yRowCnt || height;
var uDataPerRow = par.uDataPerRow || (width / 2);
var uRowCnt = par.uRowCnt || (height / 2);
var vDataPerRow = par.vDataPerRow || uDataPerRow;
var vRowCnt = par.vRowCnt || uRowCnt;
gl.viewport(0, 0, width, height);
var tTop = 0;
var tLeft = 0;
var tBottom = height / yRowCnt;
var tRight = width / yDataPerRow;
var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);
gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW);
if (this.customYUV444){
tBottom = height / uRowCnt;
tRight = width / uDataPerRow;
}else{
tBottom = (height / 2) / uRowCnt;
tRight = (width / 2) / uDataPerRow;
};
var uTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);
gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, uTexturePosValues, gl.DYNAMIC_DRAW);
if (this.customYUV444){
tBottom = height / vRowCnt;
tRight = width / vDataPerRow;
}else{
tBottom = (height / 2) / vRowCnt;
tRight = (width / 2) / vDataPerRow;
};
var vTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);
gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vTexturePosValues, gl.DYNAMIC_DRAW);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, yTextureRef);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, yDataPerRow, yRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, yData);
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, uTextureRef);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, uDataPerRow, uRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, uData);
gl.activeTexture(gl.TEXTURE2);
gl.bindTexture(gl.TEXTURE_2D, vTextureRef);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, vDataPerRow, vRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, vData);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
};
}else if (this.type === "yuv422"){
this.drawNextOuptutPictureGL = function(par) {
var gl = this.contextGL;
var texturePosBuffer = this.texturePosBuffer;
var textureRef = this.textureRef;
var data = par.data;
var width = this.width;
var height = this.height;
var dataPerRow = par.dataPerRow || (width * 2);
var rowCnt = par.rowCnt || height;
gl.viewport(0, 0, width, height);
var tTop = 0;
var tLeft = 0;
var tBottom = height / rowCnt;
var tRight = width / (dataPerRow / 2);
var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);
gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW);
gl.uniform2f(gl.getUniformLocation(this.shaderProgram, 'resolution'), dataPerRow, height);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, textureRef);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, dataPerRow, rowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
};
};
};
/**
* Returns true if the canvas supports WebGL
*/
YUVCanvas.prototype.isWebGL = function() {
return this.contextGL;
};
/**
* Create the GL context from the canvas element
*/
YUVCanvas.prototype.initContextGL = function() {
var canvas = this.canvasElement;
var gl = null;
var validContextNames = ["webgl", "experimental-webgl", "moz-webgl", "webkit-3d"];
var nameIndex = 0;
while(!gl && nameIndex < validContextNames.length) {
var contextName = validContextNames[nameIndex];
try {
if (this.contextOptions){
gl = canvas.getContext(contextName, this.contextOptions);
}else{
gl = canvas.getContext(contextName);
};
} catch (e) {
gl = null;
}
if(!gl || typeof gl.getParameter !== "function") {
gl = null;
}
++nameIndex;
};
this.contextGL = gl;
};
/**
* Initialize GL shader program
*/
YUVCanvas.prototype.initProgram = function() {
var gl = this.contextGL;
// vertex shader is the same for all types
var vertexShaderScript;
var fragmentShaderScript;
if (this.type === "yuv420"){
vertexShaderScript = [
'attribute vec4 vertexPos;',
'attribute vec4 texturePos;',
'attribute vec4 uTexturePos;',
'attribute vec4 vTexturePos;',
'varying vec2 textureCoord;',
'varying vec2 uTextureCoord;',
'varying vec2 vTextureCoord;',
'void main()',
'{',
' gl_Position = vertexPos;',
' textureCoord = texturePos.xy;',
' uTextureCoord = uTexturePos.xy;',
' vTextureCoord = vTexturePos.xy;',
'}'
].join('\n');
fragmentShaderScript = [
'precision highp float;',
'varying highp vec2 textureCoord;',
'varying highp vec2 uTextureCoord;',
'varying highp vec2 vTextureCoord;',
'uniform sampler2D ySampler;',
'uniform sampler2D uSampler;',
'uniform sampler2D vSampler;',
'uniform mat4 YUV2RGB;',
'void main(void) {',
' highp float y = texture2D(ySampler, textureCoord).r;',
' highp float u = texture2D(uSampler, uTextureCoord).r;',
' highp float v = texture2D(vSampler, vTextureCoord).r;',
' gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;',
'}'
].join('\n');
}else if (this.type === "yuv422"){
vertexShaderScript = [
'attribute vec4 vertexPos;',
'attribute vec4 texturePos;',
'varying vec2 textureCoord;',
'void main()',
'{',
' gl_Position = vertexPos;',
' textureCoord = texturePos.xy;',
'}'
].join('\n');
fragmentShaderScript = [
'precision highp float;',
'varying highp vec2 textureCoord;',
'uniform sampler2D sampler;',
'uniform highp vec2 resolution;',
'uniform mat4 YUV2RGB;',
'void main(void) {',
' highp float texPixX = 1.0 / resolution.x;',
' highp float logPixX = 2.0 / resolution.x;', // half the resolution of the texture
' highp float logHalfPixX = 4.0 / resolution.x;', // half of the logical resolution so every 4th pixel
' highp float steps = floor(textureCoord.x / logPixX);',
' highp float uvSteps = floor(textureCoord.x / logHalfPixX);',
' highp float y = texture2D(sampler, vec2((logPixX * steps) + texPixX, textureCoord.y)).r;',
' highp float u = texture2D(sampler, vec2((logHalfPixX * uvSteps), textureCoord.y)).r;',
' highp float v = texture2D(sampler, vec2((logHalfPixX * uvSteps) + texPixX + texPixX, textureCoord.y)).r;',
//' highp float y = texture2D(sampler, textureCoord).r;',
//' gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;',
' gl_FragColor = vec4(y, u, v, 1.0) * YUV2RGB;',
'}'
].join('\n');
};
var YUV2RGB = [];
if (this.conversionType == "rec709") {
// ITU-T Rec. 709
YUV2RGB = [
1.16438, 0.00000, 1.79274, -0.97295,
1.16438, -0.21325, -0.53291, 0.30148,
1.16438, 2.11240, 0.00000, -1.13340,
0, 0, 0, 1,
];
} else {
// assume ITU-T Rec. 601
YUV2RGB = [
1.16438, 0.00000, 1.59603, -0.87079,
1.16438, -0.39176, -0.81297, 0.52959,
1.16438, 2.01723, 0.00000, -1.08139,
0, 0, 0, 1
];
};
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexShaderScript);
gl.compileShader(vertexShader);
if(!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
console.log('Vertex shader failed to compile: ' + gl.getShaderInfoLog(vertexShader));
}
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentShaderScript);
gl.compileShader(fragmentShader);
if(!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
console.log('Fragment shader failed to compile: ' + gl.getShaderInfoLog(fragmentShader));
}
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if(!gl.getProgramParameter(program, gl.LINK_STATUS)) {
console.log('Program failed to compile: ' + gl.getProgramInfoLog(program));
}
gl.useProgram(program);
var YUV2RGBRef = gl.getUniformLocation(program, 'YUV2RGB');
gl.uniformMatrix4fv(YUV2RGBRef, false, YUV2RGB);
this.shaderProgram = program;
};
/**
* Initialize vertex buffers and attach to shader program
*/
YUVCanvas.prototype.initBuffers = function() {
var gl = this.contextGL;
var program = this.shaderProgram;
var vertexPosBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 1, -1, 1, 1, -1, -1, -1]), gl.STATIC_DRAW);
var vertexPosRef = gl.getAttribLocation(program, 'vertexPos');
gl.enableVertexAttribArray(vertexPosRef);
gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0);
if (this.animationTime){
var animationTime = this.animationTime;
var timePassed = 0;
var stepTime = 15;
var aniFun = function(){
timePassed += stepTime;
var mul = ( 1 * timePassed ) / animationTime;
if (timePassed >= animationTime){
mul = 1;
}else{
setTimeout(aniFun, stepTime);
};
var neg = -1 * mul;
var pos = 1 * mul;
var vertexPosBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([pos, pos, neg, pos, pos, neg, neg, neg]), gl.STATIC_DRAW);
var vertexPosRef = gl.getAttribLocation(program, 'vertexPos');
gl.enableVertexAttribArray(vertexPosRef);
gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0);
try{
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
}catch(e){};
};
aniFun();
};
var texturePosBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);
var texturePosRef = gl.getAttribLocation(program, 'texturePos');
gl.enableVertexAttribArray(texturePosRef);
gl.vertexAttribPointer(texturePosRef, 2, gl.FLOAT, false, 0, 0);
this.texturePosBuffer = texturePosBuffer;
if (this.type === "yuv420"){
var uTexturePosBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);
var uTexturePosRef = gl.getAttribLocation(program, 'uTexturePos');
gl.enableVertexAttribArray(uTexturePosRef);
gl.vertexAttribPointer(uTexturePosRef, 2, gl.FLOAT, false, 0, 0);
this.uTexturePosBuffer = uTexturePosBuffer;
var vTexturePosBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);
var vTexturePosRef = gl.getAttribLocation(program, 'vTexturePos');
gl.enableVertexAttribArray(vTexturePosRef);
gl.vertexAttribPointer(vTexturePosRef, 2, gl.FLOAT, false, 0, 0);
this.vTexturePosBuffer = vTexturePosBuffer;
};
};
/**
* Initialize GL textures and attach to shader program
*/
YUVCanvas.prototype.initTextures = function() {
var gl = this.contextGL;
var program = this.shaderProgram;
if (this.type === "yuv420"){
var yTextureRef = this.initTexture();
var ySamplerRef = gl.getUniformLocation(program, 'ySampler');
gl.uniform1i(ySamplerRef, 0);
this.yTextureRef = yTextureRef;
var uTextureRef = this.initTexture();
var uSamplerRef = gl.getUniformLocation(program, 'uSampler');
gl.uniform1i(uSamplerRef, 1);
this.uTextureRef = uTextureRef;
var vTextureRef = this.initTexture();
var vSamplerRef = gl.getUniformLocation(program, 'vSampler');
gl.uniform1i(vSamplerRef, 2);
this.vTextureRef = vTextureRef;
}else if (this.type === "yuv422"){
// only one texture for 422
var textureRef = this.initTexture();
var samplerRef = gl.getUniformLocation(program, 'sampler');
gl.uniform1i(samplerRef, 0);
this.textureRef = textureRef;
};
};
/**
* Create and configure a single texture
*/
YUVCanvas.prototype.initTexture = function() {
var gl = this.contextGL;
var textureRef = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, textureRef);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.bindTexture(gl.TEXTURE_2D, null);
return textureRef;
};
/**
* Draw picture data to the canvas.
* If this object is using WebGL, the data must be an I420 formatted ArrayBuffer.
* Otherwise, data must be an RGBA formatted ArrayBuffer.
*/
YUVCanvas.prototype.drawNextOutputPicture = function(width, height, croppingParams, data) {
var gl = this.contextGL;
if(gl) {
this.drawNextOuptutPictureGL(width, height, croppingParams, data);
} else {
this.drawNextOuptutPictureRGBA(width, height, croppingParams, data);
}
};
/**
* Draw next output picture using ARGB data on a 2d canvas.
*/
YUVCanvas.prototype.drawNextOuptutPictureRGBA = function(width, height, croppingParams, data) {
var canvas = this.canvasElement;
var croppingParams = null;
var argbData = data;
var ctx = canvas.getContext('2d');
var imageData = ctx.getImageData(0, 0, width, height);
imageData.data.set(argbData);
if(croppingParams === null) {
ctx.putImageData(imageData, 0, 0);
} else {
ctx.putImageData(imageData, -croppingParams.left, -croppingParams.top, 0, 0, croppingParams.width, croppingParams.height);
}
};
return YUVCanvas;
}));

Binary file not shown.

@@ -0,0 +1,9 @@
body{
background: white;
color: black;
padding:1%;
text-align: center;
}

File diff suppressed because one or more lines are too long

@ -0,0 +1,335 @@
/*
usage:
p = new Player({
useWorker: <bool>,
workerFile: <defaults to "Decoder.js"> // give path to Decoder.js
webgl: true | false | "auto" // defaults to "auto"
});
// canvas property represents the canvas node
// put it somewhere in the dom
p.canvas;
p.webgl; // contains the used rendering mode. if you pass auto to webgl you can see what auto detection resulted in
p.decode(<binary>);
*/
// universal module definition
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(["./Decoder", "./YUVCanvas"], factory);
} else if (typeof exports === 'object') {
// Node. Does not work with strict CommonJS, but
// only CommonJS-like environments that support module.exports,
// like Node.
module.exports = factory(require("./Decoder"), require("./YUVCanvas"));
} else {
// Browser globals (root is window)
root.Player = factory(root.Decoder, root.YUVCanvas);
}
}(this, function (Decoder, WebGLCanvas) {
"use strict";
var nowValue = Decoder.nowValue;
var Player = function(parOptions){
var self = this;
this._config = parOptions || {};
this.render = true;
if (this._config.render === false){
this.render = false;
};
this.nowValue = nowValue;
this._config.workerFile = this._config.workerFile || "Decoder.js";
if (this._config.preserveDrawingBuffer){
this._config.contextOptions = this._config.contextOptions || {};
this._config.contextOptions.preserveDrawingBuffer = true;
};
var webgl = "auto";
if (this._config.webgl === true){
webgl = true;
}else if (this._config.webgl === false){
webgl = false;
};
if (webgl == "auto"){
webgl = true;
try{
if (!window.WebGLRenderingContext) {
// the browser doesn't even know what WebGL is
webgl = false;
} else {
var canvas = document.createElement('canvas');
var ctx = canvas.getContext("webgl");
if (!ctx) {
// browser supports WebGL but initialization failed.
webgl = false;
};
};
}catch(e){
webgl = false;
};
};
this.webgl = webgl;
// choose functions
if (this.webgl){
this.createCanvasObj = this.createCanvasWebGL;
this.renderFrame = this.renderFrameWebGL;
}else{
this.createCanvasObj = this.createCanvasRGB;
this.renderFrame = this.renderFrameRGB;
};
var lastWidth;
var lastHeight;
var onPictureDecoded = function(buffer, width, height, infos) {
self.onPictureDecoded(buffer, width, height, infos);
var startTime = nowValue();
if (!buffer || !self.render) {
return;
};
self.renderFrame({
canvasObj: self.canvasObj,
data: buffer,
width: width,
height: height
});
if (self.onRenderFrameComplete){
self.onRenderFrameComplete({
data: buffer,
width: width,
height: height,
infos: infos,
canvasObj: self.canvasObj
});
};
};
// provide size
if (!this._config.size){
this._config.size = {};
};
this._config.size.width = this._config.size.width || 200;
this._config.size.height = this._config.size.height || 200;
if (this._config.useWorker){
var worker = new Worker(this._config.workerFile);
this.worker = worker;
worker.addEventListener('message', function(e) {
var data = e.data;
if (data.consoleLog){
console.log(data.consoleLog);
return;
};
onPictureDecoded.call(self, new Uint8Array(data.buf, 0, data.length), data.width, data.height, data.infos);
}, false);
worker.postMessage({type: "Broadway.js - Worker init", options: {
rgb: !webgl,
memsize: this.memsize,
reuseMemory: this._config.reuseMemory ? true : false
}});
if (this._config.transferMemory){
this.decode = function(parData, parInfo){
// no copy
// instead we are transfering the ownership of the buffer
// dangerous!!!
worker.postMessage({buf: parData.buffer, offset: parData.byteOffset, length: parData.length, info: parInfo}, [parData.buffer]); // Send data to our worker.
};
}else{
this.decode = function(parData, parInfo){
// Copy the sample so that we only do a structured clone of the
// region of interest
var copyU8 = new Uint8Array(parData.length);
copyU8.set( parData, 0, parData.length );
worker.postMessage({buf: copyU8.buffer, offset: 0, length: parData.length, info: parInfo}, [copyU8.buffer]); // Send data to our worker.
};
};
if (this._config.reuseMemory){
this.recycleMemory = function(parArray){
//this.beforeRecycle();
worker.postMessage({reuse: parArray.buffer}, [parArray.buffer]); // Send data to our worker.
//this.afterRecycle();
};
}
}else{
this.decoder = new Decoder({
rgb: !webgl
});
this.decoder.onPictureDecoded = onPictureDecoded;
this.decode = function(parData, parInfo){
self.decoder.decode(parData, parInfo);
};
};
if (this.render){
this.canvasObj = this.createCanvasObj({
contextOptions: this._config.contextOptions
});
this.canvas = this.canvasObj.canvas;
};
this.domNode = this.canvas;
lastWidth = this._config.size.width;
lastHeight = this._config.size.height;
};
Player.prototype = {
onPictureDecoded: function(buffer, width, height, infos){},
// call when memory of decoded frames is not used anymore
recycleMemory: function(buf){
},
/*beforeRecycle: function(){},
afterRecycle: function(){},*/
// for both functions options is:
//
// width
// height
// enableScreenshot
//
// returns a object that has a property canvas which is a html5 canvas
createCanvasWebGL: function(options){
var canvasObj = this._createBasicCanvasObj(options);
canvasObj.contextOptions = options.contextOptions;
return canvasObj;
},
createCanvasRGB: function(options){
var canvasObj = this._createBasicCanvasObj(options);
return canvasObj;
},
// part that is the same for webGL and RGB
_createBasicCanvasObj: function(options){
options = options || {};
var obj = {};
var width = options.width;
if (!width){
width = this._config.size.width;
};
var height = options.height;
if (!height){
height = this._config.size.height;
};
obj.canvas = document.createElement('canvas');
obj.canvas.width = width;
obj.canvas.height = height;
obj.canvas.style.backgroundColor = "#0D0E1B";
return obj;
},
// options:
//
// canvas
// data
renderFrameWebGL: function(options){
var canvasObj = options.canvasObj;
var width = options.width || canvasObj.canvas.width;
var height = options.height || canvasObj.canvas.height;
if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height || !canvasObj.webGLCanvas){
canvasObj.canvas.width = width;
canvasObj.canvas.height = height;
canvasObj.webGLCanvas = new WebGLCanvas({
canvas: canvasObj.canvas,
contextOptions: canvasObj.contextOptions,
width: width,
height: height
});
};
var ylen = width * height;
var uvlen = (width / 2) * (height / 2);
canvasObj.webGLCanvas.drawNextOutputPicture({
yData: options.data.subarray(0, ylen),
uData: options.data.subarray(ylen, ylen + uvlen),
vData: options.data.subarray(ylen + uvlen, ylen + uvlen + uvlen)
});
var self = this;
self.recycleMemory(options.data);
},
renderFrameRGB: function(options){
var canvasObj = options.canvasObj;
var width = options.width || canvasObj.canvas.width;
var height = options.height || canvasObj.canvas.height;
if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height){
canvasObj.canvas.width = width;
canvasObj.canvas.height = height;
};
var ctx = canvasObj.ctx;
var imgData = canvasObj.imgData;
if (!ctx){
canvasObj.ctx = canvasObj.canvas.getContext('2d');
ctx = canvasObj.ctx;
canvasObj.imgData = ctx.createImageData(width, height);
imgData = canvasObj.imgData;
};
imgData.data.set(options.data);
ctx.putImageData(imgData, 0, 0);
var self = this;
self.recycleMemory(options.data);
}
};
return Player;
}));

@ -0,0 +1,551 @@
//
// Copyright (c) 2015 Paperspace Co. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
// universal module definition
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define([], factory);
} else if (typeof exports === 'object') {
// Node. Does not work with strict CommonJS, but
// only CommonJS-like environments that support module.exports,
// like Node.
module.exports = factory();
} else {
// Browser globals (root is window)
root.YUVCanvas = factory();
}
}(this, function () {
/**
* This class can be used to render output pictures from an H264bsdDecoder to a canvas element.
* If available the content is rendered using WebGL.
*/
function YUVCanvas(parOptions) {
parOptions = parOptions || {};
this.canvasElement = parOptions.canvas || document.createElement("canvas");
this.contextOptions = parOptions.contextOptions;
this.type = parOptions.type || "yuv420";
this.customYUV444 = parOptions.customYUV444;
this.conversionType = parOptions.conversionType || "rec601";
this.width = parOptions.width || 640;
this.height = parOptions.height || 320;
this.animationTime = parOptions.animationTime || 0;
this.canvasElement.width = this.width;
this.canvasElement.height = this.height;
this.initContextGL();
if(this.contextGL) {
this.initProgram();
this.initBuffers();
this.initTextures();
};
/**
* Draw the next output picture using WebGL
*/
if (this.type === "yuv420"){
this.drawNextOuptutPictureGL = function(par) {
var gl = this.contextGL;
var texturePosBuffer = this.texturePosBuffer;
var uTexturePosBuffer = this.uTexturePosBuffer;
var vTexturePosBuffer = this.vTexturePosBuffer;
var yTextureRef = this.yTextureRef;
var uTextureRef = this.uTextureRef;
var vTextureRef = this.vTextureRef;
var yData = par.yData;
var uData = par.uData;
var vData = par.vData;
var width = this.width;
var height = this.height;
var yDataPerRow = par.yDataPerRow || width;
var yRowCnt = par.yRowCnt || height;
var uDataPerRow = par.uDataPerRow || (width / 2);
var uRowCnt = par.uRowCnt || (height / 2);
var vDataPerRow = par.vDataPerRow || uDataPerRow;
var vRowCnt = par.vRowCnt || uRowCnt;
gl.viewport(0, 0, width, height);
var tTop = 0;
var tLeft = 0;
var tBottom = height / yRowCnt;
var tRight = width / yDataPerRow;
var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);
gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW);
if (this.customYUV444){
tBottom = height / uRowCnt;
tRight = width / uDataPerRow;
}else{
tBottom = (height / 2) / uRowCnt;
tRight = (width / 2) / uDataPerRow;
};
var uTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);
gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, uTexturePosValues, gl.DYNAMIC_DRAW);
if (this.customYUV444){
tBottom = height / vRowCnt;
tRight = width / vDataPerRow;
}else{
tBottom = (height / 2) / vRowCnt;
tRight = (width / 2) / vDataPerRow;
};
var vTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);
gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vTexturePosValues, gl.DYNAMIC_DRAW);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, yTextureRef);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, yDataPerRow, yRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, yData);
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, uTextureRef);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, uDataPerRow, uRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, uData);
gl.activeTexture(gl.TEXTURE2);
gl.bindTexture(gl.TEXTURE_2D, vTextureRef);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, vDataPerRow, vRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, vData);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
};
}else if (this.type === "yuv422"){
this.drawNextOuptutPictureGL = function(par) {
var gl = this.contextGL;
var texturePosBuffer = this.texturePosBuffer;
var textureRef = this.textureRef;
var data = par.data;
var width = this.width;
var height = this.height;
var dataPerRow = par.dataPerRow || (width * 2);
var rowCnt = par.rowCnt || height;
gl.viewport(0, 0, width, height);
var tTop = 0;
var tLeft = 0;
var tBottom = height / rowCnt;
var tRight = width / (dataPerRow / 2);
var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);
gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW);
gl.uniform2f(gl.getUniformLocation(this.shaderProgram, 'resolution'), dataPerRow, height);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, textureRef);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, dataPerRow, rowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
};
}
};
/**
* Returns true if the canvas supports WebGL
*/
YUVCanvas.prototype.isWebGL = function() {
return !!this.contextGL;
};
/**
* Create the GL context from the canvas element
*/
YUVCanvas.prototype.initContextGL = function() {
  var canvas = this.canvasElement;
  var gl = null;
  var validContextNames = ["webgl", "experimental-webgl", "moz-webgl", "webkit-3d"];
  var nameIndex = 0;
  while (!gl && nameIndex < validContextNames.length) {
    var contextName = validContextNames[nameIndex];
    try {
      if (this.contextOptions) {
        gl = canvas.getContext(contextName, this.contextOptions);
      } else {
        gl = canvas.getContext(contextName);
      }
    } catch (e) {
      gl = null;
    }
    if (!gl || typeof gl.getParameter !== "function") {
      gl = null;
    }
    ++nameIndex;
  }
  this.contextGL = gl;
};
/**
* Initialize GL shader program
*/
YUVCanvas.prototype.initProgram = function() {
var gl = this.contextGL;
// vertex shader is the same for all types
var vertexShaderScript;
var fragmentShaderScript;
if (this.type === "yuv420"){
vertexShaderScript = [
'attribute vec4 vertexPos;',
'attribute vec4 texturePos;',
'attribute vec4 uTexturePos;',
'attribute vec4 vTexturePos;',
'varying vec2 textureCoord;',
'varying vec2 uTextureCoord;',
'varying vec2 vTextureCoord;',
'void main()',
'{',
' gl_Position = vertexPos;',
' textureCoord = texturePos.xy;',
' uTextureCoord = uTexturePos.xy;',
' vTextureCoord = vTexturePos.xy;',
'}'
].join('\n');
fragmentShaderScript = [
'precision highp float;',
'varying highp vec2 textureCoord;',
'varying highp vec2 uTextureCoord;',
'varying highp vec2 vTextureCoord;',
'uniform sampler2D ySampler;',
'uniform sampler2D uSampler;',
'uniform sampler2D vSampler;',
'uniform mat4 YUV2RGB;',
'void main(void) {',
' highp float y = texture2D(ySampler, textureCoord).r;',
' highp float u = texture2D(uSampler, uTextureCoord).r;',
' highp float v = texture2D(vSampler, vTextureCoord).r;',
' gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;',
'}'
].join('\n');
}else if (this.type === "yuv422"){
vertexShaderScript = [
'attribute vec4 vertexPos;',
'attribute vec4 texturePos;',
'varying vec2 textureCoord;',
'void main()',
'{',
' gl_Position = vertexPos;',
' textureCoord = texturePos.xy;',
'}'
].join('\n');
fragmentShaderScript = [
'precision highp float;',
'varying highp vec2 textureCoord;',
'uniform sampler2D sampler;',
'uniform highp vec2 resolution;',
'uniform mat4 YUV2RGB;',
'void main(void) {',
' highp float texPixX = 1.0 / resolution.x;', // one texel = one byte of the packed row
' highp float logPixX = 2.0 / resolution.x;', // one output pixel spans two texels
' highp float logHalfPixX = 4.0 / resolution.x;', // one chroma sample spans four texels (shared by two pixels)
' highp float steps = floor(textureCoord.x / logPixX);',
' highp float uvSteps = floor(textureCoord.x / logHalfPixX);',
' highp float y = texture2D(sampler, vec2((logPixX * steps) + texPixX, textureCoord.y)).r;',
' highp float u = texture2D(sampler, vec2((logHalfPixX * uvSteps), textureCoord.y)).r;',
' highp float v = texture2D(sampler, vec2((logHalfPixX * uvSteps) + texPixX + texPixX, textureCoord.y)).r;',
' gl_FragColor = vec4(y, u, v, 1.0) * YUV2RGB;',
'}'
].join('\n');
}
var YUV2RGB = [];
if (this.conversionType == "rec709") {
// ITU-T Rec. 709
YUV2RGB = [
1.16438, 0.00000, 1.79274, -0.97295,
1.16438, -0.21325, -0.53291, 0.30148,
1.16438, 2.11240, 0.00000, -1.13340,
0, 0, 0, 1,
];
} else {
// assume ITU-T Rec. 601
YUV2RGB = [
1.16438, 0.00000, 1.59603, -0.87079,
1.16438, -0.39176, -0.81297, 0.52959,
1.16438, 2.01723, 0.00000, -1.08139,
0, 0, 0, 1
];
}
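// Both matrices are the standard limited-range YCbCr -> RGB transforms with
// the constant offsets folded into the last column; e.g. for Rec. 601:
//   R = 1.16438*Y + 1.59603*V - 0.87079
//   G = 1.16438*Y - 0.39176*U - 0.81297*V + 0.52959
//   B = 1.16438*Y + 2.01723*U - 1.08139
// where Y, U, V are the raw 0..1 texture samples.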
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexShaderScript);
gl.compileShader(vertexShader);
if(!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
console.log('Vertex shader failed to compile: ' + gl.getShaderInfoLog(vertexShader));
}
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentShaderScript);
gl.compileShader(fragmentShader);
if(!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
console.log('Fragment shader failed to compile: ' + gl.getShaderInfoLog(fragmentShader));
}
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if(!gl.getProgramParameter(program, gl.LINK_STATUS)) {
console.log('Program failed to link: ' + gl.getProgramInfoLog(program));
}
gl.useProgram(program);
var YUV2RGBRef = gl.getUniformLocation(program, 'YUV2RGB');
gl.uniformMatrix4fv(YUV2RGBRef, false, YUV2RGB);
this.shaderProgram = program;
};
/**
* Initialize vertex buffers and attach to shader program
*/
YUVCanvas.prototype.initBuffers = function() {
var gl = this.contextGL;
var program = this.shaderProgram;
var vertexPosBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 1, -1, 1, 1, -1, -1, -1]), gl.STATIC_DRAW);
var vertexPosRef = gl.getAttribLocation(program, 'vertexPos');
gl.enableVertexAttribArray(vertexPosRef);
gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0);
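// The quad above covers all of clip space as a TRIANGLE_STRIP:
// (1,1) top-right, (-1,1) top-left, (1,-1) bottom-right, (-1,-1) bottom-left.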
if (this.animationTime) {
  var animationTime = this.animationTime;
  var timePassed = 0;
  var stepTime = 15;
  var aniFun = function() {
    timePassed += stepTime;
    var mul = timePassed / animationTime;
    if (timePassed >= animationTime) {
      mul = 1;
    } else {
      setTimeout(aniFun, stepTime);
    }
    var neg = -1 * mul;
    var pos = 1 * mul;
    // Grow the quad from the center by scaling the clip-space coordinates.
    // Reuse the vertex buffer created above instead of leaking a new buffer
    // object on every animation tick.
    gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([pos, pos, neg, pos, pos, neg, neg, neg]), gl.DYNAMIC_DRAW);
    gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0);
    try {
      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    } catch (e) {}
  };
  aniFun();
}
var texturePosBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);
var texturePosRef = gl.getAttribLocation(program, 'texturePos');
gl.enableVertexAttribArray(texturePosRef);
gl.vertexAttribPointer(texturePosRef, 2, gl.FLOAT, false, 0, 0);
this.texturePosBuffer = texturePosBuffer;
if (this.type === "yuv420"){
var uTexturePosBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);
var uTexturePosRef = gl.getAttribLocation(program, 'uTexturePos');
gl.enableVertexAttribArray(uTexturePosRef);
gl.vertexAttribPointer(uTexturePosRef, 2, gl.FLOAT, false, 0, 0);
this.uTexturePosBuffer = uTexturePosBuffer;
var vTexturePosBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);
var vTexturePosRef = gl.getAttribLocation(program, 'vTexturePos');
gl.enableVertexAttribArray(vTexturePosRef);
gl.vertexAttribPointer(vTexturePosRef, 2, gl.FLOAT, false, 0, 0);
this.vTexturePosBuffer = vTexturePosBuffer;
}
};
/**
* Initialize GL textures and attach to shader program
*/
YUVCanvas.prototype.initTextures = function() {
var gl = this.contextGL;
var program = this.shaderProgram;
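// Each sampler uniform is pinned to a fixed texture unit once (0 = Y, 1 = U,
// 2 = V for yuv420); drawNextOutputPictureGL then only re-uploads pixel data.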
if (this.type === "yuv420"){
var yTextureRef = this.initTexture();
var ySamplerRef = gl.getUniformLocation(program, 'ySampler');
gl.uniform1i(ySamplerRef, 0);
this.yTextureRef = yTextureRef;
var uTextureRef = this.initTexture();
var uSamplerRef = gl.getUniformLocation(program, 'uSampler');
gl.uniform1i(uSamplerRef, 1);
this.uTextureRef = uTextureRef;
var vTextureRef = this.initTexture();
var vSamplerRef = gl.getUniformLocation(program, 'vSampler');
gl.uniform1i(vSamplerRef, 2);
this.vTextureRef = vTextureRef;
}else if (this.type === "yuv422"){
// only one texture for 422
var textureRef = this.initTexture();
var samplerRef = gl.getUniformLocation(program, 'sampler');
gl.uniform1i(samplerRef, 0);
this.textureRef = textureRef;
}
};
/**
* Create and configure a single texture
*/
YUVCanvas.prototype.initTexture = function() {
var gl = this.contextGL;
var textureRef = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, textureRef);
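// NEAREST filtering + CLAMP_TO_EDGE wrapping keep the texture complete and
// usable at non-power-of-two plane sizes under WebGL 1 (NPOT textures may
// not use mipmaps or REPEAT).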
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.bindTexture(gl.TEXTURE_2D, null);
return textureRef;
};
/**
* Draw picture data to the canvas.
* If this object is using WebGL, the data must be an I420 formatted ArrayBuffer,
* Otherwise, data must be an RGBA formatted ArrayBuffer.
*/
YUVCanvas.prototype.drawNextOutputPicture = function(width, height, croppingParams, data) {
  var gl = this.contextGL;
  if (gl) {
    this.drawNextOutputPictureGL(width, height, croppingParams, data);
  } else {
    this.drawNextOutputPictureRGBA(width, height, croppingParams, data);
  }
};
/**
 * Draw the next output picture using RGBA data on a 2d canvas.
 */
YUVCanvas.prototype.drawNextOutputPictureRGBA = function(width, height, croppingParams, data) {
  var canvas = this.canvasElement;
  var ctx = canvas.getContext('2d');
  var imageData = ctx.getImageData(0, 0, width, height);
  imageData.data.set(data);
  if (croppingParams == null) { // loose equality also covers undefined
    ctx.putImageData(imageData, 0, 0);
  } else {
    ctx.putImageData(imageData, -croppingParams.left, -croppingParams.top, 0, 0, croppingParams.width, croppingParams.height);
  }
};
return YUVCanvas;
}));

Binary file not shown.

@ -0,0 +1,174 @@
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<link href="http://cdn.bootcss.com/bootstrap/3.3.5/css/bootstrap.min.css" rel="stylesheet" media="screen">
<script src="http://cdn.staticfile.org/jquery/2.2.4/jquery.min.js"></script>
<meta charset="utf-8">
<title>Live Video Based on Flask</title>
<link rel="stylesheet" href='/style.css'/>
<style type="text/css">
#front {
margin-left: 40px;
margin-bottom: 3px;
}
#rear{
margin-top: 3px;
margin-left: 40px;
}
.btn{
background: #e8080b;
}
</style>
</head>
<body>
<img src="{{ url_for('video_feed') }}">
<p>Car control panel</p>
<a href="control" target="_blank">Car control</a>
<div id="container" class="container">
<div>
<button id="front" type="button" onclick="forward()" class="btn btn-lg btn-primary glyphicon glyphicon-circle-arrow-up"> </button>
</div>
<div>
<button id="printButton" type="button" onclick="printClicked()" class="btn btn-lg btn-primary">打印点击</button>
</div>
<div>
<button type="button" onclick="left()" class="btn btn-lg btn-primary glyphicon glyphicon-circle-arrow-left"> </button>
<button type="button" onclick="stop()" class="btn btn-lg btn-primary glyphicon glyphicon-stop"> </button>
<button type="button" onclick="right()" class="btn btn-lg btn-primary glyphicon glyphicon-circle-arrow-right"> </button>
</div>
<div>
<button id="rear" type="button" onclick="back()" class="btn btn-lg btn-primary glyphicon glyphicon-circle-arrow-down"> </button>
</div>
</div>
<!-- mount point for the H264 player canvas created by the script below -->
<div id="viewer"></div>
<script src='/Decoder.js'></script>
<script src='/YUVCanvas.js'></script>
<script src='/Player.js'></script>
<script>
// player
window.player = new Player({ useWorker: true, webgl: 'auto', size: { width: 848, height: 480 } })
var playerElement = document.getElementById('viewer')
playerElement.appendChild(window.player.canvas)
// Websocket
var wsUri = window.location.protocol.replace(/http/,'ws')+'//'+window.location.hostname+':9000'
var ws = new WebSocket(wsUri)
ws.binaryType = 'arraybuffer'
ws.onopen = function (e) {
console.log('Client connected')
ws.onmessage = function (msg) {
// decode stream
window.player.decode(new Uint8Array(msg.data));
}
}
ws.onclose = function (e) {
console.log('Client disconnected')
}
</script>
<script>
function printClicked() {
    console.log("print button clicked"); // debug marker
}
function forward() {
    $.ajax({
        type: "GET",
        dataType: "json",
        url: "/control/forward",
        data: $('#form1').serialize(), // form data to submit
        success: function (result) {
            console.log(result); // log the server response (for debugging)
            if (result.resultCode == 200) {
            }
        },
        error: function () {
        }
    });
}
function back() {
    $.ajax({
        type: "GET",
        dataType: "json",
        url: "/control/back",
        data: $('#form1').serialize(), // form data to submit
        success: function (result) {
            console.log(result); // log the server response (for debugging)
            if (result.resultCode == 200) {
            }
        },
        error: function () {
        }
    });
}
function right() {
    $.ajax({
        type: "GET",
        dataType: "json",
        url: "/control/right",
        data: $('#form1').serialize(), // form data to submit
        success: function (result) {
            console.log(result); // log the server response (for debugging)
            if (result.resultCode == 200) {
            }
        },
        error: function () {
        }
    });
}
function left() {
    $.ajax({
        type: "GET",
        dataType: "json",
        url: "/control/left",
        data: $('#form1').serialize(), // form data to submit
        success: function (result) {
            console.log(result); // log the server response (for debugging)
            if (result.resultCode == 200) {
            }
        },
        error: function () {
        }
    });
}
function stop() {
    $.ajax({
        type: "GET",
        dataType: "json",
        url: "/control/stop",
        data: $('#form1').serialize(), // form data to submit
        success: function (result) {
            console.log(result); // log the server response (for debugging)
            if (result.resultCode == 200) {
            }
        },
        error: function () {
        }
    });
}
</script>
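<!--
  Hedged sketch, not part of the commit: the five movement handlers above
  differ only in the URL they request, so a single helper (the name
  sendControl is assumed) could replace them while keeping the same
  /control/<action> endpoints and resultCode convention.
<script>
function sendControl(action) {
    $.ajax({
        type: "GET",
        dataType: "json",
        url: "/control/" + action, // same endpoints as the handlers above
        success: function (result) {
            console.log(result); // log the server response (for debugging)
        },
        error: function () {
            console.warn("control request failed: " + action);
        }
    });
}
// usage: wire the buttons as onclick="sendControl('forward')" etc.
</script>
-->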
<script>
function refreshPage() {
location.reload(); // reload the page
}
</script>
<script src="//cdn.bootcss.com/bootstrap/3.3.5/js/bootstrap.min.js"></script>
</body>
</html>

@ -0,0 +1,187 @@
<!--
index.html
-->
<html>
<head>
<title>Car camera view</title>
<link rel="stylesheet" href='/style.css'/>
<link href="http://cdn.bootcss.com/bootstrap/3.3.5/css/bootstrap.min.css" rel="stylesheet" media="screen">
<script src="http://cdn.staticfile.org/jquery/2.2.4/jquery.min.js"></script>
<style type="text/css">
#front {
margin-left: 40px;
margin-bottom: 3px;
}
#rear{
margin-top: 3px;
margin-left: 40px;
}
.btn{
background: #e8080b;
}
</style>
</head>
<body>
<h1>MJRoBot Lab Live Streaming</h1>
<div>
<img src="{{ url_for('video_feed') }}" width="45%" style="display:inline-block;">
<!-- <img src="{{ url_for('capture') }}" alt="Captured Image" width="45%" style="display:inline-block;"> -->
</div>
<hr>
<p>Car control panel</p>
<a href="control" target="_blank">Car control</a>
<div id="container" class="container">
<div>
<button id="front" type="button" onclick="forward()" class="btn btn-lg btn-primary glyphicon glyphicon-circle-arrow-up"> </button>
</div>
<div>
<button type="button" onclick="left()" class="btn btn-lg btn-primary glyphicon glyphicon-circle-arrow-left"> </button>
<button type="button" onclick="stop()" class="btn btn-lg btn-primary glyphicon glyphicon-stop"> </button>
<button type="button" onclick="right()" class="btn btn-lg btn-primary glyphicon glyphicon-circle-arrow-right"> </button>
</div>
<div>
<button id="rear" type="button" onclick="back()" class="btn btn-lg btn-primary glyphicon glyphicon-circle-arrow-down"> </button>
</div>
<div>
<button id="screenshot" type="button" onclick="refreshPage()" class="btn btn-lg btn-primary glyphicon glyphicon-camera">截图</button>
</div>
</div>
<!-- mount point for the H264 player canvas created by the script below -->
<div id="viewer"></div>
<script src='/Decoder.js'></script>
<script src='/YUVCanvas.js'></script>
<script src='/Player.js'></script>
<script>
// player
window.player = new Player({ useWorker: true, webgl: 'auto', size: { width: 848, height: 480 } })
var playerElement = document.getElementById('viewer')
playerElement.appendChild(window.player.canvas)
// Websocket
var wsUri = window.location.protocol.replace(/http/,'ws')+'//'+window.location.hostname+':9000'
var ws = new WebSocket(wsUri)
ws.binaryType = 'arraybuffer'
ws.onopen = function (e) {
console.log('Client connected')
ws.onmessage = function (msg) {
// decode stream
window.player.decode(new Uint8Array(msg.data));
}
}
ws.onclose = function (e) {
console.log('Client disconnected')
}
</script>
<script type="text/javascript">
function forward() {
    $.ajax({
        type: "GET",
        dataType: "json",
        url: "/control/forward",
        data: $('#form1').serialize(), // form data to submit
        success: function (result) {
            console.log(result); // log the server response (for debugging)
            if (result.resultCode == 200) {
            }
        },
        error: function () {
        }
    });
}
function back() {
    $.ajax({
        type: "GET",
        dataType: "json",
        url: "/control/back",
        data: $('#form1').serialize(), // form data to submit
        success: function (result) {
            console.log(result); // log the server response (for debugging)
            if (result.resultCode == 200) {
            }
        },
        error: function () {
        }
    });
}
function right() {
    $.ajax({
        type: "GET",
        dataType: "json",
        url: "/control/right",
        data: $('#form1').serialize(), // form data to submit
        success: function (result) {
            console.log(result); // log the server response (for debugging)
            if (result.resultCode == 200) {
            }
        },
        error: function () {
        }
    });
}
function left() {
    $.ajax({
        type: "GET",
        dataType: "json",
        url: "/control/left",
        data: $('#form1').serialize(), // form data to submit
        success: function (result) {
            console.log(result); // log the server response (for debugging)
            if (result.resultCode == 200) {
            }
        },
        error: function () {
        }
    });
}
function stop() {
    $.ajax({
        type: "GET",
        dataType: "json",
        url: "/control/stop",
        data: $('#form1').serialize(), // form data to submit
        success: function (result) {
            console.log(result); // log the server response (for debugging)
            if (result.resultCode == 200) {
            }
        },
        error: function () {
        }
    });
}
</script>
<!-- <script>
function myFunction() {
    $.ajax({
        url: "/capture",
        method: "GET",
        dataType: "blob", // expect a binary stream in the response
        success: function(data) { // callback
            var img = new Image();
            img.src = URL.createObjectURL(data); // build an object URL from the response data
            document.body.appendChild(img); // append the Image to the DOM to display it
        }
    });
}
</script> -->
<script>
function refreshPage() {
location.reload(); // reload the page
}
</script>
<script src="//cdn.bootcss.com/bootstrap/3.3.5/js/bootstrap.min.js"></script>
</body>
</html>

@ -0,0 +1,19 @@
<!--
index.html
-->
<html>
<head>
<title>Car camera view</title>
<link rel="stylesheet" href='../static/style.css'/>
</head>
<body>
<h1>MJRoBot Lab Live Streaming</h1>
<h3><img src="{{ url_for('video_feed') }}" width="50%"></h3>
<hr>
<p>Car control panel</p>
<a href="control" target="_blank">Car control</a>
</body>
</html>

@ -0,0 +1,34 @@
<!DOCTYPE html>
<html>
<head>
<meta charset='utf-8'>
<title>PiCamera H264 Streaming</title>
</head>
<body>
<h1>PiCamera H264 Streaming</h1>
<div id='viewer'></div>
<script src='Decoder.js'></script>
<script src='YUVCanvas.js'></script>
<script src='Player.js'></script>
<script>
// player
window.player = new Player({ useWorker: true, webgl: 'auto', size: { width: 848, height: 480 } })
var playerElement = document.getElementById('viewer')
playerElement.appendChild(window.player.canvas)
// Websocket
var wsUri = window.location.protocol.replace(/http/,'ws')+'//'+window.location.hostname+':9000'
var ws = new WebSocket(wsUri)
ws.binaryType = 'arraybuffer'
ws.onopen = function (e) {
console.log('Client connected')
ws.onmessage = function (msg) {
// decode stream
window.player.decode(new Uint8Array(msg.data));
}
}
ws.onclose = function (e) {
console.log('Client disconnected')
}
</script>
</body>
</html>
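<!--
  Hedged sketch, not part of the commit: each template opens this WebSocket
  once and goes dark if the H264 stream server restarts. A shared helper
  (connectStream is an assumed name) could add simple auto-reconnect:
<script>
function connectStream(player, retryMs) {
    var wsUri = window.location.protocol.replace(/http/, 'ws') + '//' +
                window.location.hostname + ':9000';
    var ws = new WebSocket(wsUri);
    ws.binaryType = 'arraybuffer';
    ws.onmessage = function (msg) {
        player.decode(new Uint8Array(msg.data)); // feed each chunk to the H264 decoder
    };
    ws.onclose = function () {
        console.log('Stream closed, retrying in ' + retryMs + ' ms');
        setTimeout(function () { connectStream(player, retryMs); }, retryMs);
    };
}
// usage: connectStream(window.player, 2000);
</script>
-->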

@ -0,0 +1,9 @@
body{
background: white;
color: black;
padding:1%;
text-align: center;
}