diff --git a/.gitignore b/.gitignore index ec2481e..5d947ca 100644 --- a/.gitignore +++ b/.gitignore @@ -1,21 +1,18 @@ -# ---> Vim -# Swap -[._]*.s[a-v][a-z] -!*.svg # comment out if you don't need vector files -[._]*.sw[a-p] -[._]s[a-rt-v][a-z] -[._]ss[a-gi-z] -[._]sw[a-p] +# Build and Release Folders +bin-debug/ +bin-release/ +[Oo]bj/ +[Bb]in/ -# Session -Session.vim -Sessionx.vim +# Other files and folders +.settings/ -# Temporary -.netrwhist -*~ -# Auto-generated tag files -tags -# Persistent undo -[._]*.un~ +# Executables +*.swf +*.air +*.ipa +*.apk +# Project files, i.e. `.project`, `.actionScriptProperties` and `.flexProperties` +# should NOT be excluded as they contain compiler settings and other important +# information for Eclipse / Flash Builder. diff --git a/README.md b/README.md index 4bf4cf6..2362711 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,33 @@ -# DigitImage +# DigitalImages +#### 介绍 +这是数字图像处理后端项目 + +#### 软件架构 +Flask框架搭建 + + +#### 安装教程 + +1. 导包 参照requirements.txt +2. 下载vox-adv-cpk.pth.tar文件 链接:https://pan.baidu.com/s/1_HUbCt7TZO_k8Kp19o8oKw 提取码:6beh +3. 在firstordermodel目录下新建checkpoints文件夹,将下载好的.tar文件放于此处 +4. 在导好包的环境中输入 python app.py 启动后端 + + +#### 参与贡献 + +1. Fork 本仓库 +2. 新建 Feat_xxx 分支 +3. 提交代码 +4. 新建 Pull Request + + +#### 特技 + +1. 使用 Readme\_XXX.md 来支持不同的语言,例如 Readme\_en.md, Readme\_zh.md +2. Gitee 官方博客 [blog.gitee.com](https://blog.gitee.com) +3. 你可以 [https://gitee.com/explore](https://gitee.com/explore) 这个地址来了解 Gitee 上的优秀开源项目 +4. [GVP](https://gitee.com/gvp) 全称是 Gitee 最有价值开源项目,是综合评定出的优秀开源项目 +5. Gitee 官方提供的使用手册 [https://gitee.com/help](https://gitee.com/help) +6. 
Gitee 封面人物是一档用来展示 Gitee 会员风采的栏目 [https://gitee.com/gitee-stars/](https://gitee.com/gitee-stars/) diff --git a/app.py b/app.py new file mode 100644 index 0000000..f68b0af --- /dev/null +++ b/app.py @@ -0,0 +1,172 @@ +from flask import Flask, request, send_file, jsonify, send_from_directory +from flask_cors import CORS +from PIL import Image, ImageFilter, ImageEnhance, ImageOps +import io +import uuid +import os +import sys +import yaml +import torch +import imageio +import imageio_ffmpeg as ffmpeg +import numpy as np +from moviepy.editor import VideoFileClip +from skimage.transform import resize +from skimage import img_as_ubyte +from tqdm import tqdm +sys.path.append(os.path.abspath('./firstordermodel')) +sys.path.append(".") +from modules.generator import OcclusionAwareGenerator +from modules.keypoint_detector import KPDetector +from animate import normalize_kp + +app = Flask(__name__) +CORS(app) + +@app.route('/process-image', methods=['POST']) +def process_image(): + file = request.files['file'] + operation = request.form['operation'] + parameter = request.form['parameter'] + + image = Image.open(file.stream) + + if operation == 'rotate': + image = image.rotate(float(parameter)) + elif operation == 'flip': + image = ImageOps.flip(image) + elif operation == 'scale': + scale_factor = float(parameter) + w, h = image.size + new_width = int(w*scale_factor) + new_height = int(h*scale_factor) + image = image.resize((new_width,new_height), Image.ANTIALIAS) + # blank=(new_width-new_height)*scale_factor + # image=image.crop((0,-blank,new_width,new_width-blank)) + elif operation == 'filter': + if parameter == 'BLUR': + image = image.filter(ImageFilter.BLUR) + elif parameter == 'EMBOSS': + image = image.filter(ImageFilter.EMBOSS) + elif parameter == 'CONTOUR': + image = image.filter(ImageFilter.CONTOUR) + elif parameter == 'SHARPEN': + image = image.filter(ImageFilter.SHARPEN) + elif operation == 'color_adjust': + r, g, b = map(float, parameter.split(',')) + r = r if r else 
1.0 + g = g if g else 1.0 + b = b if b else 1.0 + r_channel, g_channel, b_channel = image.split() + r_channel = r_channel.point(lambda i: i * r) + g_channel = g_channel.point(lambda i: i * g) + b_channel = b_channel.point(lambda i: i * b) + image = Image.merge('RGB', (r_channel, g_channel, b_channel)) + elif operation == 'contrast': + enhancer = ImageEnhance.Contrast(image) + image = enhancer.enhance(2) + elif operation == 'smooth': + image = image.filter(ImageFilter.SMOOTH) + + img_io = io.BytesIO() + image.save(img_io, 'JPEG') + img_io.seek(0) + + return send_file(img_io, mimetype='image/jpeg') + + +def load_checkpoints(config_path, checkpoint_path, cpu=True): + with open(config_path) as f: + config = yaml.load(f, Loader=yaml.FullLoader) + + generator = OcclusionAwareGenerator(**config['model_params']['generator_params'], + **config['model_params']['common_params']) + if not cpu: + generator.cuda() + + kp_detector = KPDetector(**config['model_params']['kp_detector_params'], + **config['model_params']['common_params']) + if not cpu: + kp_detector.cuda() + + if cpu: + checkpoint = torch.load(checkpoint_path, map_location=torch.device('cpu')) + else: + checkpoint = torch.load(checkpoint_path) + + generator.load_state_dict(checkpoint['generator']) + kp_detector.load_state_dict(checkpoint['kp_detector']) + + if not cpu: + generator = DataParallelWithCallback(generator) + kp_detector = DataParallelWithCallback(kp_detector) + + generator.eval() + kp_detector.eval() + + return generator, kp_detector + +def make_animation(source_image, driving_video, generator, kp_detector, relative=True, adapt_movement_scale=True, cpu=True): + with torch.no_grad(): + predictions = [] + source = torch.tensor(source_image[np.newaxis].astype(np.float32)).permute(0, 3, 1, 2) + if not cpu: + source = source.cuda() + driving = torch.tensor(np.array(driving_video)[np.newaxis].astype(np.float32)).permute(0, 4, 1, 2, 3) + kp_source = kp_detector(source) + kp_driving_initial = 
kp_detector(driving[:, :, 0]) + + for frame_idx in tqdm(range(driving.shape[2])): + driving_frame = driving[:, :, frame_idx] + if not cpu: + driving_frame = driving_frame.cuda() + kp_driving = kp_detector(driving_frame) + kp_norm = normalize_kp(kp_source=kp_source, kp_driving=kp_driving, + kp_driving_initial=kp_driving_initial, use_relative_movement=relative, + use_relative_jacobian=relative, adapt_movement_scale=adapt_movement_scale) + out = generator(source, kp_source=kp_source, kp_driving=kp_norm) + + predictions.append(np.transpose(out['prediction'].data.cpu().numpy(), [0, 2, 3, 1])[0]) + return predictions + +@app.route('/motion-drive', methods=['POST']) +def motion_drive(): + image_file = request.files['image'] + video_file = request.files['video'] + + source_image = imageio.imread(image_file) + + # 保存视频文件到临时路径 + video_path = f"./data/{uuid.uuid1().hex}.mp4" + video_file.save(video_path) + + reader = imageio.get_reader(video_path, 'ffmpeg') + fps = reader.get_meta_data()['fps'] + driving_video = [] + try: + for im in reader: + driving_video.append(im) + except RuntimeError: + pass + reader.close() + + source_image = resize(source_image, (256, 256))[..., :3] + driving_video = [resize(frame, (256, 256))[..., :3] for frame in driving_video] + config_path = "./firstordermodel/config/vox-adv-256.yaml" + checkpoint_path = "./firstordermodel/checkpoints/vox-adv-cpk.pth.tar" + generator, kp_detector = load_checkpoints(config_path=config_path, checkpoint_path=checkpoint_path, cpu=True) + + predictions = make_animation(source_image, driving_video, generator, kp_detector, relative=True, adapt_movement_scale=True, cpu=True) + + result_filename = f"result_{uuid.uuid1().hex}.mp4" + result_path = os.path.join('data', result_filename) + imageio.mimsave(result_path, [img_as_ubyte(frame) for frame in predictions], fps=fps) + + return jsonify({"video_url": f"/data/{result_filename}"}) + +@app.route('/data/', methods=['GET']) +def download_file(filename): + return 
send_from_directory('data', filename) + +if __name__ == "__main__": + app.run(debug=True) diff --git a/data/db546a2c388411ef81c2e00af69c2d38.mp4 b/data/db546a2c388411ef81c2e00af69c2d38.mp4 new file mode 100644 index 0000000..293a1a3 Binary files /dev/null and b/data/db546a2c388411ef81c2e00af69c2d38.mp4 differ diff --git a/data/e1dc2c38388511efaa03e00af69c2d38.mp4 b/data/e1dc2c38388511efaa03e00af69c2d38.mp4 new file mode 100644 index 0000000..293a1a3 Binary files /dev/null and b/data/e1dc2c38388511efaa03e00af69c2d38.mp4 differ diff --git a/data/empty.txt b/data/empty.txt new file mode 100644 index 0000000..e69de29 diff --git a/data/result_4d37d8ec388611ef8d78e00af69c2d38.mp4 b/data/result_4d37d8ec388611ef8d78e00af69c2d38.mp4 new file mode 100644 index 0000000..38e72f8 Binary files /dev/null and b/data/result_4d37d8ec388611ef8d78e00af69c2d38.mp4 differ diff --git a/firstordermodel/.dockerignore b/firstordermodel/.dockerignore new file mode 100644 index 0000000..a47d3be --- /dev/null +++ b/firstordermodel/.dockerignore @@ -0,0 +1,3 @@ +/venv +.git +__pycache__ diff --git a/firstordermodel/.gitignore b/firstordermodel/.gitignore new file mode 100644 index 0000000..03d4031 --- /dev/null +++ b/firstordermodel/.gitignore @@ -0,0 +1,3 @@ +/.vscode +__pycache__ +/venv diff --git a/firstordermodel/Dockerfile b/firstordermodel/Dockerfile new file mode 100644 index 0000000..a51a921 --- /dev/null +++ b/firstordermodel/Dockerfile @@ -0,0 +1,13 @@ +FROM nvcr.io/nvidia/pytorch:21.02-py3 + +RUN DEBIAN_FRONTEND=noninteractive apt-get -qq update \ + && DEBIAN_FRONTEND=noninteractive apt-get -qqy install python3-pip ffmpeg git less nano libsm6 libxext6 libxrender-dev \ + && rm -rf /var/lib/apt/lists/* + +COPY . 
/app/ +WORKDIR /app + +RUN pip3 install --upgrade pip +RUN pip3 install \ + git+https://github.com/1adrianb/face-alignment \ + -r requirements.txt diff --git a/firstordermodel/__init__.py b/firstordermodel/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/firstordermodel/animate.py b/firstordermodel/animate.py new file mode 100644 index 0000000..42507ad --- /dev/null +++ b/firstordermodel/animate.py @@ -0,0 +1,101 @@ +import os +from tqdm import tqdm + +import torch +from torch.utils.data import DataLoader + +from frames_dataset import PairedDataset +from logger import Logger, Visualizer +import imageio +from scipy.spatial import ConvexHull +import numpy as np + +from sync_batchnorm import DataParallelWithCallback + + +def normalize_kp(kp_source, kp_driving, kp_driving_initial, adapt_movement_scale=False, + use_relative_movement=False, use_relative_jacobian=False): + if adapt_movement_scale: + source_area = ConvexHull(kp_source['value'][0].data.cpu().numpy()).volume + driving_area = ConvexHull(kp_driving_initial['value'][0].data.cpu().numpy()).volume + adapt_movement_scale = np.sqrt(source_area) / np.sqrt(driving_area) + else: + adapt_movement_scale = 1 + + kp_new = {k: v for k, v in kp_driving.items()} + + if use_relative_movement: + kp_value_diff = (kp_driving['value'] - kp_driving_initial['value']) + kp_value_diff *= adapt_movement_scale + kp_new['value'] = kp_value_diff + kp_source['value'] + + if use_relative_jacobian: + jacobian_diff = torch.matmul(kp_driving['jacobian'], torch.inverse(kp_driving_initial['jacobian'])) + kp_new['jacobian'] = torch.matmul(jacobian_diff, kp_source['jacobian']) + + return kp_new + + +def animate(config, generator, kp_detector, checkpoint, log_dir, dataset): + log_dir = os.path.join(log_dir, 'animation') + png_dir = os.path.join(log_dir, 'png') + animate_params = config['animate_params'] + + dataset = PairedDataset(initial_dataset=dataset, number_of_pairs=animate_params['num_pairs']) + dataloader = 
DataLoader(dataset, batch_size=1, shuffle=False, num_workers=1) + + if checkpoint is not None: + Logger.load_cpk(checkpoint, generator=generator, kp_detector=kp_detector) + else: + raise AttributeError("Checkpoint should be specified for mode='animate'.") + + if not os.path.exists(log_dir): + os.makedirs(log_dir) + + if not os.path.exists(png_dir): + os.makedirs(png_dir) + + if torch.cuda.is_available(): + generator = DataParallelWithCallback(generator) + kp_detector = DataParallelWithCallback(kp_detector) + + generator.eval() + kp_detector.eval() + + for it, x in tqdm(enumerate(dataloader)): + with torch.no_grad(): + predictions = [] + visualizations = [] + + driving_video = x['driving_video'] + source_frame = x['source_video'][:, :, 0, :, :] + + kp_source = kp_detector(source_frame) + kp_driving_initial = kp_detector(driving_video[:, :, 0]) + + for frame_idx in range(driving_video.shape[2]): + driving_frame = driving_video[:, :, frame_idx] + kp_driving = kp_detector(driving_frame) + kp_norm = normalize_kp(kp_source=kp_source, kp_driving=kp_driving, + kp_driving_initial=kp_driving_initial, **animate_params['normalization_params']) + out = generator(source_frame, kp_source=kp_source, kp_driving=kp_norm) + + out['kp_driving'] = kp_driving + out['kp_source'] = kp_source + out['kp_norm'] = kp_norm + + del out['sparse_deformed'] + + predictions.append(np.transpose(out['prediction'].data.cpu().numpy(), [0, 2, 3, 1])[0]) + + visualization = Visualizer(**config['visualizer_params']).visualize(source=source_frame, + driving=driving_frame, out=out) + visualization = visualization + visualizations.append(visualization) + + predictions = np.concatenate(predictions, axis=1) + result_name = "-".join([x['driving_name'][0], x['source_name'][0]]) + imageio.imsave(os.path.join(png_dir, result_name + '.png'), (255 * predictions).astype(np.uint8)) + + image_name = result_name + animate_params['format'] + imageio.mimsave(os.path.join(log_dir, image_name), visualizations) diff --git 
a/firstordermodel/augmentation.py b/firstordermodel/augmentation.py new file mode 100644 index 0000000..0bf4a3d --- /dev/null +++ b/firstordermodel/augmentation.py @@ -0,0 +1,345 @@ +""" +Code from https://github.com/hassony2/torch_videovision +""" + +import numbers + +import random +import numpy as np +import PIL + +from skimage.transform import resize, rotate +from numpy import pad +import torchvision + +import warnings + +from skimage import img_as_ubyte, img_as_float + + +def crop_clip(clip, min_h, min_w, h, w): + if isinstance(clip[0], np.ndarray): + cropped = [img[min_h:min_h + h, min_w:min_w + w, :] for img in clip] + + elif isinstance(clip[0], PIL.Image.Image): + cropped = [ + img.crop((min_w, min_h, min_w + w, min_h + h)) for img in clip + ] + else: + raise TypeError('Expected numpy.ndarray or PIL.Image' + + 'but got list of {0}'.format(type(clip[0]))) + return cropped + + +def pad_clip(clip, h, w): + im_h, im_w = clip[0].shape[:2] + pad_h = (0, 0) if h < im_h else ((h - im_h) // 2, (h - im_h + 1) // 2) + pad_w = (0, 0) if w < im_w else ((w - im_w) // 2, (w - im_w + 1) // 2) + + return pad(clip, ((0, 0), pad_h, pad_w, (0, 0)), mode='edge') + + +def resize_clip(clip, size, interpolation='bilinear'): + if isinstance(clip[0], np.ndarray): + if isinstance(size, numbers.Number): + im_h, im_w, im_c = clip[0].shape + # Min spatial dim already matches minimal size + if (im_w <= im_h and im_w == size) or (im_h <= im_w + and im_h == size): + return clip + new_h, new_w = get_resize_sizes(im_h, im_w, size) + size = (new_w, new_h) + else: + size = size[1], size[0] + + scaled = [ + resize(img, size, order=1 if interpolation == 'bilinear' else 0, preserve_range=True, + mode='constant', anti_aliasing=True) for img in clip + ] + elif isinstance(clip[0], PIL.Image.Image): + if isinstance(size, numbers.Number): + im_w, im_h = clip[0].size + # Min spatial dim already matches minimal size + if (im_w <= im_h and im_w == size) or (im_h <= im_w + and im_h == size): + return clip 
+ new_h, new_w = get_resize_sizes(im_h, im_w, size) + size = (new_w, new_h) + else: + size = size[1], size[0] + if interpolation == 'bilinear': + pil_inter = PIL.Image.NEAREST + else: + pil_inter = PIL.Image.BILINEAR + scaled = [img.resize(size, pil_inter) for img in clip] + else: + raise TypeError('Expected numpy.ndarray or PIL.Image' + + 'but got list of {0}'.format(type(clip[0]))) + return scaled + + +def get_resize_sizes(im_h, im_w, size): + if im_w < im_h: + ow = size + oh = int(size * im_h / im_w) + else: + oh = size + ow = int(size * im_w / im_h) + return oh, ow + + +class RandomFlip(object): + def __init__(self, time_flip=False, horizontal_flip=False): + self.time_flip = time_flip + self.horizontal_flip = horizontal_flip + + def __call__(self, clip): + if random.random() < 0.5 and self.time_flip: + return clip[::-1] + if random.random() < 0.5 and self.horizontal_flip: + return [np.fliplr(img) for img in clip] + + return clip + + +class RandomResize(object): + """Resizes a list of (H x W x C) numpy.ndarray to the final size + The larger the original image is, the more times it takes to + interpolate + Args: + interpolation (str): Can be one of 'nearest', 'bilinear' + defaults to nearest + size (tuple): (widht, height) + """ + + def __init__(self, ratio=(3. / 4., 4. 
/ 3.), interpolation='nearest'): + self.ratio = ratio + self.interpolation = interpolation + + def __call__(self, clip): + scaling_factor = random.uniform(self.ratio[0], self.ratio[1]) + + if isinstance(clip[0], np.ndarray): + im_h, im_w, im_c = clip[0].shape + elif isinstance(clip[0], PIL.Image.Image): + im_w, im_h = clip[0].size + + new_w = int(im_w * scaling_factor) + new_h = int(im_h * scaling_factor) + new_size = (new_w, new_h) + resized = resize_clip( + clip, new_size, interpolation=self.interpolation) + + return resized + + +class RandomCrop(object): + """Extract random crop at the same location for a list of videos + Args: + size (sequence or int): Desired output size for the + crop in format (h, w) + """ + + def __init__(self, size): + if isinstance(size, numbers.Number): + size = (size, size) + + self.size = size + + def __call__(self, clip): + """ + Args: + img (PIL.Image or numpy.ndarray): List of videos to be cropped + in format (h, w, c) in numpy.ndarray + Returns: + PIL.Image or numpy.ndarray: Cropped list of videos + """ + h, w = self.size + if isinstance(clip[0], np.ndarray): + im_h, im_w, im_c = clip[0].shape + elif isinstance(clip[0], PIL.Image.Image): + im_w, im_h = clip[0].size + else: + raise TypeError('Expected numpy.ndarray or PIL.Image' + + 'but got list of {0}'.format(type(clip[0]))) + + clip = pad_clip(clip, h, w) + im_h, im_w = clip.shape[1:3] + x1 = 0 if h == im_h else random.randint(0, im_w - w) + y1 = 0 if w == im_w else random.randint(0, im_h - h) + cropped = crop_clip(clip, y1, x1, h, w) + + return cropped + + +class RandomRotation(object): + """Rotate entire clip randomly by a random angle within + given bounds + Args: + degrees (sequence or int): Range of degrees to select from + If degrees is a number instead of sequence like (min, max), + the range of degrees, will be (-degrees, +degrees). 
+ """ + + def __init__(self, degrees): + if isinstance(degrees, numbers.Number): + if degrees < 0: + raise ValueError('If degrees is a single number,' + 'must be positive') + degrees = (-degrees, degrees) + else: + if len(degrees) != 2: + raise ValueError('If degrees is a sequence,' + 'it must be of len 2.') + + self.degrees = degrees + + def __call__(self, clip): + """ + Args: + img (PIL.Image or numpy.ndarray): List of videos to be cropped + in format (h, w, c) in numpy.ndarray + Returns: + PIL.Image or numpy.ndarray: Cropped list of videos + """ + angle = random.uniform(self.degrees[0], self.degrees[1]) + if isinstance(clip[0], np.ndarray): + rotated = [rotate(image=img, angle=angle, preserve_range=True) for img in clip] + elif isinstance(clip[0], PIL.Image.Image): + rotated = [img.rotate(angle) for img in clip] + else: + raise TypeError('Expected numpy.ndarray or PIL.Image' + + 'but got list of {0}'.format(type(clip[0]))) + + return rotated + + +class ColorJitter(object): + """Randomly change the brightness, contrast and saturation and hue of the clip + Args: + brightness (float): How much to jitter brightness. brightness_factor + is chosen uniformly from [max(0, 1 - brightness), 1 + brightness]. + contrast (float): How much to jitter contrast. contrast_factor + is chosen uniformly from [max(0, 1 - contrast), 1 + contrast]. + saturation (float): How much to jitter saturation. saturation_factor + is chosen uniformly from [max(0, 1 - saturation), 1 + saturation]. + hue(float): How much to jitter hue. hue_factor is chosen uniformly from + [-hue, hue]. Should be >=0 and <= 0.5. 
+ """ + + def __init__(self, brightness=0, contrast=0, saturation=0, hue=0): + self.brightness = brightness + self.contrast = contrast + self.saturation = saturation + self.hue = hue + + def get_params(self, brightness, contrast, saturation, hue): + if brightness > 0: + brightness_factor = random.uniform( + max(0, 1 - brightness), 1 + brightness) + else: + brightness_factor = None + + if contrast > 0: + contrast_factor = random.uniform( + max(0, 1 - contrast), 1 + contrast) + else: + contrast_factor = None + + if saturation > 0: + saturation_factor = random.uniform( + max(0, 1 - saturation), 1 + saturation) + else: + saturation_factor = None + + if hue > 0: + hue_factor = random.uniform(-hue, hue) + else: + hue_factor = None + return brightness_factor, contrast_factor, saturation_factor, hue_factor + + def __call__(self, clip): + """ + Args: + clip (list): list of PIL.Image + Returns: + list PIL.Image : list of transformed PIL.Image + """ + if isinstance(clip[0], np.ndarray): + brightness, contrast, saturation, hue = self.get_params( + self.brightness, self.contrast, self.saturation, self.hue) + + # Create img transform function sequence + img_transforms = [] + if brightness is not None: + img_transforms.append(lambda img: torchvision.transforms.functional.adjust_brightness(img, brightness)) + if saturation is not None: + img_transforms.append(lambda img: torchvision.transforms.functional.adjust_saturation(img, saturation)) + if hue is not None: + img_transforms.append(lambda img: torchvision.transforms.functional.adjust_hue(img, hue)) + if contrast is not None: + img_transforms.append(lambda img: torchvision.transforms.functional.adjust_contrast(img, contrast)) + random.shuffle(img_transforms) + img_transforms = [img_as_ubyte, torchvision.transforms.ToPILImage()] + img_transforms + [np.array, + img_as_float] + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + jittered_clip = [] + for img in clip: + jittered_img = img + for func in 
img_transforms: + jittered_img = func(jittered_img) + jittered_clip.append(jittered_img.astype('float32')) + elif isinstance(clip[0], PIL.Image.Image): + brightness, contrast, saturation, hue = self.get_params( + self.brightness, self.contrast, self.saturation, self.hue) + + # Create img transform function sequence + img_transforms = [] + if brightness is not None: + img_transforms.append(lambda img: torchvision.transforms.functional.adjust_brightness(img, brightness)) + if saturation is not None: + img_transforms.append(lambda img: torchvision.transforms.functional.adjust_saturation(img, saturation)) + if hue is not None: + img_transforms.append(lambda img: torchvision.transforms.functional.adjust_hue(img, hue)) + if contrast is not None: + img_transforms.append(lambda img: torchvision.transforms.functional.adjust_contrast(img, contrast)) + random.shuffle(img_transforms) + + # Apply to all videos + jittered_clip = [] + for img in clip: + for func in img_transforms: + jittered_img = func(img) + jittered_clip.append(jittered_img) + + else: + raise TypeError('Expected numpy.ndarray or PIL.Image' + + 'but got list of {0}'.format(type(clip[0]))) + return jittered_clip + + +class AllAugmentationTransform: + def __init__(self, resize_param=None, rotation_param=None, flip_param=None, crop_param=None, jitter_param=None): + self.transforms = [] + + if flip_param is not None: + self.transforms.append(RandomFlip(**flip_param)) + + if rotation_param is not None: + self.transforms.append(RandomRotation(**rotation_param)) + + if resize_param is not None: + self.transforms.append(RandomResize(**resize_param)) + + if crop_param is not None: + self.transforms.append(RandomCrop(**crop_param)) + + if jitter_param is not None: + self.transforms.append(ColorJitter(**jitter_param)) + + def __call__(self, clip): + for t in self.transforms: + clip = t(clip) + return clip diff --git a/firstordermodel/config/bair-256.yaml b/firstordermodel/config/bair-256.yaml new file mode 100644 index 
0000000..daffb16 --- /dev/null +++ b/firstordermodel/config/bair-256.yaml @@ -0,0 +1,82 @@ +dataset_params: + root_dir: data/bair + frame_shape: [256, 256, 3] + id_sampling: False + augmentation_params: + flip_param: + horizontal_flip: True + time_flip: True + jitter_param: + brightness: 0.1 + contrast: 0.1 + saturation: 0.1 + hue: 0.1 + + +model_params: + common_params: + num_kp: 10 + num_channels: 3 + estimate_jacobian: True + kp_detector_params: + temperature: 0.1 + block_expansion: 32 + max_features: 1024 + scale_factor: 0.25 + num_blocks: 5 + generator_params: + block_expansion: 64 + max_features: 512 + num_down_blocks: 2 + num_bottleneck_blocks: 6 + estimate_occlusion_map: True + dense_motion_params: + block_expansion: 64 + max_features: 1024 + num_blocks: 5 + scale_factor: 0.25 + discriminator_params: + scales: [1] + block_expansion: 32 + max_features: 512 + num_blocks: 4 + sn: True + +train_params: + num_epochs: 20 + num_repeats: 1 + epoch_milestones: [12, 18] + lr_generator: 2.0e-4 + lr_discriminator: 2.0e-4 + lr_kp_detector: 2.0e-4 + batch_size: 36 + scales: [1, 0.5, 0.25, 0.125] + checkpoint_freq: 10 + transform_params: + sigma_affine: 0.05 + sigma_tps: 0.005 + points_tps: 5 + loss_weights: + generator_gan: 1 + discriminator_gan: 1 + feature_matching: [10, 10, 10, 10] + perceptual: [10, 10, 10, 10, 10] + equivariance_value: 10 + equivariance_jacobian: 10 + +reconstruction_params: + num_videos: 1000 + format: '.mp4' + +animate_params: + num_pairs: 50 + format: '.mp4' + normalization_params: + adapt_movement_scale: False + use_relative_movement: True + use_relative_jacobian: True + +visualizer_params: + kp_size: 5 + draw_border: True + colormap: 'gist_rainbow' diff --git a/firstordermodel/config/fashion-256.yaml b/firstordermodel/config/fashion-256.yaml new file mode 100644 index 0000000..760034d --- /dev/null +++ b/firstordermodel/config/fashion-256.yaml @@ -0,0 +1,77 @@ +dataset_params: + root_dir: data/fashion-png + frame_shape: [256, 256, 3] + 
id_sampling: False + augmentation_params: + flip_param: + horizontal_flip: True + time_flip: True + jitter_param: + hue: 0.1 + +model_params: + common_params: + num_kp: 10 + num_channels: 3 + estimate_jacobian: True + kp_detector_params: + temperature: 0.1 + block_expansion: 32 + max_features: 1024 + scale_factor: 0.25 + num_blocks: 5 + generator_params: + block_expansion: 64 + max_features: 512 + num_down_blocks: 2 + num_bottleneck_blocks: 6 + estimate_occlusion_map: True + dense_motion_params: + block_expansion: 64 + max_features: 1024 + num_blocks: 5 + scale_factor: 0.25 + discriminator_params: + scales: [1] + block_expansion: 32 + max_features: 512 + num_blocks: 4 + +train_params: + num_epochs: 100 + num_repeats: 50 + epoch_milestones: [60, 90] + lr_generator: 2.0e-4 + lr_discriminator: 2.0e-4 + lr_kp_detector: 2.0e-4 + batch_size: 27 + scales: [1, 0.5, 0.25, 0.125] + checkpoint_freq: 50 + transform_params: + sigma_affine: 0.05 + sigma_tps: 0.005 + points_tps: 5 + loss_weights: + generator_gan: 1 + discriminator_gan: 1 + feature_matching: [10, 10, 10, 10] + perceptual: [10, 10, 10, 10, 10] + equivariance_value: 10 + equivariance_jacobian: 10 + +reconstruction_params: + num_videos: 1000 + format: '.mp4' + +animate_params: + num_pairs: 50 + format: '.mp4' + normalization_params: + adapt_movement_scale: False + use_relative_movement: True + use_relative_jacobian: True + +visualizer_params: + kp_size: 5 + draw_border: True + colormap: 'gist_rainbow' diff --git a/firstordermodel/config/mgif-256.yaml b/firstordermodel/config/mgif-256.yaml new file mode 100644 index 0000000..598c60b --- /dev/null +++ b/firstordermodel/config/mgif-256.yaml @@ -0,0 +1,84 @@ +dataset_params: + root_dir: data/moving-gif + frame_shape: [256, 256, 3] + id_sampling: False + augmentation_params: + flip_param: + horizontal_flip: True + time_flip: True + crop_param: + size: [256, 256] + resize_param: + ratio: [0.9, 1.1] + jitter_param: + hue: 0.5 + +model_params: + common_params: + num_kp: 10 + 
num_channels: 3 + estimate_jacobian: True + kp_detector_params: + temperature: 0.1 + block_expansion: 32 + max_features: 1024 + scale_factor: 0.25 + num_blocks: 5 + single_jacobian_map: True + generator_params: + block_expansion: 64 + max_features: 512 + num_down_blocks: 2 + num_bottleneck_blocks: 6 + estimate_occlusion_map: True + dense_motion_params: + block_expansion: 64 + max_features: 1024 + num_blocks: 5 + scale_factor: 0.25 + discriminator_params: + scales: [1] + block_expansion: 32 + max_features: 512 + num_blocks: 4 + sn: True + +train_params: + num_epochs: 100 + num_repeats: 25 + epoch_milestones: [60, 90] + lr_generator: 2.0e-4 + lr_discriminator: 2.0e-4 + lr_kp_detector: 2.0e-4 + + batch_size: 36 + scales: [1, 0.5, 0.25, 0.125] + checkpoint_freq: 100 + transform_params: + sigma_affine: 0.05 + sigma_tps: 0.005 + points_tps: 5 + loss_weights: + generator_gan: 1 + discriminator_gan: 1 + feature_matching: [10, 10, 10, 10] + perceptual: [10, 10, 10, 10, 10] + equivariance_value: 10 + equivariance_jacobian: 10 + +reconstruction_params: + num_videos: 1000 + format: '.mp4' + +animate_params: + num_pairs: 50 + format: '.mp4' + normalization_params: + adapt_movement_scale: False + use_relative_movement: True + use_relative_jacobian: True + +visualizer_params: + kp_size: 5 + draw_border: True + colormap: 'gist_rainbow' diff --git a/firstordermodel/config/nemo-256.yaml b/firstordermodel/config/nemo-256.yaml new file mode 100644 index 0000000..1bd44b9 --- /dev/null +++ b/firstordermodel/config/nemo-256.yaml @@ -0,0 +1,76 @@ +dataset_params: + root_dir: data/nemo-png + frame_shape: [256, 256, 3] + id_sampling: False + augmentation_params: + flip_param: + horizontal_flip: True + time_flip: True + +model_params: + common_params: + num_kp: 10 + num_channels: 3 + estimate_jacobian: True + kp_detector_params: + temperature: 0.1 + block_expansion: 32 + max_features: 1024 + scale_factor: 0.25 + num_blocks: 5 + generator_params: + block_expansion: 64 + max_features: 512 + 
num_down_blocks: 2 + num_bottleneck_blocks: 6 + estimate_occlusion_map: True + dense_motion_params: + block_expansion: 64 + max_features: 1024 + num_blocks: 5 + scale_factor: 0.25 + discriminator_params: + scales: [1] + block_expansion: 32 + max_features: 512 + num_blocks: 4 + sn: True + +train_params: + num_epochs: 100 + num_repeats: 8 + epoch_milestones: [60, 90] + lr_generator: 2.0e-4 + lr_discriminator: 2.0e-4 + lr_kp_detector: 2.0e-4 + batch_size: 36 + scales: [1, 0.5, 0.25, 0.125] + checkpoint_freq: 50 + transform_params: + sigma_affine: 0.05 + sigma_tps: 0.005 + points_tps: 5 + loss_weights: + generator_gan: 1 + discriminator_gan: 1 + feature_matching: [10, 10, 10, 10] + perceptual: [10, 10, 10, 10, 10] + equivariance_value: 10 + equivariance_jacobian: 10 + +reconstruction_params: + num_videos: 1000 + format: '.mp4' + +animate_params: + num_pairs: 50 + format: '.mp4' + normalization_params: + adapt_movement_scale: False + use_relative_movement: True + use_relative_jacobian: True + +visualizer_params: + kp_size: 5 + draw_border: True + colormap: 'gist_rainbow' diff --git a/firstordermodel/config/taichi-256.yaml b/firstordermodel/config/taichi-256.yaml new file mode 100644 index 0000000..44b7839 --- /dev/null +++ b/firstordermodel/config/taichi-256.yaml @@ -0,0 +1,157 @@ +# Dataset parameters +# Each dataset should contain 2 folders train and test +# Each video can be represented as: +# - an image of concatenated frames +# - '.mp4' or '.gif' +# - folder with all frames from a specific video +# In case of Taichi. Same (youtube) video can be splitted in many parts (chunks). Each part has a following +# format (id)#other#info.mp4. For example '12335#adsbf.mp4' has an id 12335. In case of TaiChi id stands for youtube +# video id. +dataset_params: + # Path to data, data can be stored in several formats: .mp4 or .gif videos, stacked .png images or folders with frames. + root_dir: data/taichi-png + # Image shape, needed for staked .png format. 
+ frame_shape: [256, 256, 3] + # In case of TaiChi single video can be splitted in many chunks, or the maybe several videos for single person. + # In this case epoch can be a pass over different videos (if id_sampling=True) or over different chunks (if id_sampling=False) + # If the name of the video '12335#adsbf.mp4' the id is assumed to be 12335 + id_sampling: True + # List with pairs for animation, None for random pairs + pairs_list: data/taichi256.csv + # Augmentation parameters see augmentation.py for all posible augmentations + augmentation_params: + flip_param: + horizontal_flip: True + time_flip: True + jitter_param: + brightness: 0.1 + contrast: 0.1 + saturation: 0.1 + hue: 0.1 + +# Defines model architecture +model_params: + common_params: + # Number of keypoint + num_kp: 10 + # Number of channels per image + num_channels: 3 + # Using first or zero order model + estimate_jacobian: True + kp_detector_params: + # Softmax temperature for keypoint heatmaps + temperature: 0.1 + # Number of features mutliplier + block_expansion: 32 + # Maximum allowed number of features + max_features: 1024 + # Number of block in Unet. Can be increased or decreased depending or resolution. + num_blocks: 5 + # Keypioint is predicted on smaller images for better performance, + # scale_factor=0.25 means that 256x256 image will be resized to 64x64 + scale_factor: 0.25 + generator_params: + # Number of features mutliplier + block_expansion: 64 + # Maximum allowed number of features + max_features: 512 + # Number of downsampling blocks in Jonson architecture. + # Can be increased or decreased depending or resolution. + num_down_blocks: 2 + # Number of ResBlocks in Jonson architecture. + num_bottleneck_blocks: 6 + # Use occlusion map or not + estimate_occlusion_map: True + + dense_motion_params: + # Number of features mutliplier + block_expansion: 64 + # Maximum allowed number of features + max_features: 1024 + # Number of block in Unet. 
Can be increased or decreased depending or resolution. + num_blocks: 5 + # Dense motion is predicted on smaller images for better performance, + # scale_factor=0.25 means that 256x256 image will be resized to 64x64 + scale_factor: 0.25 + discriminator_params: + # Discriminator can be multiscale, if you want 2 discriminator on original + # resolution and half of the original, specify scales: [1, 0.5] + scales: [1] + # Number of features mutliplier + block_expansion: 32 + # Maximum allowed number of features + max_features: 512 + # Number of blocks. Can be increased or decreased depending or resolution. + num_blocks: 4 + +# Parameters of training +train_params: + # Number of training epochs + num_epochs: 100 + # For better i/o performance when number of videos is small number of epochs can be multiplied by this number. + # Thus effectivlly with num_repeats=100 each epoch is 100 times larger. + num_repeats: 150 + # Drop learning rate by 10 times after this epochs + epoch_milestones: [60, 90] + # Initial learing rate for all modules + lr_generator: 2.0e-4 + lr_discriminator: 2.0e-4 + lr_kp_detector: 2.0e-4 + batch_size: 30 + # Scales for perceptual pyramide loss. If scales = [1, 0.5, 0.25, 0.125] and image resolution is 256x256, + # than the loss will be computer on resolutions 256x256, 128x128, 64x64, 32x32. + scales: [1, 0.5, 0.25, 0.125] + # Save checkpoint this frequently. If checkpoint_freq=50, checkpoint will be saved every 50 epochs. + checkpoint_freq: 50 + # Parameters of transform for equivariance loss + transform_params: + # Sigma for affine part + sigma_affine: 0.05 + # Sigma for deformation part + sigma_tps: 0.005 + # Number of point in the deformation grid + points_tps: 5 + loss_weights: + # Weight for LSGAN loss in generator, 0 for no adversarial loss. + generator_gan: 0 + # Weight for LSGAN loss in discriminator + discriminator_gan: 1 + # Weights for feature matching loss, the number should be the same as number of blocks in discriminator. 
+ feature_matching: [10, 10, 10, 10] + # Weights for perceptual loss. + perceptual: [10, 10, 10, 10, 10] + # Weights for value equivariance. + equivariance_value: 10 + # Weights for jacobian equivariance. + equivariance_jacobian: 10 + +# Parameters of reconstruction +reconstruction_params: + # Maximum number of videos for reconstruction + num_videos: 1000 + # Format for visualization, note that results will be also stored in staked .png. + format: '.mp4' + +# Parameters of animation +animate_params: + # Maximum number of pairs for animation, the pairs will be either taken from pairs_list or random. + num_pairs: 50 + # Format for visualization, note that results will be also stored in staked .png. + format: '.mp4' + # Normalization of diriving keypoints + normalization_params: + # Increase or decrease relative movement scale depending on the size of the object + adapt_movement_scale: False + # Apply only relative displacement of the keypoint + use_relative_movement: True + # Apply only relative change in jacobian + use_relative_jacobian: True + +# Visualization parameters +visualizer_params: + # Draw keypoints of this size, increase or decrease depending on resolution + kp_size: 5 + # Draw white border around images + draw_border: True + # Color map for keypoints + colormap: 'gist_rainbow' diff --git a/firstordermodel/config/taichi-adv-256.yaml b/firstordermodel/config/taichi-adv-256.yaml new file mode 100644 index 0000000..cd1941e --- /dev/null +++ b/firstordermodel/config/taichi-adv-256.yaml @@ -0,0 +1,150 @@ +# Dataset parameters +dataset_params: + # Path to data, data can be stored in several formats: .mp4 or .gif videos, stacked .png images or folders with frames. + root_dir: data/taichi-png + # Image shape, needed for staked .png format. + frame_shape: [256, 256, 3] + # In case of TaiChi single video can be splitted in many chunks, or the maybe several videos for single person. 
+ # In this case epoch can be a pass over different videos (if id_sampling=True) or over different chunks (if id_sampling=False) + # If the name of the video '12335#adsbf.mp4' the id is assumed to be 12335 + id_sampling: True + # List with pairs for animation, None for random pairs + pairs_list: data/taichi256.csv + # Augmentation parameters see augmentation.py for all posible augmentations + augmentation_params: + flip_param: + horizontal_flip: True + time_flip: True + jitter_param: + brightness: 0.1 + contrast: 0.1 + saturation: 0.1 + hue: 0.1 + +# Defines model architecture +model_params: + common_params: + # Number of keypoint + num_kp: 10 + # Number of channels per image + num_channels: 3 + # Using first or zero order model + estimate_jacobian: True + kp_detector_params: + # Softmax temperature for keypoint heatmaps + temperature: 0.1 + # Number of features mutliplier + block_expansion: 32 + # Maximum allowed number of features + max_features: 1024 + # Number of block in Unet. Can be increased or decreased depending or resolution. + num_blocks: 5 + # Keypioint is predicted on smaller images for better performance, + # scale_factor=0.25 means that 256x256 image will be resized to 64x64 + scale_factor: 0.25 + generator_params: + # Number of features mutliplier + block_expansion: 64 + # Maximum allowed number of features + max_features: 512 + # Number of downsampling blocks in Jonson architecture. + # Can be increased or decreased depending or resolution. + num_down_blocks: 2 + # Number of ResBlocks in Jonson architecture. + num_bottleneck_blocks: 6 + # Use occlusion map or not + estimate_occlusion_map: True + + dense_motion_params: + # Number of features mutliplier + block_expansion: 64 + # Maximum allowed number of features + max_features: 1024 + # Number of block in Unet. Can be increased or decreased depending or resolution. 
+ num_blocks: 5 + # Dense motion is predicted on smaller images for better performance, + # scale_factor=0.25 means that 256x256 image will be resized to 64x64 + scale_factor: 0.25 + discriminator_params: + # Discriminator can be multiscale, if you want 2 discriminator on original + # resolution and half of the original, specify scales: [1, 0.5] + scales: [1] + # Number of features mutliplier + block_expansion: 32 + # Maximum allowed number of features + max_features: 512 + # Number of blocks. Can be increased or decreased depending or resolution. + num_blocks: 4 + use_kp: True + +# Parameters of training +train_params: + # Number of training epochs + num_epochs: 150 + # For better i/o performance when number of videos is small number of epochs can be multiplied by this number. + # Thus effectivlly with num_repeats=100 each epoch is 100 times larger. + num_repeats: 150 + # Drop learning rate by 10 times after this epochs + epoch_milestones: [] + # Initial learing rate for all modules + lr_generator: 2.0e-4 + lr_discriminator: 2.0e-4 + lr_kp_detector: 0 + batch_size: 27 + # Scales for perceptual pyramide loss. If scales = [1, 0.5, 0.25, 0.125] and image resolution is 256x256, + # than the loss will be computer on resolutions 256x256, 128x128, 64x64, 32x32. + scales: [1, 0.5, 0.25, 0.125] + # Save checkpoint this frequently. If checkpoint_freq=50, checkpoint will be saved every 50 epochs. + checkpoint_freq: 50 + # Parameters of transform for equivariance loss + transform_params: + # Sigma for affine part + sigma_affine: 0.05 + # Sigma for deformation part + sigma_tps: 0.005 + # Number of point in the deformation grid + points_tps: 5 + loss_weights: + # Weight for LSGAN loss in generator + generator_gan: 1 + # Weight for LSGAN loss in discriminator + discriminator_gan: 1 + # Weights for feature matching loss, the number should be the same as number of blocks in discriminator. + feature_matching: [10, 10, 10, 10] + # Weights for perceptual loss. 
+ perceptual: [10, 10, 10, 10, 10] + # Weights for value equivariance. + equivariance_value: 10 + # Weights for jacobian equivariance. + equivariance_jacobian: 10 + +# Parameters of reconstruction +reconstruction_params: + # Maximum number of videos for reconstruction + num_videos: 1000 + # Format for visualization, note that results will be also stored in staked .png. + format: '.mp4' + +# Parameters of animation +animate_params: + # Maximum number of pairs for animation, the pairs will be either taken from pairs_list or random. + num_pairs: 50 + # Format for visualization, note that results will be also stored in staked .png. + format: '.mp4' + # Normalization of diriving keypoints + normalization_params: + # Increase or decrease relative movement scale depending on the size of the object + adapt_movement_scale: False + # Apply only relative displacement of the keypoint + use_relative_movement: True + # Apply only relative change in jacobian + use_relative_jacobian: True + +# Visualization parameters +visualizer_params: + # Draw keypoints of this size, increase or decrease depending on resolution + kp_size: 5 + # Draw white border around images + draw_border: True + # Color map for keypoints + colormap: 'gist_rainbow' diff --git a/firstordermodel/config/vox-256.yaml b/firstordermodel/config/vox-256.yaml new file mode 100644 index 0000000..abfe9a2 --- /dev/null +++ b/firstordermodel/config/vox-256.yaml @@ -0,0 +1,83 @@ +dataset_params: + root_dir: data/vox-png + frame_shape: [256, 256, 3] + id_sampling: True + pairs_list: data/vox256.csv + augmentation_params: + flip_param: + horizontal_flip: True + time_flip: True + jitter_param: + brightness: 0.1 + contrast: 0.1 + saturation: 0.1 + hue: 0.1 + + +model_params: + common_params: + num_kp: 10 + num_channels: 3 + estimate_jacobian: True + kp_detector_params: + temperature: 0.1 + block_expansion: 32 + max_features: 1024 + scale_factor: 0.25 + num_blocks: 5 + generator_params: + block_expansion: 64 + max_features: 512 
+ num_down_blocks: 2 + num_bottleneck_blocks: 6 + estimate_occlusion_map: True + dense_motion_params: + block_expansion: 64 + max_features: 1024 + num_blocks: 5 + scale_factor: 0.25 + discriminator_params: + scales: [1] + block_expansion: 32 + max_features: 512 + num_blocks: 4 + sn: True + +train_params: + num_epochs: 100 + num_repeats: 75 + epoch_milestones: [60, 90] + lr_generator: 2.0e-4 + lr_discriminator: 2.0e-4 + lr_kp_detector: 2.0e-4 + batch_size: 40 + scales: [1, 0.5, 0.25, 0.125] + checkpoint_freq: 50 + transform_params: + sigma_affine: 0.05 + sigma_tps: 0.005 + points_tps: 5 + loss_weights: + generator_gan: 0 + discriminator_gan: 1 + feature_matching: [10, 10, 10, 10] + perceptual: [10, 10, 10, 10, 10] + equivariance_value: 10 + equivariance_jacobian: 10 + +reconstruction_params: + num_videos: 1000 + format: '.mp4' + +animate_params: + num_pairs: 50 + format: '.mp4' + normalization_params: + adapt_movement_scale: False + use_relative_movement: True + use_relative_jacobian: True + +visualizer_params: + kp_size: 5 + draw_border: True + colormap: 'gist_rainbow' diff --git a/firstordermodel/config/vox-adv-256.yaml b/firstordermodel/config/vox-adv-256.yaml new file mode 100644 index 0000000..ed89890 --- /dev/null +++ b/firstordermodel/config/vox-adv-256.yaml @@ -0,0 +1,84 @@ +dataset_params: + root_dir: data/vox-png + frame_shape: [256, 256, 3] + id_sampling: True + pairs_list: data/vox256.csv + augmentation_params: + flip_param: + horizontal_flip: True + time_flip: True + jitter_param: + brightness: 0.1 + contrast: 0.1 + saturation: 0.1 + hue: 0.1 + + +model_params: + common_params: + num_kp: 10 + num_channels: 3 + estimate_jacobian: True + kp_detector_params: + temperature: 0.1 + block_expansion: 32 + max_features: 1024 + scale_factor: 0.25 + num_blocks: 5 + generator_params: + block_expansion: 64 + max_features: 512 + num_down_blocks: 2 + num_bottleneck_blocks: 6 + estimate_occlusion_map: True + dense_motion_params: + block_expansion: 64 + max_features: 
"""Detect per-frame face boxes and emit ffmpeg crop commands for face tracks."""
import face_alignment
import skimage.io
import numpy
from argparse import ArgumentParser
from skimage import img_as_ubyte
from skimage.transform import resize
from tqdm import tqdm
import os
import imageio
import numpy as np
import warnings

warnings.filterwarnings("ignore")


def extract_bbox(frame, fa):
    """Run face detection on a single frame.

    Returns an (N, 4) array of (left, top, right, bot) boxes in the
    original frame's pixel coordinates, or an empty list when nothing
    is detected.
    """
    long_side = max(frame.shape[0], frame.shape[1])
    if long_side > 640:
        # Detect on a <=640px copy for speed; boxes are scaled back afterwards.
        scale_factor = long_side / 640.0
        small = resize(frame, (int(frame.shape[0] / scale_factor), int(frame.shape[1] / scale_factor)))
        frame = img_as_ubyte(small)
    else:
        scale_factor = 1
    frame = frame[..., :3]
    # Channels are reversed before detection — presumably the detector
    # expects BGR ordering; confirm against the face_alignment API.
    bboxes = fa.face_detector.detect_from_image(frame[..., ::-1])
    if len(bboxes) == 0:
        return []
    # Drop the trailing confidence column and undo the detection-time rescale.
    return np.array(bboxes)[:, :-1] * scale_factor
def bb_intersection_over_union(boxA, boxB):
    """Intersection-over-union of two (left, top, right, bot) boxes."""
    inter_left = max(boxA[0], boxB[0])
    inter_top = max(boxA[1], boxB[1])
    inter_right = min(boxA[2], boxB[2])
    inter_bot = min(boxA[3], boxB[3])
    # +1 throughout: coordinates are treated as inclusive pixel indices.
    inter_area = max(0, inter_right - inter_left + 1) * max(0, inter_bot - inter_top + 1)
    area_a = (boxA[2] - boxA[0] + 1) * (boxA[3] - boxA[1] + 1)
    area_b = (boxB[2] - boxB[0] + 1) * (boxB[3] - boxB[1] + 1)
    return inter_area / float(area_a + area_b - inter_area)


def join(tube_bbox, bbox):
    """Return the smallest box enclosing both input boxes."""
    outer_left = min(tube_bbox[0], bbox[0])
    outer_top = min(tube_bbox[1], bbox[1])
    outer_right = max(tube_bbox[2], bbox[2])
    outer_bot = max(tube_bbox[3], bbox[3])
    return (outer_left, outer_top, outer_right, outer_bot)


def compute_bbox(start, end, fps, tube_bbox, frame_shape, inp, image_shape, increase_area=0.1):
    """Build the ffmpeg command that crops one face track out of `inp`.

    The tube bbox is enlarged by at least `increase_area` on every side
    (the max() term additionally pushes the crop toward a square aspect),
    clamped to the frame borders, and the [start, end) frame range is
    converted to seconds via `fps`.
    """
    left, top, right, bot = tube_bbox
    width = right - left
    height = bot - top

    # Enlarge each side; the formula grows the shorter dimension more.
    width_increase = max(increase_area, ((1 + 2 * increase_area) * height - width) / (2 * width))
    height_increase = max(increase_area, ((1 + 2 * increase_area) * width - height) / (2 * height))

    left = int(left - width_increase * width)
    top = int(top - height_increase * height)
    right = int(right + width_increase * width)
    bot = int(bot + height_increase * height)

    # Clamp the enlarged box to the frame.
    top, bot = max(0, top), min(bot, frame_shape[0])
    left, right = max(0, left), min(right, frame_shape[1])
    h, w = bot - top, right - left

    # Frame indices -> seconds.
    start = start / fps
    end = end / fps
    time = end - start

    scale = f'{image_shape[0]}:{image_shape[1]}'

    return f'ffmpeg -i {inp} -ss {start} -t {time} -filter:v "crop={w}:{h}:{left}:{top}, scale={scale}" crop.mp4'


def compute_bbox_trajectories(trajectories, fps, frame_shape, args):
    """One ffmpeg command per trajectory longer than args.min_frames frames."""
    return [
        compute_bbox(start, end, fps, tube_bbox, frame_shape,
                     inp=args.inp, image_shape=args.image_shape, increase_area=args.increase)
        for _, tube_bbox, start, end in trajectories
        if (end - start) > args.min_frames
    ]
def process_video(args):
    """Track faces through args.inp and return ffmpeg crop commands.

    A detection is kept on an existing trajectory while its IoU with the
    trajectory's *initial* box exceeds args.iou_with_initial; otherwise a
    new trajectory is started. Trajectories shorter than args.min_frames
    are dropped by compute_bbox_trajectories.
    """
    device = 'cpu' if args.cpu else 'cuda'
    # NOTE(review): newer face_alignment releases renamed LandmarksType._2D
    # to LandmarksType.TWO_D; the old spelling is kept for compatibility.
    fa = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D,
                                      flip_input=False, device=device)
    video = imageio.get_reader(args.inp)

    # Each trajectory is [initial_bbox, tube_bbox, start_frame, end_frame].
    trajectories = []
    fps = video.get_meta_data()['fps']
    commands = []
    # Initialized up front: the original crashed with NameError on a video
    # that yielded no frames, since frame_shape was only bound in the loop.
    frame_shape = None
    try:
        for i, frame in tqdm(enumerate(video)):
            frame_shape = frame.shape
            bboxes = extract_bbox(frame, fa)

            ## Keep only trajectories that still overlap a current detection.
            not_valid_trajectories = []
            valid_trajectories = []
            for trajectory in trajectories:
                tube_bbox = trajectory[0]
                intersection = 0
                for bbox in bboxes:
                    intersection = max(intersection, bb_intersection_over_union(tube_bbox, bbox))
                if intersection > args.iou_with_initial:
                    valid_trajectories.append(trajectory)
                else:
                    not_valid_trajectories.append(trajectory)

            # Finished trajectories are flushed to commands immediately.
            commands += compute_bbox_trajectories(not_valid_trajectories, fps, frame_shape, args)
            trajectories = valid_trajectories

            ## Assign each detection to the best-overlapping trajectory,
            ## or start a new one when none matches well enough.
            for bbox in bboxes:
                intersection = 0
                current_trajectory = None
                for trajectory in trajectories:
                    tube_bbox = trajectory[0]
                    current_intersection = bb_intersection_over_union(tube_bbox, bbox)
                    if intersection < current_intersection and current_intersection > args.iou_with_initial:
                        # Reuse the value instead of recomputing the IoU.
                        intersection = current_intersection
                        current_trajectory = trajectory

                if current_trajectory is None:
                    trajectories.append([bbox, bbox, i, i])
                else:
                    current_trajectory[3] = i
                    current_trajectory[1] = join(current_trajectory[1], bbox)

    except IndexError:
        # Bare raise preserves the original traceback (raise (e) did not add anything).
        raise

    commands += compute_bbox_trajectories(trajectories, fps, frame_shape, args)
    return commands
if __name__ == "__main__":
    parser = ArgumentParser()

    parser.add_argument("--image_shape", default=(256, 256), type=lambda x: tuple(map(int, x.split(','))),
                        help="Image shape")
    parser.add_argument("--increase", default=0.1, type=float, help='Increase bbox by this amount')
    # Fixed typo in the user-facing help string ("inital" -> "initial").
    parser.add_argument("--iou_with_initial", type=float, default=0.25,
                        help="The minimal allowed iou with initial bbox")
    parser.add_argument("--inp", required=True, help='Input image or video')
    parser.add_argument("--min_frames", type=int, default=150, help='Minimum number of frames')
    parser.add_argument("--cpu", dest="cpu", action="store_true", help="cpu mode.")

    args = parser.parse_args()

    commands = process_video(args)
    for command in commands:
        # One ready-to-run ffmpeg command per surviving face trajectory.
        print(command)
--git a/firstordermodel/data/chuck.mp4 b/firstordermodel/data/chuck.mp4 new file mode 100644 index 0000000..293a1a3 Binary files /dev/null and b/firstordermodel/data/chuck.mp4 differ diff --git a/firstordermodel/data/taichi-loading/README.md b/firstordermodel/data/taichi-loading/README.md new file mode 100644 index 0000000..e64134b --- /dev/null +++ b/firstordermodel/data/taichi-loading/README.md @@ -0,0 +1,18 @@ +# TaiChi dataset + +The scripts for loading the TaiChi dataset. + +We provide only the id of the corresponding video and the bounding box. The following script will download videos from youtube and crop them according to the provided bounding boxes. + +1) Load youtube-dl: +``` +wget https://yt-dl.org/downloads/latest/youtube-dl -O youtube-dl +chmod a+rx youtube-dl +``` + +2) Run the script to download videos. There are 2 formats that can be used for storing videos: one is .mp4 and the other is a folder with .png images. While .png images occupy significantly more space, the format is lossless and has better i/o performance when training. + +``` +python load_videos.py --metadata taichi-metadata.csv --format .mp4 --out_folder taichi --workers 8 +``` +Select the number of workers based on the number of CPUs available. Note: the .png format takes approximately 80GB. 
"""Download annotated youtube videos and cut them into cropped training chunks."""
import numpy as np
import pandas as pd
import imageio
import os
import subprocess
from multiprocessing import Pool
from itertools import cycle
import warnings
import glob
import time
from tqdm import tqdm
from argparse import ArgumentParser
from skimage import img_as_ubyte
from skimage.transform import resize
warnings.filterwarnings("ignore")

# subprocess.DEVNULL instead of open(os.devnull, 'wb'): the original file
# object was never closed, leaking a file descriptor for the process lifetime.
DEVNULL = subprocess.DEVNULL


def save(path, frames, format):
    """Persist frames either as one .mp4 file or as a folder of .png images.

    Unknown formats abort the process, matching the original behavior.
    """
    if format == '.mp4':
        imageio.mimsave(path, frames)
    elif format == '.png':
        if os.path.exists(path):
            # An existing folder is treated as an already-finished export.
            print("Warning: skipping video %s" % os.path.basename(path))
            return
        else:
            os.makedirs(path)
            for j, frame in enumerate(frames):
                # Use the loop variable directly (was the redundant frames[j]).
                imageio.imsave(os.path.join(path, str(j).zfill(7) + '.png'), frame)
    else:
        print("Unknown format %s" % format)
        exit()


def download(video_id, args):
    """Fetch one youtube video via youtube-dl; return the target .mp4 path.

    Errors are deliberately silenced (best-effort download); callers check
    for the file's existence afterwards.
    """
    video_path = os.path.join(args.video_folder, video_id + ".mp4")
    subprocess.call([args.youtube, '-f', "''best/mp4''", '--write-auto-sub', '--write-sub',
                     '--sub-lang', 'en', '--skip-unavailable-fragments',
                     "https://www.youtube.com/watch?v=" + video_id, "--output",
                     video_path], stdout=DEVNULL, stderr=DEVNULL)
    return video_path
def run(data):
    """Worker: download one youtube video and extract its annotated chunks.

    `data` is a (video_id, args) pair so the function is usable with
    Pool.imap_unordered. Chunk boundaries and bounding boxes come from
    args.metadata; crops are written via save().
    """
    video_id, args = data
    # Hoisted: the original rebuilt this path four times.
    youtube_id = video_id.split('#')[0]
    video_path = os.path.join(args.video_folder, youtube_id + '.mp4')

    if not os.path.exists(video_path):
        download(youtube_id, args)

    if not os.path.exists(video_path):
        print('Can not load video %s, broken link' % youtube_id)
        return
    reader = imageio.get_reader(video_path)
    fps = reader.get_meta_data()['fps']

    df = pd.read_csv(args.metadata)
    df = df[df['video_id'] == video_id]
    if df.shape[0] == 0:
        # No annotated chunks for this id — the original crashed on iloc[0].
        return

    all_chunks_dict = [{'start': df['start'].iloc[j], 'end': df['end'].iloc[j],
                        'bbox': list(map(int, df['bbox'].iloc[j].split('-'))), 'frames': []}
                       for j in range(df.shape[0])]
    # Reference fps/size are what the bboxes were annotated at; the actual
    # downloaded stream may differ, so coordinates are rescaled per frame.
    ref_fps = df['fps'].iloc[0]
    ref_height = df['height'].iloc[0]
    ref_width = df['width'].iloc[0]
    partition = df['partition'].iloc[0]
    try:
        for i, frame in enumerate(reader):
            # Loop-invariant per frame: annotation-to-actual scale factors.
            width_scale = ref_width / frame.shape[1]
            height_scale = ref_height / frame.shape[0]
            for entry in all_chunks_dict:
                # Chunk boundaries are expressed in reference-fps frame numbers.
                if (i * ref_fps >= entry['start'] * fps) and (i * ref_fps < entry['end'] * fps):
                    left, top, right, bot = entry['bbox']
                    left = int(left / width_scale)
                    top = int(top / height_scale)
                    right = int(right / width_scale)
                    bot = int(bot / height_scale)
                    crop = frame[top:bot, left:right]
                    if args.image_shape is not None:
                        crop = img_as_ubyte(resize(crop, args.image_shape, anti_aliasing=True))
                    entry['frames'].append(crop)
    except imageio.core.format.CannotReadFrameError:
        # Truncated/corrupt video: keep whatever frames were read so far
        # (was a bare `None` statement; `pass` states the intent).
        pass

    # Output name: id parts reversed, plus zero-padded chunk boundaries.
    # The '.mp4' suffix is kept even for the .png folder format, matching
    # the original naming scheme.
    first_part = '#'.join(video_id.split('#')[::-1])
    for entry in all_chunks_dict:
        path = first_part + '#' + str(entry['start']).zfill(6) + '#' + str(entry['end']).zfill(6) + '.mp4'
        save(os.path.join(args.out_folder, partition, path), entry['frames'], args.format)
if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("--video_folder", default='youtube-taichi', help='Path to youtube videos')
    parser.add_argument("--metadata", default='taichi-metadata-new.csv', help='Path to metadata')
    parser.add_argument("--out_folder", default='taichi-png', help='Path to output')
    parser.add_argument("--format", default='.png', help='Storing format')
    parser.add_argument("--workers", default=1, type=int, help='Number of workers')
    parser.add_argument("--youtube", default='./youtube-dl', help='Path to youtube-dl')

    parser.add_argument("--image_shape", default=(256, 256), type=lambda x: tuple(map(int, x.split(','))),
                        help="Image shape, None for no resize")

    args = parser.parse_args()
    # exist_ok=True replaces the racy exists()-then-makedirs() pattern.
    os.makedirs(args.video_folder, exist_ok=True)
    os.makedirs(args.out_folder, exist_ok=True)
    for partition in ['test', 'train']:
        os.makedirs(os.path.join(args.out_folder, partition), exist_ok=True)

    df = pd.read_csv(args.metadata)
    video_ids = set(df['video_id'])
    args_list = cycle([args])
    # Context manager terminates workers on exit; the original Pool was
    # never closed or joined.
    with Pool(processes=args.workers) as pool:
        # Drain the iterator purely for tqdm progress; results are unused.
        for _ in tqdm(pool.imap_unordered(run, zip(video_ids, args_list))):
            pass
+NBGys-uxScs,4222,4367,255-80-575-400,29.97,640,480,train +NBGys-uxScs,5361,5656,165-67-501-403,29.97,640,480,train +NBGys-uxScs,5719,5923,160-78-485-403,29.97,640,480,train +NBGys-uxScs,6359,6577,152-51-499-398,29.97,640,480,train +NBGys-uxScs,7477,7830,131-64-476-409,29.97,640,480,train +uEqWZ9S_-Lw,89,581,51-38-489-476,25.0,600,480,test +uEqWZ9S_-Lw,660,1470,69-51-492-474,25.0,600,480,test +uEqWZ9S_-Lw,1470,1773,7-45-437-475,25.0,600,480,test +uEqWZ9S_-Lw,1773,1905,92-74-490-472,25.0,600,480,test +uEqWZ9S_-Lw,2600,2868,106-60-517-471,25.0,600,480,test +uEqWZ9S_-Lw,3142,3350,59-50-478-469,25.0,600,480,test +uEqWZ9S_-Lw,3414,3576,37-56-456-475,25.0,600,480,test +uEqWZ9S_-Lw,3662,3864,103-45-523-465,25.0,600,480,test +uEqWZ9S_-Lw,4340,4738,75-75-472-472,25.0,600,480,test +uEqWZ9S_-Lw,4947,5107,49-34-484-469,25.0,600,480,test +uEqWZ9S_-Lw,5252,5990,44-70-438-464,25.0,600,480,test +uEqWZ9S_-Lw,7083,7713,72-51-499-478,25.0,600,480,test +uEqWZ9S_-Lw,9388,9729,71-55-483-467,25.0,600,480,test +uEqWZ9S_-Lw,9881,10091,46-62-458-474,25.0,600,480,test +ctzMBulw1lI,320,577,85-71-478-464,30.0,640,480,train +ctzMBulw1lI,669,871,162-56-544-438,30.0,640,480,train +ctzMBulw1lI,1191,1338,144-55-533-444,30.0,640,480,train +ctzMBulw1lI,1822,1962,62-55-448-441,30.0,640,480,train +ctzMBulw1lI,2016,2175,125-61-498-434,30.0,640,480,train +ctzMBulw1lI,2509,2680,66-66-440-440,30.0,640,480,train +ctzMBulw1lI,2680,2823,152-107-491-446,30.0,640,480,train +ctzMBulw1lI,2832,3121,141-88-499-446,30.0,640,480,train +ctzMBulw1lI,4044,4326,108-16-546-454,30.0,640,480,train +ctzMBulw1lI,4326,4496,101-40-520-459,30.0,640,480,train +ctzMBulw1lI,4516,4755,56-35-483-462,30.0,640,480,train +ctzMBulw1lI,4796,4972,37-19-477-459,30.0,640,480,train +ctzMBulw1lI,5008,5236,37-2-503-468,30.0,640,480,train +ctzMBulw1lI,5548,5684,66-46-455-435,30.0,640,480,train +ctzMBulw1lI,6610,6754,78-44-491-457,30.0,640,480,train +ctzMBulw1lI,6811,7033,113-41-514-442,30.0,640,480,train 
+ctzMBulw1lI,7350,7511,203-66-589-452,30.0,640,480,train +ctzMBulw1lI,7659,7916,66-60-448-442,30.0,640,480,train +ctzMBulw1lI,9007,9148,94-39-492-437,30.0,640,480,train +ctzMBulw1lI,9643,9910,18-47-426-455,30.0,640,480,train +ctzMBulw1lI,11072,11285,98-44-489-435,30.0,640,480,train +ctzMBulw1lI,11701,11904,45-40-431-426,30.0,640,480,train +cpDT3xFbP4g,7,1031,133-87-482-436,29.97,640,480,train +cpDT3xFbP4g,1031,1301,127-97-467-437,29.97,640,480,train +cpDT3xFbP4g,1395,2419,112-92-473-453,29.97,640,480,train +cpDT3xFbP4g,2419,2837,136-92-484-440,29.97,640,480,train +cpDT3xFbP4g,2837,3861,92-89-460-457,29.97,640,480,train +cpDT3xFbP4g,3861,4805,172-104-508-440,29.97,640,480,train +cpDT3xFbP4g,4805,5829,153-88-514-449,29.97,640,480,train +cpDT3xFbP4g,5829,6020,148-101-487-440,29.97,640,480,train +oYlrB4ddEpM,1167,1646,277-23-962-708,25.0,1280,720,train +oYlrB4ddEpM,2815,2985,401-119-992-710,25.0,1280,720,train +oYlrB4ddEpM,3016,3228,483-105-1068-690,25.0,1280,720,train +oYlrB4ddEpM,5318,5453,235-84-846-695,25.0,1280,720,train +oYlrB4ddEpM,5656,5964,450-52-1101-703,25.0,1280,720,train +YonmpJvwmKM,2264,2578,340-67-963-690,29.97,1280,720,train +YonmpJvwmKM,4852,5063,231-9-920-698,29.97,1280,720,train +YonmpJvwmKM,5597,5854,435-48-1078-691,29.97,1280,720,train +Dn0mNZmAh2k,308,450,95-63-498-466,29.97,640,480,train +Dn0mNZmAh2k,495,975,125-114-481-470,29.97,640,480,train +Dn0mNZmAh2k,1369,1557,129-131-461-463,29.97,640,480,train +Dn0mNZmAh2k,1710,1900,35-101-401-467,29.97,640,480,train +Dn0mNZmAh2k,2085,2388,42-94-417-469,29.97,640,480,train +Dn0mNZmAh2k,3180,3377,108-134-445-471,29.97,640,480,train +Dn0mNZmAh2k,4815,5175,180-70-573-463,29.97,640,480,train +Dn0mNZmAh2k,6165,6379,127-109-498-480,29.97,640,480,train +Qlbxd48tQg8,0,263,346-187-617-458,29.97,854,480,train +Qlbxd48tQg8,358,810,251-79-624-452,29.97,854,480,train +Qlbxd48tQg8,941,1096,290-97-651-458,29.97,854,480,train +Qlbxd48tQg8,2777,3009,269-65-659-455,29.97,854,480,train 
+Qlbxd48tQg8,4052,4190,231-74-606-449,29.97,854,480,train +Qlbxd48tQg8,5244,5418,258-76-634-452,29.97,854,480,train +Qlbxd48tQg8,7183,7329,216-50-616-450,29.97,854,480,train +Qlbxd48tQg8,7329,8248,228-28-657-457,29.97,854,480,train +Qlbxd48tQg8,8248,9272,213-0-670-457,29.97,854,480,train +Qlbxd48tQg8,9429,9934,265-107-609-451,29.97,854,480,train +Qlbxd48tQg8,10527,10655,252-100-599-447,29.97,854,480,train +Qlbxd48tQg8,11211,11405,295-79-661-445,29.97,854,480,train +Qlbxd48tQg8,11431,11616,288-77-659-448,29.97,854,480,train +_XRyc2kiTlM,3844,4023,458-54-1402-998,25.0,1920,1080,train +3vcpk5cMDzo,170,1194,582-46-1565-1029,25.0,1920,1080,train +3vcpk5cMDzo,1410,1943,721-289-1409-977,25.0,1920,1080,train +3vcpk5cMDzo,1943,2157,576-332-1221-977,25.0,1920,1080,train +3vcpk5cMDzo,2226,2483,726-321-1430-1025,25.0,1920,1080,train +3vcpk5cMDzo,2678,3073,530-273-1311-1054,25.0,1920,1080,train +3vcpk5cMDzo,3119,3261,754-384-1303-933,25.0,1920,1080,train +3vcpk5cMDzo,3406,3710,723-377-1269-923,25.0,1920,1080,train +3vcpk5cMDzo,3755,3911,516-331-1159-974,25.0,1920,1080,train +3vcpk5cMDzo,3973,4159,674-323-1367-1016,25.0,1920,1080,train +3vcpk5cMDzo,4159,4317,540-294-1298-1052,25.0,1920,1080,train +D-8bvXjcz8c,120,360,615-63-1203-651,24.0,1280,720,test +D-8bvXjcz8c,1080,1320,467-61-1116-710,24.0,1280,720,test +D-8bvXjcz8c,5640,5824,481-43-1117-679,24.0,1280,720,test +D-8bvXjcz8c,8480,8640,117-72-713-668,24.0,1280,720,test +D-8bvXjcz8c,10458,10680,166-60-772-666,24.0,1280,720,test +Sn9jYXV6kpg,1008,1196,605-454-1123-972,29.0,1920,1080,train +Sn9jYXV6kpg,3372,3522,533-441-1015-923,29.0,1920,1080,train +Sn9jYXV6kpg,3522,3685,537-416-1036-915,29.0,1920,1080,train +Sn9jYXV6kpg,3796,3996,575-389-1104-918,29.0,1920,1080,train +Sn9jYXV6kpg,4161,4353,657-348-1192-883,29.0,1920,1080,train +Sn9jYXV6kpg,4814,4980,961-301-1502-842,29.0,1920,1080,train +Sn9jYXV6kpg,5194,5336,1036-255-1614-833,29.0,1920,1080,train +Sn9jYXV6kpg,7281,7494,522-360-1026-864,29.0,1920,1080,train 
+jDdTqi1t_jE,744,885,278-42-939-703,30.0,1280,720,test +jDdTqi1t_jE,2227,2380,225-7-932-714,30.0,1280,720,test +jDdTqi1t_jE,4480,4823,212-0-930-718,30.0,1280,720,test +jDdTqi1t_jE,5195,5550,213-15-909-711,30.0,1280,720,test +YddPIOYOH6o,0,547,494-313-1089-908,30.0,1920,1080,train +YddPIOYOH6o,609,755,603-354-1163-914,30.0,1920,1080,train +YddPIOYOH6o,1414,1578,571-327-1145-901,30.0,1920,1080,train +YddPIOYOH6o,1681,1862,542-319-1135-912,30.0,1920,1080,train +YddPIOYOH6o,1896,2064,533-341-1124-932,30.0,1920,1080,train +YddPIOYOH6o,2305,2613,780-293-1358-871,30.0,1920,1080,train +YddPIOYOH6o,2736,2955,592-281-1237-926,30.0,1920,1080,train +YddPIOYOH6o,3018,3178,735-386-1237-888,30.0,1920,1080,train +GQ0ef8nh9H8,645,1004,191-127-482-418,29.97,640,480,test +GQ0ef8nh9H8,1004,1380,202-152-464-414,29.97,640,480,test +GQ0ef8nh9H8,2530,2818,245-155-501-411,29.97,640,480,test +1HR5NXiNEjA,122,483,139-78-452-391,25.0,640,480,train +1HR5NXiNEjA,550,864,168-37-525-394,25.0,640,480,train +1HR5NXiNEjA,864,1176,169-91-469-391,25.0,640,480,train +1HR5NXiNEjA,1394,1548,181-98-469-386,25.0,640,480,train +1HR5NXiNEjA,3017,3234,180-23-599-442,25.0,640,480,train +cCx82sXERCI,133,330,376-33-1048-705,24.0,1280,720,train +cCx82sXERCI,330,1320,382-33-1048-699,24.0,1280,720,train +cCx82sXERCI,1340,1680,376-26-1063-713,24.0,1280,720,train +cCx82sXERCI,1800,2055,393-39-1053-699,24.0,1280,720,train +cCx82sXERCI,2055,2400,380-35-1051-706,24.0,1280,720,train +cCx82sXERCI,2400,2588,388-55-1040-707,24.0,1280,720,train +cCx82sXERCI,2589,2882,365-24-1041-700,24.0,1280,720,train +cCx82sXERCI,2883,3158,370-43-1025-698,24.0,1280,720,train +cCx82sXERCI,3158,3334,336-40-1003-707,24.0,1280,720,train +cCx82sXERCI,3368,3542,290-13-979-702,24.0,1280,720,train +cCx82sXERCI,4425,4560,317-19-994-696,24.0,1280,720,train +cCx82sXERCI,4560,4765,355-33-1024-702,24.0,1280,720,train +cCx82sXERCI,4935,5070,358-39-1017-698,24.0,1280,720,train +hNRum5Lhdws,0,525,335-142-878-685,30.0,1280,720,train 
+hNRum5Lhdws,525,765,376-142-917-683,30.0,1280,720,train +hNRum5Lhdws,870,1092,393-165-907-679,30.0,1280,720,train +hNRum5Lhdws,1162,1644,427-169-944-686,30.0,1280,720,train +hNRum5Lhdws,1644,1827,348-168-849-669,30.0,1280,720,train +hNRum5Lhdws,1827,1979,475-183-936-644,30.0,1280,720,train +hNRum5Lhdws,2098,2489,477-153-979-655,30.0,1280,720,train +hNRum5Lhdws,3144,3283,496-195-948-647,30.0,1280,720,train +hNRum5Lhdws,3497,3765,566-226-986-646,30.0,1280,720,train +hNRum5Lhdws,4042,4262,505-229-906-630,30.0,1280,720,train +hNRum5Lhdws,4802,4994,436-229-863-656,30.0,1280,720,train +hNRum5Lhdws,5130,5820,396-213-860-677,30.0,1280,720,train +hNRum5Lhdws,6254,6576,357-208-825-676,30.0,1280,720,train +hNRum5Lhdws,6576,6739,434-214-896-676,30.0,1280,720,train +hNRum5Lhdws,6874,7869,367-213-854-700,30.0,1280,720,train +hNRum5Lhdws,8257,8415,261-199-752-690,30.0,1280,720,train +hNRum5Lhdws,8415,8649,376-213-866-703,30.0,1280,720,train +hNRum5Lhdws,8775,9030,441-245-841-645,30.0,1280,720,train +hNRum5Lhdws,9277,9459,412-236-830-654,30.0,1280,720,train +hNRum5Lhdws,9459,9960,410-214-857-661,30.0,1280,720,train +hNRum5Lhdws,10235,10651,359-215-825-681,30.0,1280,720,train +hNRum5Lhdws,10785,11110,437-214-912-689,30.0,1280,720,train +hNRum5Lhdws,11224,11526,384-181-890-687,30.0,1280,720,train +fl9GjEkHFDg,2550,2730,371-25-1004-658,29.97,1280,720,train +fl9GjEkHFDg,3360,3630,322-28-958-664,29.97,1280,720,train +fl9GjEkHFDg,3810,3972,336-17-954-635,29.97,1280,720,train +fl9GjEkHFDg,5250,5430,331-52-942-663,29.97,1280,720,train +fl9GjEkHFDg,7050,7230,319-39-979-699,29.97,1280,720,train +sZtU-A3BB8k,0,604,682-385-1238-941,29.97,1920,1080,train +sZtU-A3BB8k,634,925,727-390-1273-936,29.97,1920,1080,train +_L745tFFmCQ,5583,5717,1192-143-1800-751,25.0,1920,1080,train +_L745tFFmCQ,8295,8448,674-75-1430-831,25.0,1920,1080,train +_L745tFFmCQ,8472,8617,526-179-1304-957,25.0,1920,1080,train +_zJWt92xoRw,480,902,220-95-760-635,29.97,1280,720,train 
+_zJWt92xoRw,902,1197,331-118-833-620,29.97,1280,720,train +_zJWt92xoRw,1729,1965,297-142-803-648,29.97,1280,720,train +_zJWt92xoRw,2748,2886,441-137-940-636,29.97,1280,720,train +_zJWt92xoRw,3003,3162,558-155-1044-641,29.97,1280,720,train +_zJWt92xoRw,3318,3544,586-149-1076-639,29.97,1280,720,train +_zJWt92xoRw,4396,4816,622-126-1148-652,29.97,1280,720,train +_zJWt92xoRw,5455,5636,682-135-1171-624,29.97,1280,720,train +_zJWt92xoRw,6790,6984,185-139-671-625,29.97,1280,720,train +_zJWt92xoRw,7392,7623,199-126-716-643,29.97,1280,720,train +_zJWt92xoRw,10016,10163,709-189-1053-533,29.97,1280,720,train +_zJWt92xoRw,11616,11760,280-158-684-562,29.97,1280,720,train +_zJWt92xoRw,12263,12401,169-150-602-583,29.97,1280,720,train +_zJWt92xoRw,13262,13392,171-130-591-550,29.97,1280,720,train +_zJWt92xoRw,14658,14877,556-159-999-602,29.97,1280,720,train +_zJWt92xoRw,15383,15513,529-166-971-608,29.97,1280,720,train +_zJWt92xoRw,18850,18996,201-174-605-578,29.97,1280,720,train +_zJWt92xoRw,19464,19699,181-171-584-574,29.97,1280,720,train +_zJWt92xoRw,19860,20123,149-131-589-571,29.97,1280,720,train +AXp3aT5pmWs,0,351,125-16-574-465,29.97,720,480,train +AXp3aT5pmWs,1577,1940,175-17-625-467,29.97,720,480,train +AXp3aT5pmWs,3218,3553,122-17-579-474,29.97,720,480,train +AXp3aT5pmWs,4106,4878,218-37-635-454,29.97,720,480,train +AXp3aT5pmWs,4878,5172,211-10-661-460,29.97,720,480,train +AXp3aT5pmWs,5206,5592,184-39-579-434,29.97,720,480,train +AXp3aT5pmWs,5936,6082,105-39-487-421,29.97,720,480,train +cyN6Eb5Luzo,448,1040,133-186-463-516,29.97,960,720,train +cyN6Eb5Luzo,1040,1407,138-185-485-532,29.97,960,720,train +cyN6Eb5Luzo,1923,2217,97-168-473-544,29.97,960,720,train +cyN6Eb5Luzo,2219,2368,126-183-463-520,29.97,960,720,train +cyN6Eb5Luzo,2464,3028,95-168-452-525,29.97,960,720,train +cyN6Eb5Luzo,3028,3370,122-177-486-541,29.97,960,720,train +cyN6Eb5Luzo,3376,3547,124-191-469-536,29.97,960,720,train +cyN6Eb5Luzo,3801,4825,295-184-644-533,29.97,960,720,train 
+vxsaQvdnEcU,507,779,167-36-606-475,29.97,640,480,train +vxsaQvdnEcU,876,1057,177-99-525-447,29.97,640,480,train +vxsaQvdnEcU,1106,1258,148-50-562-464,29.97,640,480,train +vxsaQvdnEcU,1280,1543,55-54-449-448,29.97,640,480,train +vxsaQvdnEcU,1676,1810,189-108-553-472,29.97,640,480,train +vxsaQvdnEcU,3838,4142,138-41-516-419,29.97,640,480,train +vxsaQvdnEcU,4148,4483,111-91-496-476,29.97,640,480,train +vxsaQvdnEcU,4844,5018,221-39-640-458,29.97,640,480,train +v2zjtNjnj3A,844,1000,529-149-862-482,23.97,1280,720,train +v2zjtNjnj3A,1493,1674,484-89-858-463,23.97,1280,720,train +v2zjtNjnj3A,3198,3326,471-67-881-477,23.97,1280,720,train +nLEzl5ntdeQ,149,431,85-379-731-1025,29.0,810,1440,train +nLEzl5ntdeQ,1080,1280,34-444-676-1086,29.0,810,1440,train +nLEzl5ntdeQ,1280,1440,57-450-692-1085,29.0,810,1440,train +nLEzl5ntdeQ,1448,1610,46-457-689-1100,29.0,810,1440,train +nLEzl5ntdeQ,1610,1920,59-425-744-1110,29.0,810,1440,train +nLEzl5ntdeQ,2000,2228,96-386-794-1084,29.0,810,1440,train +nLEzl5ntdeQ,2400,2571,95-434-714-1053,29.0,810,1440,train +nLEzl5ntdeQ,2670,2880,94-416-758-1080,29.0,810,1440,train +nLEzl5ntdeQ,2970,3204,97-409-789-1101,29.0,810,1440,train +nLEzl5ntdeQ,4538,4680,109-473-724-1088,29.0,810,1440,train +nLEzl5ntdeQ,4980,5220,96-436-767-1107,29.0,810,1440,train +nLEzl5ntdeQ,5280,5430,111-468-759-1116,29.0,810,1440,train +nLEzl5ntdeQ,6030,6263,73-484-704-1115,29.0,810,1440,train +nLEzl5ntdeQ,6302,6444,77-434-769-1126,29.0,810,1440,train +nLEzl5ntdeQ,6444,6632,131-401-794-1064,29.0,810,1440,train +nLEzl5ntdeQ,6632,6781,97-402-761-1066,29.0,810,1440,train +nLEzl5ntdeQ,6781,6961,76-386-760-1070,29.0,810,1440,train +ZwBsM72F0xg,159,608,218-156-602-540,15.79,1280,720,train +ZwBsM72F0xg,812,1041,165-184-547-566,15.79,1280,720,train +ZwBsM72F0xg,1075,1218,94-192-466-564,15.79,1280,720,train +ZwBsM72F0xg,5123,5433,240-179-665-604,15.79,1280,720,train +ZwBsM72F0xg,5644,5780,329-172-751-594,15.79,1280,720,train +ZwBsM72F0xg,7248,7376,584-143-1013-572,15.79,1280,720,train 
+YVmF50cMs_0,5490,5998,352-87-951-686,29.97,1280,720,train +YVmF50cMs_0,6563,6709,356-105-889-638,29.97,1280,720,train +YVmF50cMs_0,6975,7137,386-49-944-607,29.97,1280,720,train +YVmF50cMs_0,7664,7802,313-24-883-594,29.97,1280,720,train +YVmF50cMs_0,8894,9039,254-75-784-605,29.97,1280,720,train +YVmF50cMs_0,9717,9885,361-79-910-628,29.97,1280,720,train +YVmF50cMs_0,9885,10159,346-53-893-600,29.97,1280,720,train +YVmF50cMs_0,10186,10315,326-76-869-619,29.97,1280,720,train +YVmF50cMs_0,10630,10791,368-144-865-641,29.97,1280,720,train +YVmF50cMs_0,12090,12260,174-81-725-632,29.97,1280,720,train +YVmF50cMs_0,12260,12456,358-93-866-601,29.97,1280,720,train +YVmF50cMs_0,12521,12650,415-179-873-637,29.97,1280,720,train +YVmF50cMs_0,12650,12897,360-149-850-639,29.97,1280,720,train +YVmF50cMs_0,13238,13383,381-171-839-629,29.97,1280,720,train +YVmF50cMs_0,13839,13967,300-76-846-622,29.97,1280,720,train +FsmIt38CNog,166,740,670-172-1314-816,29.97,1920,1080,train +FsmIt38CNog,1086,1592,773-238-1339-804,29.97,1920,1080,train +FsmIt38CNog,1665,1834,786-236-1365-815,29.97,1920,1080,train +FsmIt38CNog,1881,2069,664-215-1286-837,29.97,1920,1080,train +FsmIt38CNog,2069,2212,725-248-1275-798,29.97,1920,1080,train +FsmIt38CNog,2424,2652,450-229-1035-814,29.97,1920,1080,train +FsmIt38CNog,2652,2973,252-205-876-829,29.97,1920,1080,train +FsmIt38CNog,4000,4143,375-218-1181-1024,29.97,1920,1080,train +FsmIt38CNog,4632,4761,589-112-1333-856,29.97,1920,1080,train +FsmIt38CNog,4882,5164,760-107-1645-992,29.97,1920,1080,train +FsmIt38CNog,5168,5420,963-131-1802-970,29.97,1920,1080,train +FsmIt38CNog,5725,6257,937-149-1813-1025,29.97,1920,1080,train +FsmIt38CNog,6735,6891,752-200-1576-1024,29.97,1920,1080,train +FsmIt38CNog,7520,7880,810-113-1685-988,29.97,1920,1080,train +FsmIt38CNog,7880,8058,621-145-1372-896,29.97,1920,1080,train +FsmIt38CNog,8179,8351,474-215-1181-922,29.97,1920,1080,train +FsmIt38CNog,8792,9010,174-206-935-967,29.97,1920,1080,train 
+FsmIt38CNog,9102,9262,446-242-1227-1023,29.97,1920,1080,train +FsmIt38CNog,9633,9828,894-296-1634-1036,29.97,1920,1080,train +FsmIt38CNog,10653,10786,982-148-1749-915,29.97,1920,1080,train +FsmIt38CNog,10786,10988,915-175-1626-886,29.97,1920,1080,train +FsmIt38CNog,11065,11230,756-159-1460-863,29.97,1920,1080,train +FsmIt38CNog,11230,11394,798-163-1483-848,29.97,1920,1080,train +FsmIt38CNog,11598,11974,810-87-1574-851,29.97,1920,1080,train +OPcZlXYcdMA,459,913,304-44-919-659,29.97,1280,720,train +OPcZlXYcdMA,913,1363,294-41-941-688,29.97,1280,720,train +OPcZlXYcdMA,1577,2084,283-38-909-664,29.97,1280,720,train +OPcZlXYcdMA,2383,2551,333-49-964-680,29.97,1280,720,train +OPcZlXYcdMA,2873,3231,331-50-958-677,29.97,1280,720,train +OPcZlXYcdMA,3417,3801,326-41-965-680,29.97,1280,720,train +OPcZlXYcdMA,4022,4435,317-39-957-679,29.97,1280,720,train +OPcZlXYcdMA,4611,4948,255-67-867-679,29.97,1280,720,train +OPcZlXYcdMA,4948,5155,420-72-1017-669,29.97,1280,720,train +OPcZlXYcdMA,5363,5913,372-59-974-661,29.97,1280,720,train +OPcZlXYcdMA,6231,6518,294-66-908-680,29.97,1280,720,train +OPcZlXYcdMA,8507,8697,279-81-904-706,29.97,1280,720,train +OPcZlXYcdMA,8697,8896,427-77-1050-700,29.97,1280,720,train +OPcZlXYcdMA,9675,10053,296-37-927-668,29.97,1280,720,train +OPcZlXYcdMA,10333,11357,307-55-922-670,29.97,1280,720,train +OPcZlXYcdMA,11398,11644,324-55-919-650,29.97,1280,720,train +OPcZlXYcdMA,11644,12197,324-26-951-653,29.97,1280,720,train +OPcZlXYcdMA,12198,12399,371-60-943-632,29.97,1280,720,train +OPcZlXYcdMA,12399,13423,316-56-921-661,29.97,1280,720,train +OPcZlXYcdMA,13457,14198,373-64-953-644,29.97,1280,720,train +OPcZlXYcdMA,15589,16099,313-45-923-655,29.97,1280,720,train +L82WHgYRq6I,21,479,494-203-1278-987,25.0,1920,1080,test +L82WHgYRq6I,525,657,547-182-1371-1006,25.0,1920,1080,test +L82WHgYRq6I,684,1309,484-31-1464-1011,25.0,1920,1080,test +L82WHgYRq6I,1384,1524,681-289-1363-971,25.0,1920,1080,test +L82WHgYRq6I,2175,2417,434-296-1132-994,25.0,1920,1080,test 
+L82WHgYRq6I,3035,3238,601-274-1298-971,25.0,1920,1080,test +L82WHgYRq6I,3345,3475,764-261-1448-945,25.0,1920,1080,test +L82WHgYRq6I,3583,3821,983-294-1605-916,25.0,1920,1080,test +L82WHgYRq6I,3967,4149,1101-318-1695-912,25.0,1920,1080,test +L82WHgYRq6I,5369,5623,593-307-1244-958,25.0,1920,1080,test +L82WHgYRq6I,6259,6397,220-215-1042-1037,25.0,1920,1080,test +L82WHgYRq6I,6516,6669,562-254-1273-965,25.0,1920,1080,test +L82WHgYRq6I,8183,8424,818-329-1427-938,25.0,1920,1080,test +Q0tIcxMcm4E,0,591,156-113-1024-981,25.0,1920,1080,train +Q0tIcxMcm4E,798,979,399-176-1194-971,25.0,1920,1080,train +Q0tIcxMcm4E,1142,1337,714-170-1501-957,25.0,1920,1080,train +Q0tIcxMcm4E,1337,1628,923-164-1688-929,25.0,1920,1080,train +Q0tIcxMcm4E,2116,2289,737-158-1499-920,25.0,1920,1080,train +Q0tIcxMcm4E,2367,2763,940-173-1702-935,25.0,1920,1080,train +Q0tIcxMcm4E,2859,3003,970-177-1690-897,25.0,1920,1080,train +Q0tIcxMcm4E,3329,3506,534-163-1269-898,25.0,1920,1080,train +Q0tIcxMcm4E,3636,3849,320-137-1116-933,25.0,1920,1080,train +Q0tIcxMcm4E,3892,4228,275-148-1065-938,25.0,1920,1080,train +Q0tIcxMcm4E,4725,4963,503-161-1267-925,25.0,1920,1080,train +Q0tIcxMcm4E,5647,5976,451-144-1238-931,25.0,1920,1080,train +Q0tIcxMcm4E,5976,6132,357-106-1180-929,25.0,1920,1080,train +Q0tIcxMcm4E,6354,6594,617-119-1442-944,25.0,1920,1080,train +Q0tIcxMcm4E,6616,7000,866-81-1721-936,25.0,1920,1080,train +Q0tIcxMcm4E,7000,7156,646-105-1472-931,25.0,1920,1080,train +Q0tIcxMcm4E,7247,7422,988-155-1756-923,25.0,1920,1080,train +Q0tIcxMcm4E,8272,8448,914-187-1615-888,25.0,1920,1080,train +Q0tIcxMcm4E,8984,9176,545-118-1333-906,25.0,1920,1080,train +Q0tIcxMcm4E,9589,9957,310-163-1052-905,25.0,1920,1080,train +Q0tIcxMcm4E,10364,10567,368-209-1056-897,25.0,1920,1080,train +Q0tIcxMcm4E,10884,11290,234-130-1013-909,25.0,1920,1080,train +Q0tIcxMcm4E,11354,11485,564-218-1269-923,25.0,1920,1080,train +Q0tIcxMcm4E,11528,11747,603-204-1315-916,25.0,1920,1080,train 
+Q0tIcxMcm4E,12030,12519,638-117-1434-913,25.0,1920,1080,train +ceoe2fz648U,116,400,117-103-428-414,29.97,640,480,train +ceoe2fz648U,1401,1532,92-78-425-411,29.97,640,480,train +ceoe2fz648U,1652,1824,119-102-436-419,29.97,640,480,train +ceoe2fz648U,3287,3419,187-134-455-402,29.97,640,480,train +ceoe2fz648U,5820,6002,84-109-395-420,29.97,640,480,train +JVXDPOICOcw,629,871,181-167-446-432,29.97,640,480,train +JTMn6S9cS_A,0,438,317-67-890-640,25.0,1280,720,train +JTMn6S9cS_A,438,677,421-110-976-665,25.0,1280,720,train +JTMn6S9cS_A,677,878,405-82-973-650,25.0,1280,720,train +JTMn6S9cS_A,996,1280,362-27-972-637,25.0,1280,720,train +JTMn6S9cS_A,1280,1456,520-112-1054-646,25.0,1280,720,train +JTMn6S9cS_A,3190,3404,377-115-930-668,25.0,1280,720,train +JTMn6S9cS_A,3780,3995,342-109-891-658,25.0,1280,720,train +JTMn6S9cS_A,4684,4907,642-112-1154-624,25.0,1280,720,train +JTMn6S9cS_A,5235,5378,577-120-1029-572,25.0,1280,720,train +JTMn6S9cS_A,6673,6878,187-101-773-687,25.0,1280,720,train +JTMn6S9cS_A,7378,7582,219-43-891-715,25.0,1280,720,train +JTMn6S9cS_A,8115,8425,342-64-905-627,25.0,1280,720,train +-YxgJ_xXcgU,119,281,327-120-643-436,30.0,1280,720,train +-YxgJ_xXcgU,471,610,105-27-601-523,30.0,1280,720,train +-YxgJ_xXcgU,3078,3220,619-133-1078-592,30.0,1280,720,train +l5gCGxF-uM0,1479,1893,139-38-541-440,29.97,654,480,train +l5gCGxF-uM0,2139,2531,149-71-531-453,29.97,654,480,train +l5gCGxF-uM0,2772,3096,136-28-565-457,29.97,654,480,train +l5gCGxF-uM0,3265,3594,180-94-513-427,29.97,654,480,train +l5gCGxF-uM0,3900,4195,156-68-536-448,29.97,654,480,train +l5gCGxF-uM0,4385,4618,135-122-452-439,29.97,654,480,train +l5gCGxF-uM0,4963,5133,117-68-488-439,29.97,654,480,train +l5gCGxF-uM0,5300,5580,150-50-547-447,29.97,654,480,train +nFDRPyBl8WY,173,810,123-51-520-448,29.97,640,480,train +nFDRPyBl8WY,916,1055,175-95-506-426,29.97,640,480,train +nFDRPyBl8WY,1332,1700,120-86-462-428,29.97,640,480,train +nFDRPyBl8WY,1702,1919,132-90-463-421,29.97,640,480,train 
+nFDRPyBl8WY,2042,2239,122-74-482-434,29.97,640,480,train +nFDRPyBl8WY,2239,2427,74-55-440-421,29.97,640,480,train +nFDRPyBl8WY,3036,3235,133-26-569-462,29.97,640,480,train +nFDRPyBl8WY,4062,4230,149-51-538-440,29.97,640,480,train +nFDRPyBl8WY,4248,4447,39-57-414-432,29.97,640,480,train +nFDRPyBl8WY,5504,5652,64-57-427-420,29.97,640,480,train +nFDRPyBl8WY,5671,5846,112-33-519-440,29.97,640,480,train +nFDRPyBl8WY,5873,6318,73-21-475-423,29.97,640,480,train +nFDRPyBl8WY,6831,6980,53-77-413-437,29.97,640,480,train +nFDRPyBl8WY,7059,7204,82-81-439-438,29.97,640,480,train +nFDRPyBl8WY,8310,8469,154-111-420-377,29.97,640,480,train +nFDRPyBl8WY,9525,9690,74-100-370-396,29.97,640,480,train +nFDRPyBl8WY,10690,10989,149-47-548-446,29.97,640,480,train +vgk5TBac16A,1158,1338,136-79-503-446,29.97,640,480,test +vgk5TBac16A,1338,1809,83-81-444-442,29.97,640,480,test +vgk5TBac16A,2020,2213,74-75-424-425,29.97,640,480,test +vgk5TBac16A,2213,2654,147-77-494-424,29.97,640,480,test +vgk5TBac16A,3741,4765,157-15-617-475,29.97,640,480,test +vgk5TBac16A,4765,5789,148-6-616-474,29.97,640,480,test +vgk5TBac16A,6244,7268,148-7-613-472,29.97,640,480,test +vgk5TBac16A,7276,8115,149-15-606-472,29.97,640,480,test +SllIuMCgJKM,1687,2250,129-85-487-443,29.97,640,480,train +SllIuMCgJKM,3886,4027,141-61-506-426,29.97,640,480,train +SllIuMCgJKM,4593,4775,177-51-526-400,29.97,640,480,train +SllIuMCgJKM,5101,5235,86-54-450-418,29.97,640,480,train +SllIuMCgJKM,9552,9796,149-34-548-433,29.97,640,480,train +aDyyTMUBoLE,164,351,333-46-1266-979,23.98,1920,1080,test +aDyyTMUBoLE,375,518,319-98-1293-1072,23.98,1920,1080,test +aDyyTMUBoLE,518,884,318-49-1336-1067,23.98,1920,1080,test +aDyyTMUBoLE,949,1092,437-91-1381-1035,23.98,1920,1080,test +aDyyTMUBoLE,1769,1957,241-19-1262-1040,23.98,1920,1080,test +gSIN9Mhm5oQ,127,303,852-25-1728-901,25.0,1920,1080,train +gSIN9Mhm5oQ,333,862,785-138-1530-883,25.0,1920,1080,train +gSIN9Mhm5oQ,1028,1278,722-111-1498-887,25.0,1920,1080,train 
+gSIN9Mhm5oQ,1450,1649,848-63-1697-912,25.0,1920,1080,train +gSIN9Mhm5oQ,1649,1848,755-117-1551-913,25.0,1920,1080,train +gSIN9Mhm5oQ,2002,2264,770-61-1682-973,25.0,1920,1080,train +gSIN9Mhm5oQ,2566,2702,453-129-1297-973,25.0,1920,1080,train +gSIN9Mhm5oQ,3199,3411,391-141-1282-1032,25.0,1920,1080,train +gSIN9Mhm5oQ,3625,3853,640-143-1512-1015,25.0,1920,1080,train +gSIN9Mhm5oQ,3992,4127,623-43-1599-1019,25.0,1920,1080,train +gSIN9Mhm5oQ,4160,4340,614-182-1431-999,25.0,1920,1080,train +gSIN9Mhm5oQ,4387,4556,639-151-1478-990,25.0,1920,1080,train +gSIN9Mhm5oQ,4870,5030,119-204-920-1005,25.0,1920,1080,train +gSIN9Mhm5oQ,5030,5257,299-204-1094-999,25.0,1920,1080,train +gSIN9Mhm5oQ,5257,5647,355-164-1198-1007,25.0,1920,1080,train +gSIN9Mhm5oQ,5833,6171,433-135-1291-993,25.0,1920,1080,train +gSIN9Mhm5oQ,6309,6449,500-153-1357-1010,25.0,1920,1080,train +gSIN9Mhm5oQ,6567,6711,552-140-1405-993,25.0,1920,1080,train +gSIN9Mhm5oQ,7285,7664,589-117-1447-975,25.0,1920,1080,train +gSIN9Mhm5oQ,8046,8212,747-38-1672-963,25.0,1920,1080,train +gSIN9Mhm5oQ,8217,8453,643-61-1527-945,25.0,1920,1080,train +gSIN9Mhm5oQ,8453,8586,544-148-1383-987,25.0,1920,1080,train +gSIN9Mhm5oQ,8682,8867,429-149-1276-996,25.0,1920,1080,train +gSIN9Mhm5oQ,9189,9506,296-145-1200-1049,25.0,1920,1080,train +gSIN9Mhm5oQ,9855,10158,676-72-1596-992,25.0,1920,1080,train +gSIN9Mhm5oQ,10184,10489,394-95-1245-946,25.0,1920,1080,train +gSIN9Mhm5oQ,10489,10723,387-118-1255-986,25.0,1920,1080,train +gSIN9Mhm5oQ,10723,10912,306-60-1185-939,25.0,1920,1080,train +gSIN9Mhm5oQ,11170,11359,410-26-1376-992,25.0,1920,1080,train +gSIN9Mhm5oQ,11488,11674,264-16-1276-1028,25.0,1920,1080,train +gSIN9Mhm5oQ,11761,12221,648-85-1604-1041,25.0,1920,1080,train +gSIN9Mhm5oQ,12221,12349,810-89-1761-1040,25.0,1920,1080,train +IdnOeLhQWv4,720,1020,108-35-522-449,29.97,640,480,train +FfvQG67eS7k,198,1222,200-14-894-708,23.97,1280,720,train +FfvQG67eS7k,1222,2246,162-24-854-716,23.97,1280,720,train 
+FfvQG67eS7k,2246,3270,187-15-883-711,23.97,1280,720,train +FfvQG67eS7k,3270,4294,192-89-821-718,23.97,1280,720,train +FfvQG67eS7k,4294,5318,197-106-806-715,23.97,1280,720,train +FfvQG67eS7k,5318,5619,194-125-784-715,23.97,1280,720,train +FfvQG67eS7k,5619,6483,171-93-797-719,23.97,1280,720,train +FfvQG67eS7k,8108,8298,517-64-1110-657,23.97,1280,720,train +_GY8d9rkDh8,272,773,389-136-872-619,29.97,1280,720,train +_GY8d9rkDh8,1083,1261,295-113-898-716,29.97,1280,720,train +_GY8d9rkDh8,2291,2428,495-127-1080-712,29.97,1280,720,train +_GY8d9rkDh8,2863,3046,134-118-730-714,29.97,1280,720,train +_GY8d9rkDh8,3119,3256,180-179-701-700,29.97,1280,720,train +_GY8d9rkDh8,3725,3877,460-172-987-699,29.97,1280,720,train +_GY8d9rkDh8,3975,4104,366-135-940-709,29.97,1280,720,train +_GY8d9rkDh8,4150,4360,648-161-1160-673,29.97,1280,720,train +_GY8d9rkDh8,4577,4712,395-176-835-616,29.97,1280,720,train +_GY8d9rkDh8,4953,5123,479-222-875-618,29.97,1280,720,train +_GY8d9rkDh8,5370,5773,494-185-903-594,29.97,1280,720,train +_GY8d9rkDh8,6230,6665,297-40-951-694,29.97,1280,720,train +0Q914by5A98,10440,10764,261-91-559-389,29.97,854,480,test +2GX4WZSUVPo,368,1167,299-44-957-702,29.97,1280,720,train +2GX4WZSUVPo,1274,1657,251-5-963-717,29.97,1280,720,train +2GX4WZSUVPo,1726,2096,357-62-1007-712,29.97,1280,720,train +2GX4WZSUVPo,2193,2616,489-54-1130-695,29.97,1280,720,train +2GX4WZSUVPo,3215,3395,323-52-980-709,29.97,1280,720,train +2GX4WZSUVPo,3438,3739,303-74-921-692,29.97,1280,720,train +2GX4WZSUVPo,3739,4082,442-64-1052-674,29.97,1280,720,train +2GX4WZSUVPo,4082,4318,481-33-1109-661,29.97,1280,720,train +2GX4WZSUVPo,4361,4619,460-91-1029-660,29.97,1280,720,train +2GX4WZSUVPo,4619,4912,370-63-971-664,29.97,1280,720,train +2GX4WZSUVPo,5019,5273,416-81-1011-676,29.97,1280,720,train +2GX4WZSUVPo,5317,5468,370-71-990-691,29.97,1280,720,train +2GX4WZSUVPo,5701,5871,296-30-978-712,29.97,1280,720,train +2GX4WZSUVPo,5990,6343,232-38-873-679,29.97,1280,720,train 
+2GX4WZSUVPo,6343,6479,398-54-1038-694,29.97,1280,720,train +2GX4WZSUVPo,6510,6656,305-83-902-680,29.97,1280,720,train +2GX4WZSUVPo,6747,7161,324-68-927-671,29.97,1280,720,train +2GX4WZSUVPo,7161,7295,484-70-1042-628,29.97,1280,720,train +2GX4WZSUVPo,7399,7587,395-63-975-643,29.97,1280,720,train +2GX4WZSUVPo,7668,8059,355-53-974-672,29.97,1280,720,train +2GX4WZSUVPo,8850,9030,444-48-1097-701,29.97,1280,720,train +2GX4WZSUVPo,9125,9665,227-23-914-710,29.97,1280,720,train +2GX4WZSUVPo,9812,10048,476-35-1134-693,29.97,1280,720,train +2GX4WZSUVPo,10357,10532,499-59-1149-709,29.97,1280,720,train +2GX4WZSUVPo,10555,10919,262-34-921-693,29.97,1280,720,train +2GX4WZSUVPo,11490,11704,415-33-1088-706,29.97,1280,720,train +2GX4WZSUVPo,11756,12092,456-37-1129-710,29.97,1280,720,train +2GX4WZSUVPo,12114,12597,332-59-989-716,29.97,1280,720,train +2GX4WZSUVPo,12639,12777,468-84-1093-709,29.97,1280,720,train +2GX4WZSUVPo,12868,13033,400-65-1036-701,29.97,1280,720,train +2GX4WZSUVPo,13323,13535,480-48-1147-715,29.97,1280,720,train +2GX4WZSUVPo,13926,14109,547-25-1206-684,29.97,1280,720,train +2GX4WZSUVPo,14392,14604,397-46-1034-683,29.97,1280,720,train +2GX4WZSUVPo,14619,14785,426-58-1030-662,29.97,1280,720,train +2GX4WZSUVPo,14824,15062,341-8-1034-701,29.97,1280,720,train +2GX4WZSUVPo,15139,15479,309-62-957-710,29.97,1280,720,train +2GX4WZSUVPo,15507,15705,310-82-926-698,29.97,1280,720,train +2GX4WZSUVPo,17311,17495,193-43-838-688,29.97,1280,720,train +2GX4WZSUVPo,17524,18103,301-9-999-707,29.97,1280,720,train +ZOhoUyxIgaU,177,1201,690-370-1331-1011,25.0,1920,1080,train +ZOhoUyxIgaU,1243,1796,730-369-1385-1024,25.0,1920,1080,train +ZOhoUyxIgaU,1974,2113,700-314-1367-981,25.0,1920,1080,train +ZOhoUyxIgaU,2646,2856,628-204-1470-1046,25.0,1920,1080,train +ZOhoUyxIgaU,3003,3225,618-265-1433-1080,25.0,1920,1080,train +ZOhoUyxIgaU,3493,4517,722-308-1473-1059,25.0,1920,1080,train +ZOhoUyxIgaU,4517,4752,744-374-1393-1023,25.0,1920,1080,train 
+ZOhoUyxIgaU,4818,5005,771-362-1465-1056,25.0,1920,1080,train +ZOhoUyxIgaU,6950,7120,755-354-1468-1067,25.0,1920,1080,train +ZOhoUyxIgaU,7298,7427,891-359-1612-1080,25.0,1920,1080,train +QCr8cdnal6o,262,1286,195-105-497-407,25.0,640,480,train +QCr8cdnal6o,1330,1522,142-116-428-402,25.0,640,480,train +QCr8cdnal6o,1615,2639,147-96-454-403,25.0,640,480,train +QCr8cdnal6o,3184,3432,151-82-482-413,25.0,640,480,train +QCr8cdnal6o,3713,3866,241-85-563-407,25.0,640,480,train +QCr8cdnal6o,4301,4453,205-106-504-405,25.0,640,480,train +QCr8cdnal6o,4821,5274,162-82-490-410,25.0,640,480,train +QCr8cdnal6o,5627,5779,206-129-481-404,25.0,640,480,train +QCr8cdnal6o,5952,6285,199-145-467-413,25.0,640,480,train +QCr8cdnal6o,6495,6671,188-124-459-395,25.0,640,480,train +QCr8cdnal6o,6671,6981,151-128-423-400,25.0,640,480,train +oNkBx4CZuEg,0,1024,552-115-1420-983,25.0,1920,1080,test +oNkBx4CZuEg,1024,2048,613-130-1480-997,25.0,1920,1080,test +oNkBx4CZuEg,2070,2311,527-107-1416-996,25.0,1920,1080,test +oNkBx4CZuEg,2312,2605,624-104-1513-993,25.0,1920,1080,test +oNkBx4CZuEg,2605,3529,885-140-1706-961,25.0,1920,1080,test +oNkBx4CZuEg,3647,3919,564-137-1392-965,25.0,1920,1080,test +oNkBx4CZuEg,3919,4109,558-93-1420-955,25.0,1920,1080,test +oNkBx4CZuEg,4110,4634,517-122-1364-969,25.0,1920,1080,test +oNkBx4CZuEg,4634,4861,697-115-1534-952,25.0,1920,1080,test +oNkBx4CZuEg,4861,5171,528-166-1289-927,25.0,1920,1080,test +oNkBx4CZuEg,5172,5811,558-174-1300-916,25.0,1920,1080,test +oNkBx4CZuEg,5811,6053,691-181-1402-892,25.0,1920,1080,test +oNkBx4CZuEg,6170,6468,662-142-1382-862,25.0,1920,1080,test +Xb93PUzUAYA,3,576,118-23-555-460,29.97,640,480,train +Xb93PUzUAYA,576,928,143-0-624-480,29.97,640,480,train +Xb93PUzUAYA,928,1326,106-5-575-474,29.97,640,480,train +Xb93PUzUAYA,2350,2714,134-25-589-480,29.97,640,480,train +Xb93PUzUAYA,2714,2880,164-2-629-467,29.97,640,480,train +Xb93PUzUAYA,4117,4281,130-21-579-470,29.97,640,480,train +Xb93PUzUAYA,5409,5946,98-17-528-447,29.97,640,480,train 
+Xb93PUzUAYA,5954,6978,139-26-560-447,29.97,640,480,train +Xb93PUzUAYA,7018,7350,158-50-543-435,29.97,640,480,train +Xb93PUzUAYA,7350,7807,110-44-520-454,29.97,640,480,train +Xb93PUzUAYA,7807,8055,106-42-538-474,29.97,640,480,train +Xb93PUzUAYA,8055,8241,92-20-526-454,29.97,640,480,train +Xb93PUzUAYA,8241,8766,110-29-556-475,29.97,640,480,train +Xb93PUzUAYA,8766,9236,101-28-535-462,29.97,640,480,train +Xb93PUzUAYA,9236,9457,151-24-578-451,29.97,640,480,train +Xb93PUzUAYA,9485,10110,143-44-547-448,29.97,640,480,train +Xb93PUzUAYA,10110,10924,165-24-587-446,29.97,640,480,train +Xb93PUzUAYA,10924,11662,86-38-491-443,29.97,640,480,train +Xb93PUzUAYA,11662,12033,127-18-544-435,29.97,640,480,train +FvQUNJNvrU4,5413,5735,286-213-677-604,29.97,1280,720,train +FvQUNJNvrU4,10877,11039,523-164-935-576,29.97,1280,720,train +FvQUNJNvrU4,12496,12740,258-150-696-588,29.97,1280,720,train +FvQUNJNvrU4,15530,15943,509-79-1075-645,29.97,1280,720,train +-K57q5o3dn4,50,481,147-47-533-433,25.0,640,480,train +-K57q5o3dn4,481,633,109-28-499-418,25.0,640,480,train +-K57q5o3dn4,765,923,139-24-515-400,25.0,640,480,train +-K57q5o3dn4,1170,1330,102-39-483-420,25.0,640,480,train +-K57q5o3dn4,1875,2115,145-20-518-393,25.0,640,480,train +-K57q5o3dn4,2353,2509,135-45-538-448,25.0,640,480,train +-K57q5o3dn4,6353,6495,119-31-512-424,25.0,640,480,train +-K57q5o3dn4,8015,8240,128-62-445-379,25.0,640,480,train +-K57q5o3dn4,9860,9990,186-75-506-395,25.0,640,480,train +bI1XOdhWVaw,299,879,243-94-745-596,29.97,932,720,train +bI1XOdhWVaw,969,1132,281-134-763-616,29.97,932,720,train +bI1XOdhWVaw,1749,1934,295-95-835-635,29.97,932,720,train +bI1XOdhWVaw,2735,3127,221-141-682-602,29.97,932,720,train +bI1XOdhWVaw,4512,4668,157-157-581-581,29.97,932,720,train +IWUTAZF8yVw,1068,1204,686-455-1241-1010,29.97,1920,1080,train +IWUTAZF8yVw,1317,1497,918-467-1475-1024,29.97,1920,1080,train +IWUTAZF8yVw,3247,3401,732-441-1308-1017,29.97,1920,1080,train +IWUTAZF8yVw,3765,3986,535-480-1091-1036,29.97,1920,1080,train 
+IWUTAZF8yVw,5489,5628,534-475-1063-1004,29.97,1920,1080,train +IWUTAZF8yVw,7160,7404,1235-458-1807-1030,29.97,1920,1080,train +IWUTAZF8yVw,8978,9212,253-439-858-1044,29.97,1920,1080,train +IWUTAZF8yVw,10436,10652,387-385-1045-1043,29.97,1920,1080,train +EaEZVfhn07o,3814,4838,84-20-531-467,29.97,640,480,train +EaEZVfhn07o,4838,5107,90-34-520-464,29.97,640,480,train +LmHZE5oklag,628,1146,255-145-1130-1020,29.97,1440,1080,train +LmHZE5oklag,1146,2045,177-102-1106-1031,29.97,1440,1080,train +LmHZE5oklag,2056,2519,172-157-1030-1015,29.97,1440,1080,train +LmHZE5oklag,2519,3238,281-187-1149-1055,29.97,1440,1080,train +LmHZE5oklag,3245,3444,169-145-1022-998,29.97,1440,1080,train +LmHZE5oklag,3444,3932,224-192-1077-1045,29.97,1440,1080,train +LmHZE5oklag,3932,4595,238-154-1078-994,29.97,1440,1080,train +LmHZE5oklag,4812,4951,306-206-1081-981,29.97,1440,1080,train +LmHZE5oklag,4951,5406,231-179-1073-1021,29.97,1440,1080,train +LmHZE5oklag,6213,6617,240-189-1107-1056,29.97,1440,1080,train +7dYtvnHGNxQ,237,390,156-58-544-446,29.97,720,480,train +7dYtvnHGNxQ,526,672,165-75-545-455,29.97,720,480,train +7dYtvnHGNxQ,672,802,173-80-554-461,29.97,720,480,train +7dYtvnHGNxQ,1040,1182,179-67-532-420,29.97,720,480,train +7dYtvnHGNxQ,1479,1611,197-93-564-460,29.97,720,480,train +7dYtvnHGNxQ,1644,2294,164-54-560-450,29.97,720,480,train +7dYtvnHGNxQ,2300,2440,154-76-512-434,29.97,720,480,train +7dYtvnHGNxQ,2640,2856,130-45-516-431,29.97,720,480,train +7dYtvnHGNxQ,3750,4079,175-50-556-431,29.97,720,480,train +7dYtvnHGNxQ,4079,4278,194-15-601-422,29.97,720,480,train +7dYtvnHGNxQ,4299,4448,218-21-640-443,29.97,720,480,train +7dYtvnHGNxQ,4545,4732,193-37-585-429,29.97,720,480,train +7dYtvnHGNxQ,4934,5501,221-55-607-441,29.97,720,480,train +7dYtvnHGNxQ,5675,5897,142-71-531-460,29.97,720,480,train +7dYtvnHGNxQ,6009,6169,162-71-545-454,29.97,720,480,train +7dYtvnHGNxQ,7336,7508,124-82-517-475,29.97,720,480,train +4rW_9Y1eci8,1215,1409,346-27-922-603,25.0,1280,720,train 
+4rW_9Y1eci8,1409,1745,280-26-785-531,25.0,1280,720,train +4rW_9Y1eci8,1927,2078,310-29-848-567,25.0,1280,720,train +4rW_9Y1eci8,2272,2460,229-10-778-559,25.0,1280,720,train +4rW_9Y1eci8,2460,2611,164-30-725-591,25.0,1280,720,train +4rW_9Y1eci8,2919,3089,234-23-710-499,25.0,1280,720,train +4rW_9Y1eci8,3420,3556,230-11-740-521,25.0,1280,720,train +4rW_9Y1eci8,4067,4339,189-4-728-543,25.0,1280,720,train +4rW_9Y1eci8,4401,4569,125-9-641-525,25.0,1280,720,train +4rW_9Y1eci8,4569,4710,199-15-727-543,25.0,1280,720,train +4rW_9Y1eci8,5157,5437,311-14-830-533,25.0,1280,720,train +mndSqTrxpts,0,175,5-168-496-659,30.0,1280,720,test +mndSqTrxpts,2234,2406,700-130-1230-660,30.0,1280,720,test +mndSqTrxpts,2406,2550,565-132-1069-636,30.0,1280,720,test +mndSqTrxpts,2911,3105,576-116-1123-663,30.0,1280,720,test +mndSqTrxpts,3600,3794,338-136-879-677,30.0,1280,720,test +Wh3AmDsdQtM,0,1024,149-52-565-468,29.97,640,480,train +Wh3AmDsdQtM,1024,2048,127-55-539-467,29.97,640,480,train +Wh3AmDsdQtM,2048,2187,109-74-506-471,29.97,640,480,train +Wh3AmDsdQtM,2187,3211,122-53-540-471,29.97,640,480,train +Wh3AmDsdQtM,3211,4235,127-64-534-471,29.97,640,480,train +Wh3AmDsdQtM,4235,5227,136-66-538-468,29.97,640,480,train +Wh3AmDsdQtM,5228,5613,124-61-537-474,29.97,640,480,train +Wh3AmDsdQtM,5613,5834,78-56-489-467,29.97,640,480,train +Wh3AmDsdQtM,5834,6097,88-35-524-471,29.97,640,480,train +HZks-9WmW5s,100,429,120-91-468-439,29.97,640,480,train +HZks-9WmW5s,473,881,116-84-464-432,29.97,640,480,train +HZks-9WmW5s,937,1383,113-80-461-428,29.97,640,480,train +HZks-9WmW5s,1786,2031,159-70-555-466,29.97,640,480,train +HZks-9WmW5s,2890,3097,169-73-551-455,29.97,640,480,train +HZks-9WmW5s,3136,3563,157-72-557-472,29.97,640,480,train +dSIkl9ONOJc,400,1424,245-88-615-458,30.0,854,480,train +dSIkl9ONOJc,1478,2055,268-76-615-423,30.0,854,480,train +dSIkl9ONOJc,2133,3039,248-89-620-461,30.0,854,480,train +dSIkl9ONOJc,3039,3199,254-97-616-459,30.0,854,480,train 
+dSIkl9ONOJc,3199,3481,249-88-621-460,30.0,854,480,train +dSIkl9ONOJc,3481,4328,270-85-612-427,30.0,854,480,train +dSIkl9ONOJc,4389,4739,244-85-623-464,30.0,854,480,train +dSIkl9ONOJc,4777,5311,250-92-620-462,30.0,854,480,train +dSIkl9ONOJc,5311,5762,238-74-627-463,30.0,854,480,train +dSIkl9ONOJc,6170,6801,241-76-622-457,30.0,854,480,train +dSIkl9ONOJc,6820,7157,242-75-627-460,30.0,854,480,train +dSIkl9ONOJc,7536,7964,244-58-613-427,30.0,854,480,train +dSIkl9ONOJc,7964,8151,240-38-636-434,30.0,854,480,train +dSIkl9ONOJc,8249,8560,241-72-628-459,30.0,854,480,train +dSIkl9ONOJc,8573,8721,236-71-626-461,30.0,854,480,train +OOzyPCkjVmE,250,500,157-91-490-424,30.0,640,480,train +OOzyPCkjVmE,500,750,177-95-495-413,30.0,640,480,train +OOzyPCkjVmE,750,893,182-110-493-421,30.0,640,480,train +OOzyPCkjVmE,1868,2000,130-113-441-424,30.0,640,480,train +OOzyPCkjVmE,2064,2250,115-98-450-433,30.0,640,480,train +OOzyPCkjVmE,2327,2500,108-109-419-420,30.0,640,480,train +OOzyPCkjVmE,2591,2874,93-102-401-410,30.0,640,480,train +OOzyPCkjVmE,5500,5670,84-92-407-415,30.0,640,480,train +OOzyPCkjVmE,7316,7454,167-91-485-409,30.0,640,480,train +OOzyPCkjVmE,7573,7742,128-80-479-431,30.0,640,480,train +OOzyPCkjVmE,10361,10500,200-98-478-376,30.0,640,480,train +5ujMzSyHO_8,687,1459,129-124-420-415,25.0,600,480,train +5ujMzSyHO_8,1459,1690,162-120-453-411,25.0,600,480,train +skHdRWJ_Zuk,2035,2471,300-175-839-714,29.97,1280,720,train +skHdRWJ_Zuk,2895,3164,241-154-796-709,29.97,1280,720,train +skHdRWJ_Zuk,3227,3410,255-145-827-717,29.97,1280,720,train +skHdRWJ_Zuk,13321,13499,288-164-837-713,29.97,1280,720,train +skHdRWJ_Zuk,16937,17125,435-198-926-689,29.97,1280,720,train +skHdRWJ_Zuk,17825,17983,256-153-820-717,29.97,1280,720,train +skHdRWJ_Zuk,18854,19003,236-136-802-702,29.97,1280,720,train +skHdRWJ_Zuk,19003,19142,250-108-848-706,29.97,1280,720,train +skHdRWJ_Zuk,19651,19812,350-159-909-718,29.97,1280,720,train +gkt8MPv8g1k,4933,5091,234-142-725-633,29.97,1280,720,train 
+gkt8MPv8g1k,5143,5440,228-137-731-640,29.97,1280,720,train +gkt8MPv8g1k,7498,7631,238-124-771-657,29.97,1280,720,train +gkt8MPv8g1k,7840,8140,253-99-793-639,29.97,1280,720,train +gkt8MPv8g1k,8470,8648,276-122-730-576,29.97,1280,720,train +gkt8MPv8g1k,9123,9323,339-150-799-610,29.97,1280,720,train +gkt8MPv8g1k,10282,10432,291-119-801-629,29.97,1280,720,train +gkt8MPv8g1k,11308,11465,315-36-916-637,29.97,1280,720,train +gkt8MPv8g1k,11865,11997,460-81-1025-646,29.97,1280,720,train +gkt8MPv8g1k,12019,12165,641-103-1201-663,29.97,1280,720,train +gkt8MPv8g1k,13072,13331,229-108-750-629,29.97,1280,720,train +gkt8MPv8g1k,13502,13686,351-136-854-639,29.97,1280,720,train +gkt8MPv8g1k,14795,14974,245-59-782-596,29.97,1280,720,train +olTSwTofrDM,420,660,278-26-909-657,29.97,1280,720,train +olTSwTofrDM,800,1163,347-57-944-654,29.97,1280,720,train +olTSwTofrDM,1163,1410,369-27-1010-668,29.97,1280,720,train +olTSwTofrDM,1839,2231,367-70-948-651,29.97,1280,720,train +olTSwTofrDM,2392,2524,463-81-1023-641,29.97,1280,720,train +olTSwTofrDM,2524,2697,361-84-911-634,29.97,1280,720,train +olTSwTofrDM,2702,2937,463-70-1035-642,29.97,1280,720,train +olTSwTofrDM,3263,3403,311-83-871-643,29.97,1280,720,train +olTSwTofrDM,3861,4063,400-26-1005-631,29.97,1280,720,train +olTSwTofrDM,4251,4385,180-49-765-634,29.97,1280,720,train +olTSwTofrDM,4600,4745,307-83-861-637,29.97,1280,720,train +olTSwTofrDM,4745,5029,211-95-756-640,29.97,1280,720,train +olTSwTofrDM,5238,5373,251-90-796-635,29.97,1280,720,train +olTSwTofrDM,6268,6492,284-46-890-652,29.97,1280,720,train +jFuiqvVaESE,450,1050,303-142-717-556,29.97,960,720,train +jFuiqvVaESE,1561,1742,259-85-773-599,29.97,960,720,train +jFuiqvVaESE,2154,2340,249-133-702-586,29.97,960,720,train +jFuiqvVaESE,2661,2850,193-99-664-570,29.97,960,720,train +jFuiqvVaESE,3980,4175,247-94-733-580,29.97,960,720,train +jFuiqvVaESE,4222,4350,170-89-719-638,29.97,960,720,train +jFuiqvVaESE,4873,5021,276-109-742-575,29.97,960,720,train 
+jFuiqvVaESE,5650,5884,231-93-714-576,29.97,960,720,train +jFuiqvVaESE,6800,6950,269-138-706-575,29.97,960,720,train +Mo87mOAnAio,115,579,498-178-953-633,29.97,1280,720,train +Mo87mOAnAio,658,878,481-169-934-622,29.97,1280,720,train +Mo87mOAnAio,878,1151,518-195-966-643,29.97,1280,720,train +Mo87mOAnAio,1842,2317,470-212-889-631,29.97,1280,720,train +Mo87mOAnAio,2321,2696,497-238-837-578,29.97,1280,720,train +Mo87mOAnAio,5003,5136,340-155-896-711,29.97,1280,720,train +kt2xwj8xiZI,381,996,241-119-713-591,29.97,1280,720,train +kt2xwj8xiZI,1022,1224,238-162-683-607,29.97,1280,720,train +kt2xwj8xiZI,1320,1525,217-147-693-623,29.97,1280,720,train +kt2xwj8xiZI,1587,1758,222-156-686-620,29.97,1280,720,train +kt2xwj8xiZI,2167,2447,376-124-870-618,29.97,1280,720,train +kt2xwj8xiZI,3157,3286,578-133-1063-618,29.97,1280,720,train +kt2xwj8xiZI,3343,3474,605-150-1074-619,29.97,1280,720,train +kt2xwj8xiZI,4010,4169,384-118-868-602,29.97,1280,720,train +kt2xwj8xiZI,4436,4597,301-135-792-626,29.97,1280,720,train +kt2xwj8xiZI,4637,4868,293-140-779-626,29.97,1280,720,train +kt2xwj8xiZI,5497,5712,184-159-628-603,29.97,1280,720,train +kt2xwj8xiZI,5794,6223,199-147-646-594,29.97,1280,720,train +kt2xwj8xiZI,6267,6404,143-151-605-613,29.97,1280,720,train +kt2xwj8xiZI,6804,7020,349-148-823-622,29.97,1280,720,train +kt2xwj8xiZI,8422,8633,646-109-1127-590,29.97,1280,720,train +kt2xwj8xiZI,9044,9214,404-128-885-609,29.97,1280,720,train +kt2xwj8xiZI,9316,9571,431-125-934-628,29.97,1280,720,train +kt2xwj8xiZI,9967,10146,171-125-646-600,29.97,1280,720,train +kt2xwj8xiZI,10505,10667,331-154-762-585,29.97,1280,720,train +kt2xwj8xiZI,11308,11484,304-174-713-583,29.97,1280,720,train +kt2xwj8xiZI,12196,12340,302-137-755-590,29.97,1280,720,train +kt2xwj8xiZI,12340,12638,284-127-735-578,29.97,1280,720,train +cB0yZrBOIXQ,362,639,336-89-934-687,25.0,1280,720,test +cB0yZrBOIXQ,866,1020,310-77-912-679,25.0,1280,720,test +cB0yZrBOIXQ,1162,1627,291-111-893-713,25.0,1280,720,test 
+cB0yZrBOIXQ,4698,4834,325-69-951-695,25.0,1280,720,test +cB0yZrBOIXQ,4834,5038,253-61-897-705,25.0,1280,720,test +cB0yZrBOIXQ,5098,5317,383-74-985-676,25.0,1280,720,test +Ttc2stTBRGI,599,850,181-116-696-631,29.97,1280,720,train +Ttc2stTBRGI,922,1054,261-162-723-624,29.97,1280,720,train +Ttc2stTBRGI,1546,1688,388-145-865-622,29.97,1280,720,train +Ttc2stTBRGI,2450,2592,651-148-1113-610,29.97,1280,720,train +Ttc2stTBRGI,2671,2852,560-184-976-600,29.97,1280,720,train +Ttc2stTBRGI,2923,3052,474-139-946-611,29.97,1280,720,train +Ttc2stTBRGI,4145,4274,412-107-951-646,29.97,1280,720,train +Ttc2stTBRGI,4362,4530,279-151-769-641,29.97,1280,720,train +Ttc2stTBRGI,5305,5488,224-160-663-599,29.97,1280,720,train +Ttc2stTBRGI,5600,5796,227-168-659-600,29.97,1280,720,train +Ttc2stTBRGI,5796,5925,230-161-681-612,29.97,1280,720,train +Ttc2stTBRGI,6208,6374,253-173-675-595,29.97,1280,720,train +Ttc2stTBRGI,6484,6627,255-183-660-588,29.97,1280,720,train +Ttc2stTBRGI,6656,6787,237-174-652-589,29.97,1280,720,train +Ttc2stTBRGI,7095,7241,265-137-726-598,29.97,1280,720,train +Ttc2stTBRGI,7243,7466,249-132-706-589,29.97,1280,720,train +ETywBOZ19Y0,606,738,1062-260-1876-1074,29.97,1920,1080,train +ETywBOZ19Y0,865,995,725-147-1561-983,29.97,1920,1080,train +ETywBOZ19Y0,995,1173,687-218-1491-1022,29.97,1920,1080,train +ETywBOZ19Y0,1192,1325,495-238-1258-1001,29.97,1920,1080,train +ETywBOZ19Y0,1447,1668,501-167-1348-1014,29.97,1920,1080,train +ETywBOZ19Y0,1668,1802,580-124-1471-1015,29.97,1920,1080,train +ETywBOZ19Y0,1926,2239,599-191-1477-1069,29.97,1920,1080,train +ETywBOZ19Y0,4796,5567,628-30-1677-1079,29.97,1920,1080,train +ETywBOZ19Y0,5724,6134,594-62-1610-1078,29.97,1920,1080,train +ETywBOZ19Y0,7020,7273,162-83-1159-1080,29.97,1920,1080,train +ETywBOZ19Y0,8463,8654,38-26-1071-1059,29.97,1920,1080,train +ETywBOZ19Y0,9818,10612,764-35-1740-1011,29.97,1920,1080,train +ETywBOZ19Y0,10883,11441,1021-140-1864-983,29.97,1920,1080,train 
+ETywBOZ19Y0,11614,11766,472-172-1377-1077,29.97,1920,1080,train +ETywBOZ19Y0,11766,12135,537-128-1468-1059,29.97,1920,1080,train +ETywBOZ19Y0,12323,12606,488-63-1472-1047,29.97,1920,1080,train +ETywBOZ19Y0,12639,13004,645-27-1687-1069,29.97,1920,1080,train +fLYeTpU8b1A,2654,2936,461-10-1525-1074,29.97,1920,1080,train +fLYeTpU8b1A,4439,4576,50-21-1098-1069,29.97,1920,1080,train +DMEaUoA8EPE,28,354,560-331-1218-989,25.0,1920,1080,test +DMEaUoA8EPE,597,810,514-387-1177-1050,25.0,1920,1080,test +iFMbu9-Mejc,75,344,230-45-654-469,25.0,854,480,train +iFMbu9-Mejc,344,520,199-23-642-466,25.0,854,480,train +iFMbu9-Mejc,834,1054,224-39-638-453,25.0,854,480,train +iFMbu9-Mejc,1216,1369,234-56-637-459,25.0,854,480,train +iFMbu9-Mejc,2009,2150,263-77-645-459,25.0,854,480,train +iFMbu9-Mejc,2759,3064,197-54-607-464,25.0,854,480,train +iFMbu9-Mejc,4989,5131,260-56-648-444,25.0,854,480,train +nHGRRI8otpE,4964,5223,191-103-802-714,29.67,1280,720,train +nHGRRI8otpE,6556,6821,648-125-1198-675,29.67,1280,720,train +nHGRRI8otpE,6864,7297,699-93-1270-664,29.67,1280,720,train +nHGRRI8otpE,7644,7843,597-108-1113-624,29.67,1280,720,train +nHGRRI8otpE,7843,8091,589-81-1130-622,29.67,1280,720,train +nHGRRI8otpE,8091,8229,494-55-1056-617,29.67,1280,720,train +nHGRRI8otpE,8253,8429,574-98-1082-606,29.67,1280,720,train +nHGRRI8otpE,8429,8701,616-64-1146-594,29.67,1280,720,train +nHGRRI8otpE,10363,10598,134-13-787-666,29.67,1280,720,train +nHGRRI8otpE,10965,11173,130-30-757-657,29.67,1280,720,train +nHGRRI8otpE,11292,11438,9-0-672-662,29.67,1280,720,train +nHGRRI8otpE,12581,13029,99-84-613-598,29.67,1280,720,train +nHGRRI8otpE,13241,13424,93-64-607-578,29.67,1280,720,train +mNtz9Kq4-YU,0,246,295-143-700-548,25.0,1280,720,train +mNtz9Kq4-YU,1202,1344,513-153-894-534,25.0,1280,720,train +mNtz9Kq4-YU,3942,4319,345-111-727-493,25.0,1280,720,train +mNtz9Kq4-YU,8072,8434,347-87-747-487,25.0,1280,720,train +lCb5w6n8kPs,4451,4631,273-126-535-388,30.0,810,480,test 
+lCb5w6n8kPs,10150,10295,292-123-558-389,30.0,810,480,test +lCb5w6n8kPs,11879,12014,237-88-541-392,30.0,810,480,test +lCb5w6n8kPs,12081,12216,242-88-547-393,30.0,810,480,test +S5DkrAAcQP0,368,1392,44-55-674-685,25.0,1280,720,train +S5DkrAAcQP0,1392,2270,247-132-749-634,25.0,1280,720,train +S5DkrAAcQP0,2379,2562,372-145-846-619,25.0,1280,720,train +S5DkrAAcQP0,2597,2795,366-160-847-641,25.0,1280,720,train +S5DkrAAcQP0,2836,3048,348-136-848-636,25.0,1280,720,train +S5DkrAAcQP0,3149,3587,506-118-1027-639,25.0,1280,720,train +S5DkrAAcQP0,3957,4124,535-110-1093-668,25.0,1280,720,train +S5DkrAAcQP0,4124,4268,553-75-1152-674,25.0,1280,720,train +ZFpTP2fSThw,0,135,826-231-1625-1030,25.0,1920,1080,test +ZFpTP2fSThw,135,270,815-245-1592-1022,25.0,1920,1080,test +ZFpTP2fSThw,405,540,808-256-1609-1057,25.0,1920,1080,test +ZFpTP2fSThw,540,675,816-241-1604-1029,25.0,1920,1080,test +ZFpTP2fSThw,744,945,859-316-1605-1062,25.0,1920,1080,test +ZFpTP2fSThw,1826,1956,806-310-1512-1016,25.0,1920,1080,test +ZFpTP2fSThw,5265,5400,948-173-1767-992,25.0,1920,1080,test +h8zzn3C5TgU,809,942,407-91-1004-688,29.97,1280,720,train +h8zzn3C5TgU,942,1167,404-52-1046-694,29.97,1280,720,train +h8zzn3C5TgU,1167,1519,323-20-1016-713,29.97,1280,720,train +h8zzn3C5TgU,1942,2314,398-68-1029-699,29.97,1280,720,train +h8zzn3C5TgU,2894,3238,323-53-979-709,29.97,1280,720,train +h8zzn3C5TgU,3635,4148,393-44-1044-695,29.97,1280,720,train +boSXUZ2t7yQ,3034,3220,207-28-625-446,30.0,810,480,train +boSXUZ2t7yQ,3311,3458,212-31-623-442,30.0,810,480,train +boSXUZ2t7yQ,3548,3717,230-49-593-412,30.0,810,480,train +boSXUZ2t7yQ,3717,4456,203-31-587-415,30.0,810,480,train +boSXUZ2t7yQ,4549,4703,235-46-605-416,30.0,810,480,train +boSXUZ2t7yQ,4795,5004,240-40-613-413,30.0,810,480,train +boSXUZ2t7yQ,5058,5189,230-26-626-422,30.0,810,480,train +6jHyn4z0KLk,1890,2170,165-112-440-387,25.0,640,480,train +6jHyn4z0KLk,2962,3100,197-120-469-392,25.0,640,480,train +0l9UlEelXFI,196,737,153-112-474-433,25.0,600,480,train 
+0l9UlEelXFI,738,916,187-149-451-413,25.0,600,480,train +0l9UlEelXFI,945,1236,193-145-459-411,25.0,600,480,train +0l9UlEelXFI,1443,1623,226-141-514-429,25.0,600,480,train +0l9UlEelXFI,1744,2138,237-142-528-433,25.0,600,480,train +0l9UlEelXFI,2148,2364,111-122-424-435,25.0,600,480,train +0l9UlEelXFI,2373,2545,90-120-407-437,25.0,600,480,train +0l9UlEelXFI,2545,2699,176-135-476-435,25.0,600,480,train +0l9UlEelXFI,2985,3443,230-125-539-434,25.0,600,480,train +0l9UlEelXFI,4050,4310,101-110-463-472,25.0,600,480,train +0l9UlEelXFI,4991,5173,88-57-507-476,25.0,600,480,train +0l9UlEelXFI,5201,5380,132-52-556-476,25.0,600,480,train +kTHsY7fbcgQ,361,762,460-90-995-625,25.0,1280,720,train +kTHsY7fbcgQ,762,1045,473-54-1060-641,25.0,1280,720,train +kTHsY7fbcgQ,1045,1235,457-95-1031-669,25.0,1280,720,train +kTHsY7fbcgQ,1235,2259,350-67-947-664,25.0,1280,720,train +kTHsY7fbcgQ,2365,2591,508-78-1109-679,25.0,1280,720,train +kTHsY7fbcgQ,2906,3123,385-95-926-636,25.0,1280,720,train +kTHsY7fbcgQ,3186,3333,393-113-931-651,25.0,1280,720,train +kTHsY7fbcgQ,3509,3686,390-120-913-643,25.0,1280,720,train +kTHsY7fbcgQ,3806,3940,391-113-925-647,25.0,1280,720,train +kTHsY7fbcgQ,4203,5227,438-26-1092-680,25.0,1280,720,train +kTHsY7fbcgQ,5275,5758,443-87-1048-692,25.0,1280,720,train +kTHsY7fbcgQ,5758,5931,473-58-1089-674,25.0,1280,720,train +PKE4yfDESa0,0,421,92-23-529-460,29.97,640,480,train +PKE4yfDESa0,421,1445,95-0-558-463,29.97,640,480,train +PKE4yfDESa0,1445,2469,92-22-533-463,29.97,640,480,train +PKE4yfDESa0,2469,3493,108-38-532-462,29.97,640,480,train +PKE4yfDESa0,3493,4517,98-26-537-465,29.97,640,480,train +PKE4yfDESa0,4598,4818,101-27-533-459,29.97,640,480,train +PKE4yfDESa0,4818,5657,92-5-551-464,29.97,640,480,train +vVRBxYkYILo,166,1190,1003-109-1855-961,29.97,1920,1080,train +vVRBxYkYILo,1242,1499,1034-148-1841-955,29.97,1920,1080,train +vVRBxYkYILo,2555,2883,191-208-991-1008,29.97,1920,1080,train +vVRBxYkYILo,3026,3161,71-207-862-998,29.97,1920,1080,train 
+vVRBxYkYILo,3218,3387,72-188-836-952,29.97,1920,1080,train +vVRBxYkYILo,3809,3954,221-128-1080-987,29.97,1920,1080,train +vVRBxYkYILo,4168,4399,365-94-1313-1042,29.97,1920,1080,train +vVRBxYkYILo,4399,4529,593-121-1493-1021,29.97,1920,1080,train +vVRBxYkYILo,4663,5010,714-37-1662-985,29.97,1920,1080,train +vVRBxYkYILo,6187,6412,353-154-1064-865,29.97,1920,1080,train +vVRBxYkYILo,6733,6900,313-208-984-879,29.97,1920,1080,train +vVRBxYkYILo,6919,7085,234-207-926-899,29.97,1920,1080,train +vVRBxYkYILo,7105,7266,186-152-911-877,29.97,1920,1080,train +vVRBxYkYILo,7291,7546,506-241-1127-862,29.97,1920,1080,train +vVRBxYkYILo,8538,8680,1149-271-1747-869,29.97,1920,1080,train +vVRBxYkYILo,8961,9207,1024-206-1756-938,29.97,1920,1080,train +vVRBxYkYILo,9314,9833,918-271-1530-883,29.97,1920,1080,train +vVRBxYkYILo,10043,10645,875-207-1528-860,29.97,1920,1080,train +-uPGb1vn9Pk,271,520,565-404-822-661,25.0,1280,720,train +-uPGb1vn9Pk,3527,3903,626-416-882-672,25.0,1280,720,train +-uPGb1vn9Pk,8686,8949,712-400-974-662,25.0,1280,720,train +-uPGb1vn9Pk,10632,10827,680-405-962-687,25.0,1280,720,train +-uPGb1vn9Pk,10837,10965,756-423-1017-684,25.0,1280,720,train +-uPGb1vn9Pk,11753,11958,785-389-1076-680,25.0,1280,720,train +-uPGb1vn9Pk,12081,12508,735-387-1033-685,25.0,1280,720,train +-uPGb1vn9Pk,12508,12793,667-387-970-690,25.0,1280,720,train +-uPGb1vn9Pk,12818,12996,641-402-924-685,25.0,1280,720,train +-uPGb1vn9Pk,13134,13484,538-407-812-681,25.0,1280,720,train +-uPGb1vn9Pk,13484,13618,469-416-730-677,25.0,1280,720,train +-uPGb1vn9Pk,14669,14909,349-413-610-674,25.0,1280,720,train +-uPGb1vn9Pk,14915,15536,358-407-627-676,25.0,1280,720,train +-uPGb1vn9Pk,15849,16074,345-416-604-675,25.0,1280,720,train +-uPGb1vn9Pk,20707,21076,504-374-792-662,25.0,1280,720,train +w81Tr0Dp1K8,1120,1378,4-307-484-787,29.97,1920,1080,test +w81Tr0Dp1K8,1375,1516,1267-268-1793-794,29.97,1920,1080,test +w81Tr0Dp1K8,2125,2449,541-220-1163-842,29.97,1920,1080,test 
+w81Tr0Dp1K8,2947,3160,569-222-1178-831,29.97,1920,1080,test +w81Tr0Dp1K8,3550,3698,4-314-470-780,29.97,1920,1080,test +w81Tr0Dp1K8,4036,4218,670-235-1287-852,29.97,1920,1080,test +w81Tr0Dp1K8,8275,8854,432-149-1199-916,29.97,1920,1080,test +w81Tr0Dp1K8,8965,9131,409-86-1249-926,29.97,1920,1080,test +w81Tr0Dp1K8,10513,10662,579-181-1300-902,29.97,1920,1080,test +w81Tr0Dp1K8,11502,11671,467-136-1241-910,29.97,1920,1080,test +w81Tr0Dp1K8,12112,12295,22-265-586-829,29.97,1920,1080,test +w81Tr0Dp1K8,12240,12431,434-87-1264-917,29.97,1920,1080,test +w81Tr0Dp1K8,12373,12752,1090-190-1719-819,29.97,1920,1080,test +w81Tr0Dp1K8,12900,13080,382-140-1140-898,29.97,1920,1080,test +w81Tr0Dp1K8,13602,13783,438-132-1215-909,29.97,1920,1080,test +w81Tr0Dp1K8,14180,14347,460-140-1240-920,29.97,1920,1080,test +w81Tr0Dp1K8,14197,14377,12-320-522-830,29.97,1920,1080,test +w81Tr0Dp1K8,14756,14888,1027-196-1654-823,29.97,1920,1080,test +w81Tr0Dp1K8,15145,15329,416-85-1235-904,29.97,1920,1080,test +w81Tr0Dp1K8,15329,15485,6-269-573-836,29.97,1920,1080,test +w81Tr0Dp1K8,15339,15664,422-120-1193-891,29.97,1920,1080,test +w81Tr0Dp1K8,15670,15801,6-262-579-835,29.97,1920,1080,test +A3ZmT97hAWU,95,678,136-86-477-427,29.97,640,480,test +A3ZmT97hAWU,768,1653,103-66-487-450,29.97,640,480,test +A3ZmT97hAWU,1737,2033,117-50-500-433,29.97,640,480,test +A3ZmT97hAWU,2455,2899,49-36-466-453,29.97,640,480,test +A3ZmT97hAWU,3006,3151,147-77-508-438,29.97,640,480,test +A3ZmT97hAWU,3169,3528,83-69-453-439,29.97,640,480,test +A3ZmT97hAWU,3883,4023,128-67-468-407,29.97,640,480,test +A3ZmT97hAWU,4436,4582,159-92-496-429,29.97,640,480,test +A3ZmT97hAWU,5410,5643,176-79-515-418,29.97,640,480,test +A3ZmT97hAWU,6808,7014,148-83-485-420,29.97,640,480,test +A3ZmT97hAWU,7482,7618,72-58-423-409,29.97,640,480,test +A3ZmT97hAWU,7778,8125,148-71-481-404,29.97,640,480,test +A3ZmT97hAWU,8544,8731,58-82-396-420,29.97,640,480,test +A3ZmT97hAWU,9460,9605,33-69-399-435,29.97,640,480,test 
+yZ5l8EJeCMI,165,992,59-94-369-404,25.0,600,480,train +yZ5l8EJeCMI,2823,3049,116-121-390-395,25.0,600,480,train +eOdr6vu_Y6g,1307,1494,414-194-790-570,29.97,1280,720,train +eOdr6vu_Y6g,1936,2203,431-194-804-567,29.97,1280,720,train +eOdr6vu_Y6g,2203,2348,409-236-738-565,29.97,1280,720,train +eOdr6vu_Y6g,3059,3265,480-225-821-566,29.97,1280,720,train +eOdr6vu_Y6g,3451,3616,444-187-815-558,29.97,1280,720,train +eOdr6vu_Y6g,6837,6992,281-52-944-715,29.97,1280,720,train +eOdr6vu_Y6g,9748,9938,327-139-875-687,29.97,1280,720,train +eOdr6vu_Y6g,11378,11551,183-120-774-711,29.97,1280,720,train +tN_7bWyIWDU,16,1005,279-39-868-628,30.0,1280,720,train +tN_7bWyIWDU,1519,1753,632-89-1167-624,30.0,1280,720,train +tN_7bWyIWDU,2885,3015,381-128-906-653,30.0,1280,720,train +tN_7bWyIWDU,3015,3177,416-104-950-638,30.0,1280,720,train +tN_7bWyIWDU,3177,3495,322-97-840-615,30.0,1280,720,train +tN_7bWyIWDU,3821,3970,371-100-901-630,30.0,1280,720,train +tN_7bWyIWDU,3970,4160,258-104-778-624,30.0,1280,720,train +tN_7bWyIWDU,4220,4570,250-79-801-630,30.0,1280,720,train +tN_7bWyIWDU,4747,5007,261-93-812-644,30.0,1280,720,train +tN_7bWyIWDU,5007,5280,349-65-903-619,30.0,1280,720,train +tN_7bWyIWDU,5280,5713,306-97-846-637,30.0,1280,720,train +tN_7bWyIWDU,5924,6499,311-102-878-669,30.0,1280,720,train +tN_7bWyIWDU,6581,6750,306-106-865-665,30.0,1280,720,train +tN_7bWyIWDU,7040,7269,327-63-894-630,30.0,1280,720,train +tN_7bWyIWDU,7495,7627,299-28-896-625,30.0,1280,720,train +tN_7bWyIWDU,7691,7943,303-94-829-620,30.0,1280,720,train +tN_7bWyIWDU,8000,8192,402-74-949-621,30.0,1280,720,train +tN_7bWyIWDU,8458,8960,233-24-840-631,30.0,1280,720,train +tN_7bWyIWDU,9007,9377,347-91-901-645,30.0,1280,720,train +tN_7bWyIWDU,9377,9673,336-61-899-624,30.0,1280,720,train +tN_7bWyIWDU,10225,10400,441-43-1044-646,30.0,1280,720,train +tN_7bWyIWDU,10660,10990,498-57-1054-613,30.0,1280,720,train +kXy-sWk4-jY,170,322,781-196-1407-822,25.0,1920,1080,train +kXy-sWk4-jY,460,688,990-201-1605-816,25.0,1920,1080,train 
+kXy-sWk4-jY,755,1002,801-218-1411-828,25.0,1920,1080,train +kXy-sWk4-jY,1002,1210,795-228-1426-859,25.0,1920,1080,train +kXy-sWk4-jY,1287,1454,603-236-1205-838,25.0,1920,1080,train +kXy-sWk4-jY,1703,1876,762-243-1444-925,25.0,1920,1080,train +kXy-sWk4-jY,2002,2373,761-320-1279-838,25.0,1920,1080,train +kXy-sWk4-jY,2468,2693,809-315-1331-837,25.0,1920,1080,train +kXy-sWk4-jY,2947,3147,909-315-1402-808,25.0,1920,1080,train +kXy-sWk4-jY,3950,4085,779-296-1234-751,25.0,1920,1080,train +kXy-sWk4-jY,5417,5586,627-297-1166-836,25.0,1920,1080,train +UMKGRC2QMFo,0,205,413-209-1200-996,29.97,1920,1080,train +UMKGRC2QMFo,205,800,408-213-1176-981,29.97,1920,1080,train +UMKGRC2QMFo,800,1056,592-256-1331-995,29.97,1920,1080,train +UMKGRC2QMFo,1181,1329,564-301-1262-999,29.97,1920,1080,train +UMKGRC2QMFo,1379,1519,648-285-1345-982,29.97,1920,1080,train +UMKGRC2QMFo,1699,2027,493-284-1169-960,29.97,1920,1080,train +UMKGRC2QMFo,2134,2288,523-307-1159-943,29.97,1920,1080,train +UMKGRC2QMFo,3352,3491,635-264-1309-938,29.97,1920,1080,train +UMKGRC2QMFo,4066,4225,503-300-1143-940,29.97,1920,1080,train +UMKGRC2QMFo,4225,4451,601-344-1197-940,29.97,1920,1080,train +Iw91zmQTMm0,1634,1851,412-53-1079-720,29.97,1280,720,test +Iw91zmQTMm0,2436,2702,360-126-899-665,29.97,1280,720,test +Iw91zmQTMm0,2820,3234,329-96-910-677,29.97,1280,720,test +Iw91zmQTMm0,3234,3387,420-104-991-675,29.97,1280,720,test +Iw91zmQTMm0,3445,3597,345-100-929-684,29.97,1280,720,test +Iw91zmQTMm0,3619,4049,263-86-840-663,29.97,1280,720,test +Iw91zmQTMm0,4049,4465,331-87-916-672,29.97,1280,720,test +0ye2tnrow_I,1218,1634,315-47-983-715,29.97,1280,720,train +0ye2tnrow_I,2188,2406,315-68-940-693,29.97,1280,720,train +0ye2tnrow_I,2976,3112,362-112-909-659,29.97,1280,720,train +0ye2tnrow_I,3202,3465,445-58-1068-681,29.97,1280,720,train +0ye2tnrow_I,5782,5922,5-81-643-719,29.97,1280,720,train +0ye2tnrow_I,6359,6688,1-72-630-701,29.97,1280,720,train +0ye2tnrow_I,6775,6948,374-82-989-697,29.97,1280,720,train 
+0ye2tnrow_I,7035,7216,326-110-910-694,29.97,1280,720,train +0ye2tnrow_I,7232,7685,347-52-985-690,29.97,1280,720,train +DfSnNtAcWAs,1390,1581,427-321-1140-1034,29.97,1920,1080,train +DfSnNtAcWAs,1651,1915,528-306-1252-1030,29.97,1920,1080,train +DfSnNtAcWAs,1982,2739,493-172-1329-1008,29.97,1920,1080,train +DfSnNtAcWAs,2743,3767,507-185-1342-1020,29.97,1920,1080,train +DfSnNtAcWAs,3838,4095,618-286-1314-982,29.97,1920,1080,train +DfSnNtAcWAs,4232,4472,615-294-1300-979,29.97,1920,1080,train +DfSnNtAcWAs,5019,5475,723-251-1460-988,29.97,1920,1080,train +DfSnNtAcWAs,5682,5972,704-328-1380-1004,29.97,1920,1080,train +DfSnNtAcWAs,6199,6603,462-127-1328-993,29.97,1920,1080,train +DfSnNtAcWAs,6804,6970,700-329-1355-984,29.97,1920,1080,train +DfSnNtAcWAs,6980,7343,675-290-1374-989,29.97,1920,1080,train +DfSnNtAcWAs,7507,7635,495-325-1208-1038,29.97,1920,1080,train +DfSnNtAcWAs,8344,8515,625-257-1370-1002,29.97,1920,1080,train +DfSnNtAcWAs,9199,9532,284-111-1185-1012,29.97,1920,1080,train +DfSnNtAcWAs,10074,10624,321-156-1193-1028,29.97,1920,1080,train +DfSnNtAcWAs,10773,10917,423-315-1113-1005,29.97,1920,1080,train +DfSnNtAcWAs,11692,11864,529-346-1235-1052,29.97,1920,1080,train +DfSnNtAcWAs,12084,12234,458-358-1116-1016,29.97,1920,1080,train +DfSnNtAcWAs,12272,12416,352-336-1034-1018,29.97,1920,1080,train +DfSnNtAcWAs,12499,12692,493-265-1254-1026,29.97,1920,1080,train +DfSnNtAcWAs,12728,12906,390-313-1097-1020,29.97,1920,1080,train +DfSnNtAcWAs,13001,13131,365-324-1040-999,29.97,1920,1080,train +DfSnNtAcWAs,13332,13508,385-216-1216-1047,29.97,1920,1080,train +DfSnNtAcWAs,14200,14383,700-313-1423-1036,29.97,1920,1080,train +DfSnNtAcWAs,14429,14558,464-158-1317-1011,29.97,1920,1080,train +DfSnNtAcWAs,14658,14844,578-299-1303-1024,29.97,1920,1080,train +DfSnNtAcWAs,15250,15769,623-272-1386-1035,29.97,1920,1080,train +DfSnNtAcWAs,15892,16447,552-133-1455-1036,29.97,1920,1080,train +DfSnNtAcWAs,16981,17343,470-94-1389-1013,29.97,1920,1080,train 
+DfSnNtAcWAs,17370,17527,744-254-1493-1003,29.97,1920,1080,train +DfSnNtAcWAs,17631,17811,603-273-1390-1060,29.97,1920,1080,train +DfSnNtAcWAs,18096,18263,305-221-1150-1066,29.97,1920,1080,train +DfSnNtAcWAs,18546,18683,366-262-1154-1050,29.97,1920,1080,train +DfSnNtAcWAs,19403,19627,471-257-1242-1028,29.97,1920,1080,train +DfSnNtAcWAs,19733,20059,626-231-1424-1029,29.97,1920,1080,train +DfSnNtAcWAs,20333,20603,623-215-1427-1019,29.97,1920,1080,train +DfSnNtAcWAs,20603,20939,594-175-1438-1019,29.97,1920,1080,train +DfSnNtAcWAs,21010,21665,423-236-1196-1009,29.97,1920,1080,train +EDGjhmIMCnw,561,693,181-38-823-680,29.97,1280,720,train +EDGjhmIMCnw,1349,1647,401-73-1009-681,29.97,1280,720,train +EDGjhmIMCnw,1956,2120,499-81-1064-646,29.97,1280,720,train +EDGjhmIMCnw,2241,2535,416-106-986-676,29.97,1280,720,train +EDGjhmIMCnw,2848,3066,319-106-874-661,29.97,1280,720,train +EDGjhmIMCnw,3180,3330,387-102-948-663,29.97,1280,720,train +EDGjhmIMCnw,3397,3614,389-66-1004-681,29.97,1280,720,train +EDGjhmIMCnw,3831,3982,571-68-1182-679,29.97,1280,720,train +EDGjhmIMCnw,4092,4223,311-49-938-676,29.97,1280,720,train +EDGjhmIMCnw,5034,5228,338-88-933-683,29.97,1280,720,train +EDGjhmIMCnw,5400,5542,426-134-983-691,29.97,1280,720,train +EDGjhmIMCnw,6330,6497,342-132-897-687,29.97,1280,720,train +OiblkvkAHWM,2280,2440,256-118-832-694,25.0,1280,720,test +OiblkvkAHWM,2575,2763,385-63-1018-696,25.0,1280,720,test +OiblkvkAHWM,3461,3616,315-81-935-701,25.0,1280,720,test +OiblkvkAHWM,3931,4079,281-124-846-689,25.0,1280,720,test +OiblkvkAHWM,4079,4212,350-33-1017-700,25.0,1280,720,test +OiblkvkAHWM,4695,4957,242-80-860-698,25.0,1280,720,test +OiblkvkAHWM,5484,5794,316-123-903-710,25.0,1280,720,test +OiblkvkAHWM,5980,6116,256-112-852-708,25.0,1280,720,test +OiblkvkAHWM,6251,6533,263-10-969-716,25.0,1280,720,test +OiblkvkAHWM,7654,8070,274-16-965-707,25.0,1280,720,test +OiblkvkAHWM,8617,8768,597-34-1273-710,25.0,1280,720,test +OiblkvkAHWM,10080,10217,205-30-877-702,25.0,1280,720,test 
+OiblkvkAHWM,10498,10640,305-60-955-710,25.0,1280,720,test +OiblkvkAHWM,11052,11181,244-65-881-702,25.0,1280,720,test +OiblkvkAHWM,14186,14331,247-38-917-708,25.0,1280,720,test +OiblkvkAHWM,14331,14459,264-5-977-718,25.0,1280,720,test +kfMckLkZz-4,249,430,182-42-604-464,29.97,640,480,train +kfMckLkZz-4,668,816,22-50-398-426,29.97,640,480,train +kfMckLkZz-4,2935,3128,86-27-528-469,29.97,640,480,train +kfMckLkZz-4,4062,4284,204-93-562-451,29.97,640,480,train +kfMckLkZz-4,4402,4580,230-80-609-459,29.97,640,480,train +XM-YfeodJq4,909,1087,492-37-1458-1003,29.97,1920,1080,train +XM-YfeodJq4,1309,1500,405-1-1474-1070,29.97,1920,1080,train +XM-YfeodJq4,4932,5073,641-4-1569-932,29.97,1920,1080,train +-kJzyrIM1cg,225,1249,436-135-1332-1031,25.0,1920,1080,train +-kJzyrIM1cg,1249,2273,518-135-1423-1040,25.0,1920,1080,train +-kJzyrIM1cg,2273,3297,557-144-1449-1036,25.0,1920,1080,train +-kJzyrIM1cg,3297,3552,533-135-1411-1013,25.0,1920,1080,train +-kJzyrIM1cg,3552,4063,534-130-1459-1055,25.0,1920,1080,train +-kJzyrIM1cg,4063,5087,620-130-1519-1029,25.0,1920,1080,train +-kJzyrIM1cg,5087,5345,574-155-1458-1039,25.0,1920,1080,train +-kJzyrIM1cg,5594,5861,504-107-1404-1007,25.0,1920,1080,train +-kJzyrIM1cg,6067,6227,581-178-1436-1033,25.0,1920,1080,train +-kJzyrIM1cg,6227,6569,532-137-1399-1004,25.0,1920,1080,train +-kJzyrIM1cg,6569,6814,684-172-1494-982,25.0,1920,1080,train +-kJzyrIM1cg,6912,7218,562-171-1385-994,25.0,1920,1080,train +-kJzyrIM1cg,7363,7547,555-159-1410-1014,25.0,1920,1080,train +-kJzyrIM1cg,7740,7920,496-133-1366-1003,25.0,1920,1080,train +-kJzyrIM1cg,7980,8110,492-134-1387-1029,25.0,1920,1080,train +-kJzyrIM1cg,8345,8658,502-108-1414-1020,25.0,1920,1080,train +-kJzyrIM1cg,8658,8968,351-65-1288-1002,25.0,1920,1080,train +-kJzyrIM1cg,8968,9113,370-105-1302-1037,25.0,1920,1080,train +-kJzyrIM1cg,9132,9313,501-117-1460-1076,25.0,1920,1080,train +-kJzyrIM1cg,9317,9474,572-194-1414-1036,25.0,1920,1080,train +-kJzyrIM1cg,9474,9628,527-153-1358-984,25.0,1920,1080,train 
+-kJzyrIM1cg,9878,10478,498-171-1326-999,25.0,1920,1080,train +-kJzyrIM1cg,10527,10752,475-85-1390-1000,25.0,1920,1080,train +-kJzyrIM1cg,10789,10970,479-126-1362-1009,25.0,1920,1080,train +-kJzyrIM1cg,10971,11124,601-216-1386-1001,25.0,1920,1080,train +-kJzyrIM1cg,11282,11482,616-172-1358-914,25.0,1920,1080,train +-kJzyrIM1cg,11557,11729,271-162-1089-980,25.0,1920,1080,train +-kJzyrIM1cg,11729,11935,560-193-1332-965,25.0,1920,1080,train +-kJzyrIM1cg,11935,12166,569-185-1386-1002,25.0,1920,1080,train +-kJzyrIM1cg,12318,12662,401-131-1311-1041,25.0,1920,1080,train +-kJzyrIM1cg,12809,13094,495-138-1371-1014,25.0,1920,1080,train +-kJzyrIM1cg,13350,13779,535-111-1445-1021,25.0,1920,1080,train +-kJzyrIM1cg,14036,15060,446-122-1403-1079,25.0,1920,1080,train +-kJzyrIM1cg,15133,15308,548-140-1405-997,25.0,1920,1080,train +-kJzyrIM1cg,15308,15450,537-185-1391-1039,25.0,1920,1080,train +-kJzyrIM1cg,15599,16197,466-117-1373-1024,25.0,1920,1080,train +-kJzyrIM1cg,16197,16435,510-172-1404-1066,25.0,1920,1080,train +-kJzyrIM1cg,16471,16659,585-174-1486-1075,25.0,1920,1080,train +-kJzyrIM1cg,16659,17683,412-175-1279-1042,25.0,1920,1080,train +-kJzyrIM1cg,17703,17993,484-126-1352-994,25.0,1920,1080,train +-kJzyrIM1cg,18055,19079,415-68-1399-1052,25.0,1920,1080,train +-kJzyrIM1cg,19105,19403,502-178-1387-1063,25.0,1920,1080,train +-kJzyrIM1cg,19417,19617,705-128-1656-1079,25.0,1920,1080,train +-kJzyrIM1cg,19846,20335,625-169-1501-1045,25.0,1920,1080,train +-kJzyrIM1cg,20963,21092,786-208-1461-883,25.0,1920,1080,train +0EBa26jUU1g,5,142,137-91-460-414,30.0,640,480,train +0EBa26jUU1g,142,535,129-105-454-430,30.0,640,480,train +0EBa26jUU1g,1233,1386,97-127-406-436,30.0,640,480,train +YVgja2MPa1U,0,406,371-119-1250-998,25.0,1920,1080,train +YVgja2MPa1U,414,556,520-207-1308-995,25.0,1920,1080,train +YVgja2MPa1U,748,948,412-122-1347-1057,25.0,1920,1080,train +YVgja2MPa1U,2280,2470,532-189-1399-1056,25.0,1920,1080,train +YVgja2MPa1U,2795,2940,613-249-1385-1021,25.0,1920,1080,train 
+YVgja2MPa1U,3524,3686,626-265-1432-1071,25.0,1920,1080,train +YVgja2MPa1U,5360,5588,446-45-1469-1068,25.0,1920,1080,train +YVgja2MPa1U,8115,8253,431-203-1294-1066,25.0,1920,1080,train +YVgja2MPa1U,8315,8576,330-118-1280-1068,25.0,1920,1080,train +PvjYVsRK4Dg,2334,2475,91-32-528-469,29.97,640,480,train +PvjYVsRK4Dg,8180,8558,80-32-516-468,29.97,640,480,train +PvjYVsRK4Dg,8559,8768,61-41-488-468,29.97,640,480,train +PvjYVsRK4Dg,8800,9045,62-41-487-466,29.97,640,480,train +PvjYVsRK4Dg,9096,9440,59-43-489-473,29.97,640,480,train +PvjYVsRK4Dg,9451,9738,63-44-480-461,29.97,640,480,train +PvjYVsRK4Dg,9756,10191,90-31-530-471,29.97,640,480,train +JoFxJXTe2IM,1167,1349,131-62-483-414,30.0,640,480,train +JoFxJXTe2IM,1367,1588,143-71-487-415,30.0,640,480,train +JoFxJXTe2IM,1615,1807,139-52-533-446,30.0,640,480,train +JoFxJXTe2IM,1821,2164,131-60-496-425,30.0,640,480,train +JoFxJXTe2IM,2313,2665,162-82-490-410,30.0,640,480,train +JoFxJXTe2IM,2665,2836,161-78-501-418,30.0,640,480,train +JoFxJXTe2IM,2902,3073,149-97-491-439,30.0,640,480,train +JoFxJXTe2IM,3073,3511,156-81-501-426,30.0,640,480,train +JoFxJXTe2IM,3551,3832,98-72-457-431,30.0,640,480,train +JoFxJXTe2IM,3832,3973,147-54-511-418,30.0,640,480,train +JoFxJXTe2IM,4120,4791,116-55-481-420,30.0,640,480,train +JoFxJXTe2IM,5668,5839,114-111-419-416,30.0,640,480,train +JoFxJXTe2IM,6019,6170,145-81-466-402,30.0,640,480,train +JoFxJXTe2IM,7003,7161,153-74-495-416,30.0,640,480,train +JoFxJXTe2IM,7161,7331,135-54-496-415,30.0,640,480,train +JoFxJXTe2IM,7990,8253,128-75-462-409,30.0,640,480,train +JoFxJXTe2IM,8426,8579,161-86-472-397,30.0,640,480,train +JoFxJXTe2IM,8734,8871,146-90-472-416,30.0,640,480,train +JoFxJXTe2IM,9077,9362,179-87-497-405,30.0,640,480,train +JoFxJXTe2IM,9410,9698,172-66-522-416,30.0,640,480,train +JoFxJXTe2IM,9830,9994,184-92-497-405,30.0,640,480,train +JoFxJXTe2IM,10574,10759,214-63-566-415,30.0,640,480,train +JoFxJXTe2IM,11127,11343,130-82-467-419,30.0,640,480,train 
+JoFxJXTe2IM,11511,11644,239-99-531-391,30.0,640,480,train +JoFxJXTe2IM,11893,12027,179-89-501-411,30.0,640,480,train +JoFxJXTe2IM,12121,12273,203-140-488-425,30.0,640,480,train +JoFxJXTe2IM,12412,12582,152-104-453-405,30.0,640,480,train +SdT1S9ZL4L4,1920,2199,247-50-605-408,29.97,854,480,train +SdT1S9ZL4L4,4818,4971,246-26-601-381,29.97,854,480,train +SdT1S9ZL4L4,5544,5681,261-10-640-389,29.97,854,480,train +SdT1S9ZL4L4,6613,6751,241-32-569-360,29.97,854,480,train +J4WBg84_30s,5,427,310-62-923-675,25.0,1280,720,train +J4WBg84_30s,427,638,415-86-1040-711,25.0,1280,720,train +TWZGnwNe2jY,920,1078,539-67-1527-1055,29.97,1920,1080,train +wKkuvbzNt5U,183,477,318-275-699-656,25.0,960,720,train +wKkuvbzNt5U,514,665,305-299-682-676,25.0,960,720,train +wKkuvbzNt5U,1114,1245,267-290-676-699,25.0,960,720,train +wKkuvbzNt5U,4462,4848,243-293-662-712,25.0,960,720,train +wKkuvbzNt5U,4919,5290,203-269-646-712,25.0,960,720,train +VidZQ6yA7I4,201,336,820-97-1315-592,29.97,1920,1080,train +VidZQ6yA7I4,660,802,704-125-1186-607,29.97,1920,1080,train +VidZQ6yA7I4,835,1014,810-138-1261-589,29.97,1920,1080,train +VidZQ6yA7I4,1647,1775,990-127-1450-587,29.97,1920,1080,train +VidZQ6yA7I4,1845,1985,1013-109-1523-619,29.97,1920,1080,train +VidZQ6yA7I4,2167,2336,670-125-1183-638,29.97,1920,1080,train +VidZQ6yA7I4,2807,2991,505-137-989-621,29.97,1920,1080,train +VidZQ6yA7I4,4054,4846,955-0-1776-820,29.97,1920,1080,train +VidZQ6yA7I4,5086,5478,525-0-1180-655,29.97,1920,1080,train +VidZQ6yA7I4,5537,5666,778-109-1323-654,29.97,1920,1080,train +VidZQ6yA7I4,5848,5978,832-98-1366-632,29.97,1920,1080,train +VidZQ6yA7I4,6617,6750,342-104-867-629,29.97,1920,1080,train +VidZQ6yA7I4,6750,6888,464-112-988-636,29.97,1920,1080,train +B2zZOiUBzZc,1201,2225,546-148-1387-989,25.0,1920,1080,train +B2zZOiUBzZc,2225,2517,661-282-1325-946,25.0,1920,1080,train +B2zZOiUBzZc,2779,3006,635-217-1372-954,25.0,1920,1080,train +B2zZOiUBzZc,3066,3355,731-189-1542-1000,25.0,1920,1080,train 
+B2zZOiUBzZc,3420,3651,490-181-1278-969,25.0,1920,1080,train +B2zZOiUBzZc,4284,4439,341-281-996-936,25.0,1920,1080,train +B2zZOiUBzZc,4512,4653,407-247-1077-917,25.0,1920,1080,train +B2zZOiUBzZc,4653,4933,432-213-1138-919,25.0,1920,1080,train +B2zZOiUBzZc,5297,5666,629-188-1432-991,25.0,1920,1080,train +B2zZOiUBzZc,5963,6091,675-268-1328-921,25.0,1920,1080,train +B2zZOiUBzZc,6285,6465,461-270-1120-929,25.0,1920,1080,train +B2zZOiUBzZc,6509,6720,295-290-904-899,25.0,1920,1080,train +B2zZOiUBzZc,6824,7014,604-262-1235-893,25.0,1920,1080,train +B2zZOiUBzZc,7097,7262,631-232-1297-898,25.0,1920,1080,train +B2zZOiUBzZc,9388,9725,676-141-1551-1016,25.0,1920,1080,train +B2zZOiUBzZc,9725,9859,565-203-1333-971,25.0,1920,1080,train +B2zZOiUBzZc,10570,10964,547-265-1237-955,25.0,1920,1080,train +B2zZOiUBzZc,11387,11745,695-250-1408-963,25.0,1920,1080,train +B2zZOiUBzZc,11826,11963,539-238-1278-977,25.0,1920,1080,train +B2zZOiUBzZc,11963,12094,707-308-1375-976,25.0,1920,1080,train +B2zZOiUBzZc,12626,13564,691-52-1641-1002,25.0,1920,1080,train +B2zZOiUBzZc,13781,13940,450-177-891-618,25.0,1920,1080,train +1cHighu8eCo,18,253,121-81-492-452,29.97,640,480,train +1cHighu8eCo,407,973,133-68-519-454,29.97,640,480,train +1cHighu8eCo,1124,1338,151-92-491-432,29.97,640,480,train +1cHighu8eCo,2037,2201,235-103-555-423,29.97,640,480,train +1cHighu8eCo,2472,2757,205-77-548-420,29.97,640,480,train +1cHighu8eCo,3111,3283,146-77-476-407,29.97,640,480,train +1cHighu8eCo,3611,3808,100-92-428-420,29.97,640,480,train +1cHighu8eCo,4953,5197,111-13-570-472,29.97,640,480,train +1cHighu8eCo,5619,5802,153-77-532-456,29.97,640,480,train +1cHighu8eCo,7079,7346,89-48-442-401,29.97,640,480,train +1cHighu8eCo,7394,7587,146-74-500-428,29.97,640,480,train +1cHighu8eCo,8082,8345,137-88-481-432,29.97,640,480,train +1cHighu8eCo,8928,9137,221-88-562-429,29.97,640,480,train +1cHighu8eCo,10401,10703,238-104-554-420,29.97,640,480,train +1cHighu8eCo,11779,11995,129-73-503-447,29.97,640,480,train 
+oTmbnz-bRE0,109,942,344-151-787-594,29.97,1280,720,train +oTmbnz-bRE0,1037,1207,290-164-719-593,29.97,1280,720,train +oTmbnz-bRE0,1357,1526,496-210-875-589,29.97,1280,720,train +oTmbnz-bRE0,1760,1922,371-203-746-578,29.97,1280,720,train +oTmbnz-bRE0,1942,2082,448-169-845-566,29.97,1280,720,train +oTmbnz-bRE0,2397,2642,248-168-666-586,29.97,1280,720,train +oTmbnz-bRE0,2758,3011,370-212-741-583,29.97,1280,720,train +oTmbnz-bRE0,3450,3631,429-200-826-597,29.97,1280,720,train +oTmbnz-bRE0,3862,4088,285-173-726-614,29.97,1280,720,train +oTmbnz-bRE0,4243,4384,441-191-857-607,29.97,1280,720,train +oTmbnz-bRE0,4897,5077,278-211-711-644,29.97,1280,720,train +oTmbnz-bRE0,5341,5498,160-181-623-644,29.97,1280,720,train +oTmbnz-bRE0,5666,5820,320-213-754-647,29.97,1280,720,train +oTmbnz-bRE0,6238,6408,302-151-749-598,29.97,1280,720,train +oTmbnz-bRE0,6879,7047,485-175-887-577,29.97,1280,720,train +oTmbnz-bRE0,7061,7229,523-131-983-591,29.97,1280,720,train +oTmbnz-bRE0,8022,8321,292-146-765-619,29.97,1280,720,train +oTmbnz-bRE0,8462,8656,385-184-811-610,29.97,1280,720,train +oTmbnz-bRE0,8656,8877,283-163-737-617,29.97,1280,720,train +oTmbnz-bRE0,9161,9432,385-214-784-613,29.97,1280,720,train +oTmbnz-bRE0,9567,9732,204-189-653-638,29.97,1280,720,train +oTmbnz-bRE0,9806,9956,316-230-747-661,29.97,1280,720,train +oTmbnz-bRE0,9956,10106,370-210-792-632,29.97,1280,720,train +oTmbnz-bRE0,10797,10962,244-145-720-621,29.97,1280,720,train +oTmbnz-bRE0,11147,11276,440-166-892-618,29.97,1280,720,train +oTmbnz-bRE0,11427,11657,290-209-702-621,29.97,1280,720,train +oTmbnz-bRE0,11960,12117,208-163-703-658,29.97,1280,720,train +oTmbnz-bRE0,12637,12858,391-125-905-639,29.97,1280,720,train +oTmbnz-bRE0,12972,13128,149-153-658-662,29.97,1280,720,train +oTmbnz-bRE0,13160,13326,416-165-869-618,29.97,1280,720,train +oTmbnz-bRE0,13328,13691,356-162-808-614,29.97,1280,720,train +oTmbnz-bRE0,13691,14002,369-161-844-636,29.97,1280,720,train +oTmbnz-bRE0,14019,14331,287-83-835-631,29.97,1280,720,train 
+wPTaGTzwRDI,82,884,128-72-450-394,25.0,640,480,train +wPTaGTzwRDI,884,1052,100-114-389-403,25.0,640,480,train +wPTaGTzwRDI,1052,1399,115-99-418-402,25.0,640,480,train +wPTaGTzwRDI,1399,1583,92-111-383-402,25.0,640,480,train +wPTaGTzwRDI,2078,2418,91-110-383-402,25.0,640,480,train +wPTaGTzwRDI,2504,2670,159-83-475-399,25.0,640,480,train +wPTaGTzwRDI,2670,3335,140-67-475-402,25.0,640,480,train +wPTaGTzwRDI,3335,3904,212-105-509-402,25.0,640,480,train +wPTaGTzwRDI,4476,4872,208-99-515-406,25.0,640,480,train +wPTaGTzwRDI,4976,5251,110-71-434-395,25.0,640,480,train +hYZDOpHYgLM,5477,5667,235-233-981-979,29.97,1920,1080,train +hYZDOpHYgLM,6080,6230,644-274-1337-967,29.97,1920,1080,train +hYZDOpHYgLM,7337,7502,374-237-1132-995,29.97,1920,1080,train +hYZDOpHYgLM,7827,8062,297-290-960-953,29.97,1920,1080,train +LstLDRUBAp4,0,135,423-35-1380-992,25.0,1920,1080,train +LstLDRUBAp4,270,593,460-30-1471-1041,25.0,1920,1080,train +LstLDRUBAp4,675,810,494-8-1509-1023,25.0,1920,1080,train +LstLDRUBAp4,2295,2430,444-16-1463-1035,25.0,1920,1080,train +LstLDRUBAp4,2430,2565,491-146-1380-1035,25.0,1920,1080,train +LstLDRUBAp4,2596,2726,613-256-1287-930,25.0,1920,1080,train +_cIRyxE3Kf8,1983,2675,111-50-515-454,25.0,640,480,train +_cIRyxE3Kf8,2970,3293,115-5-562-452,25.0,640,480,train +_cIRyxE3Kf8,4056,4216,46-7-504-465,25.0,640,480,train +_cIRyxE3Kf8,4946,5763,144-37-550-443,25.0,640,480,train +_cIRyxE3Kf8,5763,5999,89-52-498-461,25.0,640,480,train +knidf7i-J3Y,95,323,317-139-824-646,29.97,1280,720,train +knidf7i-J3Y,534,1558,215-1-879-665,29.97,1280,720,train +knidf7i-J3Y,2217,2434,300-157-779-636,29.97,1280,720,train +knidf7i-J3Y,2531,2714,277-150-765-638,29.97,1280,720,train +knidf7i-J3Y,2743,2975,388-225-805-642,29.97,1280,720,train +knidf7i-J3Y,3181,3506,402-182-858-638,29.97,1280,720,train +knidf7i-J3Y,3600,3750,273-162-759-648,29.97,1280,720,train +knidf7i-J3Y,3766,3995,284-150-760-626,29.97,1280,720,train +knidf7i-J3Y,3995,4485,283-166-768-651,29.97,1280,720,train 
+knidf7i-J3Y,4635,4877,321-147-814-640,29.97,1280,720,train +knidf7i-J3Y,4951,5132,312-215-753-656,29.97,1280,720,train +knidf7i-J3Y,5132,5370,324-193-764-633,29.97,1280,720,train +knidf7i-J3Y,5650,6000,268-25-883-640,29.97,1280,720,train +svFggnyI4d8,293,481,1027-268-1829-1070,29.97,1920,1080,train +svFggnyI4d8,843,1018,1104-237-1916-1049,29.97,1920,1080,train +svFggnyI4d8,1379,1541,867-132-1652-917,29.97,1920,1080,train +svFggnyI4d8,3752,3919,604-205-1424-1025,29.97,1920,1080,train +svFggnyI4d8,5259,5425,733-184-1588-1039,29.97,1920,1080,train +svFggnyI4d8,6260,6412,812-282-1575-1045,29.97,1920,1080,train +svFggnyI4d8,7079,7279,459-120-1336-997,29.97,1920,1080,train +0mpd65lmphU,0,163,330-220-607-497,25.0,1280,720,train +0mpd65lmphU,0,299,918-216-1214-512,25.0,1280,720,train +0mpd65lmphU,353,535,905-230-1166-491,25.0,1280,720,train +0mpd65lmphU,1163,1698,913-219-1184-490,25.0,1280,720,train +0mpd65lmphU,1799,2003,353-204-637-488,25.0,1280,720,train +0mpd65lmphU,1932,2262,919-226-1184-491,25.0,1280,720,train +0mpd65lmphU,2459,2661,244-208-524-488,25.0,1280,720,train +0mpd65lmphU,2315,2672,829-224-1108-503,25.0,1280,720,train +0mpd65lmphU,2807,3111,707-210-1001-504,25.0,1280,720,train +0mpd65lmphU,3180,3433,143-223-416-496,25.0,1280,720,train +0mpd65lmphU,3433,3608,163-208-448-493,25.0,1280,720,train +0mpd65lmphU,3176,3745,666-226-964-524,25.0,1280,720,train +0mpd65lmphU,3745,4129,689-212-994-517,25.0,1280,720,train +0mpd65lmphU,3825,4216,115-199-420-504,25.0,1280,720,train +0mpd65lmphU,4140,4436,710-228-993-511,25.0,1280,720,train +YoABR3mDsIg,1160,1315,355-156-865-666,29.97,1280,720,train +YoABR3mDsIg,1413,1903,379-183-863-667,29.97,1280,720,train +YoABR3mDsIg,1979,2139,439-199-910-670,29.97,1280,720,train +YoABR3mDsIg,2220,2433,423-214-872-663,29.97,1280,720,train +YoABR3mDsIg,2810,2999,491-183-949-641,29.97,1280,720,train +YoABR3mDsIg,3080,3250,475-189-929-643,29.97,1280,720,train +YoABR3mDsIg,9740,9873,357-169-853-665,29.97,1280,720,train 
+YoABR3mDsIg,9948,10085,354-195-827-668,29.97,1280,720,train +YoABR3mDsIg,10085,10331,250-192-723-665,29.97,1280,720,train +YoABR3mDsIg,10578,10714,257-212-744-699,29.97,1280,720,train +YoABR3mDsIg,10714,10893,218-189-727-698,29.97,1280,720,train +YoABR3mDsIg,11261,11456,176-182-705-711,29.97,1280,720,train +YoABR3mDsIg,11551,11740,208-166-744-702,29.97,1280,720,train +8eflkNicY-E,945,1125,431-110-961-640,25.0,1280,720,train +8eflkNicY-E,2011,2153,415-169-915-669,25.0,1280,720,train +8eflkNicY-E,2398,2580,367-131-868-632,25.0,1280,720,train +8eflkNicY-E,2713,2843,319-142-858-681,25.0,1280,720,train +8eflkNicY-E,3587,3734,264-122-815-673,25.0,1280,720,train +EswKOjV3isk,427,1451,117-36-542-461,29.97,640,480,train +EswKOjV3isk,1451,2475,127-59-522-454,29.97,640,480,train +EswKOjV3isk,2475,3499,131-64-521-454,29.97,640,480,train +EswKOjV3isk,3499,3900,131-66-517-452,29.97,640,480,train +EswKOjV3isk,3900,4924,132-67-517-452,29.97,640,480,train +EswKOjV3isk,4924,5948,130-63-521-454,29.97,640,480,train +EswKOjV3isk,5948,6972,130-64-520-454,29.97,640,480,train +EswKOjV3isk,6972,7996,128-61-520-453,29.97,640,480,train +EswKOjV3isk,7996,8832,125-58-522-455,29.97,640,480,train +EswKOjV3isk,8832,8960,122-37-544-459,29.97,640,480,train +uggNT3QkTYQ,0,180,310-299-605-594,15.0,960,720,train +uggNT3QkTYQ,1235,1372,292-243-683-634,15.0,960,720,train +uggNT3QkTYQ,1386,1536,301-211-743-653,15.0,960,720,train +uggNT3QkTYQ,2779,2925,314-112-883-681,15.0,960,720,train +zBJADM80dPQ,420,992,447-127-935-615,29.97,1280,720,train +zBJADM80dPQ,1906,2109,578-60-1180-662,29.97,1280,720,train +zBJADM80dPQ,3184,3336,352-148-875-671,29.97,1280,720,train +zBJADM80dPQ,3393,3711,126-29-732-635,29.97,1280,720,train +AN62b1UpmcE,342,737,372-186-758-572,25.0,1280,720,train +AN62b1UpmcE,737,883,383-245-725-587,25.0,1280,720,train +AN62b1UpmcE,883,1209,367-227-731-591,25.0,1280,720,train +AN62b1UpmcE,1214,1406,352-210-744-602,25.0,1280,720,train +AN62b1UpmcE,1406,1736,321-211-736-626,25.0,1280,720,train 
+AN62b1UpmcE,3445,3584,22-10-667-655,25.0,1280,720,train +UU9-XEvF5X4,214,514,350-166-881-697,30.0,1280,720,train +UU9-XEvF5X4,514,1059,371-137-938-704,30.0,1280,720,train +UU9-XEvF5X4,1060,1252,378-159-922-703,30.0,1280,720,train +UU9-XEvF5X4,1252,1498,365-128-938-701,30.0,1280,720,train +UU9-XEvF5X4,1499,2353,376-158-922-704,30.0,1280,720,train +UU9-XEvF5X4,2370,3240,275-52-941-718,30.0,1280,720,train +UU9-XEvF5X4,3811,4835,307-3-1010-706,30.0,1280,720,train +UU9-XEvF5X4,4924,5948,301-31-984-714,30.0,1280,720,train +UU9-XEvF5X4,6144,7168,254-90-881-717,30.0,1280,720,train +UU9-XEvF5X4,7168,8018,340-183-858-701,30.0,1280,720,train +UU9-XEvF5X4,14225,15249,343-156-906-719,30.0,1280,720,train +UU9-XEvF5X4,15249,15422,335-154-891-710,30.0,1280,720,train +UU9-XEvF5X4,16589,16717,314-108-918-712,30.0,1280,720,train +UU9-XEvF5X4,19207,19770,367-161-903-697,30.0,1280,720,train +UU9-XEvF5X4,21551,22575,308-21-975-688,30.0,1280,720,train +UU9-XEvF5X4,22580,23082,382-121-934-673,30.0,1280,720,train +UU9-XEvF5X4,23082,23377,338-158-881-701,30.0,1280,720,train +UU9-XEvF5X4,23377,24401,392-131-954-693,30.0,1280,720,train +UU9-XEvF5X4,24401,24680,397-170-884-657,30.0,1280,720,train +UU9-XEvF5X4,24765,25094,426-133-960-667,30.0,1280,720,train +UU9-XEvF5X4,25094,26118,407-137-964-694,30.0,1280,720,train +UU9-XEvF5X4,26118,27142,389-139-941-691,30.0,1280,720,train +UU9-XEvF5X4,27290,27965,340-80-952-692,30.0,1280,720,train +UU9-XEvF5X4,27965,28305,250-36-920-706,30.0,1280,720,train +UU9-XEvF5X4,29032,29256,281-106-871-696,30.0,1280,720,train +UU9-XEvF5X4,29265,29501,319-114-891-686,30.0,1280,720,train +UU9-XEvF5X4,29501,30026,334-90-932-688,30.0,1280,720,train +UU9-XEvF5X4,30466,30699,282-8-990-716,30.0,1280,720,train +UU9-XEvF5X4,30755,30925,279-3-993-717,30.0,1280,720,train +UU9-XEvF5X4,31271,31490,267-39-920-692,30.0,1280,720,train +UU9-XEvF5X4,31580,32098,289-63-923-697,30.0,1280,720,train +UU9-XEvF5X4,32098,33122,259-22-947-710,30.0,1280,720,train 
+UU9-XEvF5X4,33296,33465,279-128-852-701,30.0,1280,720,train +UU9-XEvF5X4,33501,33697,275-121-855-701,30.0,1280,720,train +UU9-XEvF5X4,34758,34954,296-43-957-704,30.0,1280,720,train +UU9-XEvF5X4,35111,35246,292-58-945-711,30.0,1280,720,train +UU9-XEvF5X4,35545,35783,271-34-953-716,30.0,1280,720,train +UU9-XEvF5X4,35899,36032,376-151-918-693,30.0,1280,720,train +UU9-XEvF5X4,36273,36476,406-152-964-710,30.0,1280,720,train +UU9-XEvF5X4,38680,39383,369-146-903-680,30.0,1280,720,train +UU9-XEvF5X4,39383,40407,364-111-931-678,30.0,1280,720,train +UU9-XEvF5X4,40407,40939,358-122-918-682,30.0,1280,720,train +UU9-XEvF5X4,49635,49824,263-3-930-670,30.0,1280,720,train +UU9-XEvF5X4,52401,52598,484-104-1042-662,30.0,1280,720,train +UU9-XEvF5X4,52983,53208,336-112-899-675,30.0,1280,720,train +UU9-XEvF5X4,53475,53890,364-100-983-719,30.0,1280,720,train +UU9-XEvF5X4,54205,54334,69-128-649-708,30.0,1280,720,train +UU9-XEvF5X4,54522,54706,311-75-949-713,30.0,1280,720,train +UU9-XEvF5X4,55529,55665,379-27-1060-708,30.0,1280,720,train +UU9-XEvF5X4,57251,57789,398-45-1069-716,30.0,1280,720,train +UU9-XEvF5X4,58567,58816,105-59-753-707,30.0,1280,720,train +UU9-XEvF5X4,60005,60201,25-18-714-707,30.0,1280,720,train +UU9-XEvF5X4,61363,62161,511-16-1175-680,30.0,1280,720,train +UU9-XEvF5X4,62431,62894,679-93-1242-656,30.0,1280,720,train +UU9-XEvF5X4,63139,63682,308-83-943-718,30.0,1280,720,train +UU9-XEvF5X4,63714,64162,351-11-1042-702,30.0,1280,720,train +UU9-XEvF5X4,64185,64513,418-10-1126-718,30.0,1280,720,train +UU9-XEvF5X4,64751,65404,248-101-819-672,30.0,1280,720,train +UU9-XEvF5X4,67077,67236,621-81-1235-695,30.0,1280,720,train +UU9-XEvF5X4,67265,67395,370-95-993-718,30.0,1280,720,train +UU9-XEvF5X4,69598,69942,471-126-962-617,30.0,1280,720,train +UU9-XEvF5X4,70047,70298,430-129-976-675,30.0,1280,720,train +UU9-XEvF5X4,70450,70602,472-120-1050-698,30.0,1280,720,train +UU9-XEvF5X4,70666,71075,371-107-964-700,30.0,1280,720,train 
+UU9-XEvF5X4,71344,71656,303-62-953-712,30.0,1280,720,train +UU9-XEvF5X4,72135,72283,478-87-1105-714,30.0,1280,720,train +UU9-XEvF5X4,72413,72553,597-68-1228-699,30.0,1280,720,train +UU9-XEvF5X4,72848,73155,560-4-1267-711,30.0,1280,720,train +UU9-XEvF5X4,74833,74996,421-106-1028-713,30.0,1280,720,train +UU9-XEvF5X4,76104,76503,316-77-957-718,30.0,1280,720,train +UU9-XEvF5X4,76689,77143,460-88-1088-716,30.0,1280,720,train +UU9-XEvF5X4,77370,77627,355-58-1008-711,30.0,1280,720,train +UU9-XEvF5X4,77721,78156,201-0-919-718,30.0,1280,720,train +Y6DtgVbcD90,180,512,466-170-1308-1012,30.0,1920,1080,train +Y6DtgVbcD90,2644,2880,449-168-1331-1050,30.0,1920,1080,train +Y6DtgVbcD90,8003,8160,408-71-1324-987,30.0,1920,1080,train +niFUaaCwYag,706,1500,391-171-1288-1068,29.97,1920,1080,train +niFUaaCwYag,1500,1660,511-259-1281-1029,29.97,1920,1080,train +niFUaaCwYag,1882,2162,461-220-1297-1056,29.97,1920,1080,train +niFUaaCwYag,2162,2451,458-177-1338-1057,29.97,1920,1080,train +niFUaaCwYag,2549,2880,649-254-1459-1064,29.97,1920,1080,train +niFUaaCwYag,3010,3198,662-254-1487-1079,29.97,1920,1080,train +niFUaaCwYag,3382,3572,638-271-1410-1043,29.97,1920,1080,train +niFUaaCwYag,3986,4160,783-306-1510-1033,29.97,1920,1080,train +niFUaaCwYag,4545,4835,704-360-1391-1047,29.97,1920,1080,train +niFUaaCwYag,6741,7039,408-246-1224-1062,29.97,1920,1080,train +niFUaaCwYag,7313,7459,433-232-1236-1035,29.97,1920,1080,train +niFUaaCwYag,7459,7724,502-192-1349-1039,29.97,1920,1080,train +niFUaaCwYag,7724,7912,536-216-1398-1078,29.97,1920,1080,train +niFUaaCwYag,7948,8273,448-224-1277-1053,29.97,1920,1080,train +niFUaaCwYag,9440,9618,628-241-1449-1062,29.97,1920,1080,train +niFUaaCwYag,10391,10575,827-408-1450-1031,29.97,1920,1080,train +niFUaaCwYag,12164,12342,619-298-1391-1070,29.97,1920,1080,train +niFUaaCwYag,13057,13262,502-201-1349-1048,29.97,1920,1080,train +niFUaaCwYag,13382,13631,301-240-1141-1080,29.97,1920,1080,train +niFUaaCwYag,13753,14084,320-84-1280-1044,29.97,1920,1080,train 
+niFUaaCwYag,14451,14704,227-59-1248-1080,29.97,1920,1080,train +niFUaaCwYag,14993,15127,475-240-1306-1071,29.97,1920,1080,train +niFUaaCwYag,15365,15581,673-219-1534-1080,29.97,1920,1080,train +niFUaaCwYag,15765,15984,596-194-1428-1026,29.97,1920,1080,train +niFUaaCwYag,16246,16429,628-261-1372-1005,29.97,1920,1080,train +niFUaaCwYag,16454,16678,606-316-1321-1031,29.97,1920,1080,train +niFUaaCwYag,16698,16880,742-278-1487-1023,29.97,1920,1080,train +niFUaaCwYag,17027,17171,827-357-1472-1002,29.97,1920,1080,train +niFUaaCwYag,17171,17387,857-326-1500-969,29.97,1920,1080,train +niFUaaCwYag,17825,17975,763-314-1450-1001,29.97,1920,1080,train +niFUaaCwYag,18414,18548,717-222-1569-1074,29.97,1920,1080,train +niFUaaCwYag,19161,19305,380-128-1330-1078,29.97,1920,1080,train +niFUaaCwYag,19705,19918,800-201-1647-1048,29.97,1920,1080,train +niFUaaCwYag,19930,20284,682-203-1529-1050,29.97,1920,1080,train +niFUaaCwYag,20505,20673,667-283-1452-1068,29.97,1920,1080,train +niFUaaCwYag,20673,20893,649-244-1472-1067,29.97,1920,1080,train +niFUaaCwYag,21645,21805,408-231-1248-1071,29.97,1920,1080,train +ab28GAufK8o,261,596,175-63-551-439,29.97,640,480,test +ab28GAufK8o,832,976,122-88-445-411,29.97,640,480,test +ab28GAufK8o,997,1200,139-78-473-412,29.97,640,480,test +ab28GAufK8o,1353,1493,125-100-470-445,29.97,640,480,test +ab28GAufK8o,1591,1826,53-87-398-432,29.97,640,480,test +ab28GAufK8o,2018,2163,56-48-430-422,29.97,640,480,test +ab28GAufK8o,2361,2499,178-110-533-465,29.97,640,480,test +XYuT0U0K5CA,0,339,417-115-957-655,50.0,1280,720,train +XYuT0U0K5CA,339,1363,363-117-919-673,50.0,1280,720,train +XYuT0U0K5CA,1581,2605,297-117-852-672,50.0,1280,720,train +XYuT0U0K5CA,2605,2993,304-127-851-674,50.0,1280,720,train +XYuT0U0K5CA,3014,3389,405-154-936-685,50.0,1280,720,train +XYuT0U0K5CA,3449,3582,323-180-830-687,50.0,1280,720,train +XYuT0U0K5CA,3582,3924,350-155-881-686,50.0,1280,720,train +XYuT0U0K5CA,3930,4432,267-154-801-688,50.0,1280,720,train 
+XYuT0U0K5CA,4449,5023,267-154-807-694,50.0,1280,720,train +XYuT0U0K5CA,5147,5416,361-7-1007-653,50.0,1280,720,train +XYuT0U0K5CA,5416,5612,396-163-912-679,50.0,1280,720,train +XYuT0U0K5CA,5635,5892,467-148-1009-690,50.0,1280,720,train +XYuT0U0K5CA,5893,6139,451-137-986-672,50.0,1280,720,train +XYuT0U0K5CA,6150,6417,478-148-1005-675,50.0,1280,720,train +XYuT0U0K5CA,6417,6895,439-123-982-666,50.0,1280,720,train +XYuT0U0K5CA,6902,7148,475-126-1020-671,50.0,1280,720,train +XYuT0U0K5CA,7151,7341,461-132-999-670,50.0,1280,720,train +XYuT0U0K5CA,7398,7956,409-17-1089-697,50.0,1280,720,train +XYuT0U0K5CA,8980,10004,255-6-936-687,50.0,1280,720,train +XYuT0U0K5CA,10048,10343,242-28-909-695,50.0,1280,720,train +XYuT0U0K5CA,11554,11740,268-121-843-696,50.0,1280,720,train +XYuT0U0K5CA,11789,12813,196-32-860-696,50.0,1280,720,train +XYuT0U0K5CA,12813,13039,173-45-829-701,50.0,1280,720,train +XYuT0U0K5CA,13066,13227,225-1-930-706,50.0,1280,720,train +XYuT0U0K5CA,14251,14994,127-58-787-718,50.0,1280,720,train +XYuT0U0K5CA,15130,15728,260-50-908-698,50.0,1280,720,train +XYuT0U0K5CA,15736,15989,296-82-926-712,50.0,1280,720,train +XYuT0U0K5CA,15989,16162,273-48-933-708,50.0,1280,720,train +XYuT0U0K5CA,16537,16817,304-101-890-687,50.0,1280,720,train +XYuT0U0K5CA,17383,17539,223-18-907-702,50.0,1280,720,train +XYuT0U0K5CA,17662,18152,399-48-1019-668,50.0,1280,720,train +XYuT0U0K5CA,18152,18350,274-74-875-675,50.0,1280,720,train +XYuT0U0K5CA,18391,18595,250-0-932-682,50.0,1280,720,train +XYuT0U0K5CA,19497,19666,91-17-768-694,50.0,1280,720,train +XYuT0U0K5CA,19780,20027,265-54-893-682,50.0,1280,720,train +XYuT0U0K5CA,20560,20777,81-79-680-678,50.0,1280,720,train +XYuT0U0K5CA,21021,21174,69-5-748-684,50.0,1280,720,train +XYuT0U0K5CA,21314,21526,225-57-848-680,50.0,1280,720,train +XYuT0U0K5CA,21564,21921,82-19-748-685,50.0,1280,720,train +XYuT0U0K5CA,22051,22192,115-49-746-680,50.0,1280,720,train +XYuT0U0K5CA,22311,22466,123-67-736-680,50.0,1280,720,train 
+XYuT0U0K5CA,22476,23082,185-1-881-697,50.0,1280,720,train +k1RAdWov7kU,52,465,604-208-1340-944,25.0,1920,1080,train +k1RAdWov7kU,465,790,615-225-1368-978,25.0,1920,1080,train +k1RAdWov7kU,790,1168,575-190-1339-954,25.0,1920,1080,train +k1RAdWov7kU,1168,1508,470-150-1281-961,25.0,1920,1080,train +k1RAdWov7kU,1707,2152,491-126-1351-986,25.0,1920,1080,train +k1RAdWov7kU,2152,2288,502-188-1337-1023,25.0,1920,1080,train +k1RAdWov7kU,2411,3435,487-53-1422-988,25.0,1920,1080,train +k1RAdWov7kU,3607,3863,528-93-1411-976,25.0,1920,1080,train +k1RAdWov7kU,4021,4906,523-103-1425-1005,25.0,1920,1080,train +k1RAdWov7kU,5036,5640,446-101-1377-1032,25.0,1920,1080,train +k1RAdWov7kU,5757,5927,361-91-1332-1062,25.0,1920,1080,train +k1RAdWov7kU,5927,6123,471-40-1472-1041,25.0,1920,1080,train +k1RAdWov7kU,6635,6871,403-77-1341-1015,25.0,1920,1080,train +k1RAdWov7kU,6880,7647,390-67-1358-1035,25.0,1920,1080,train +k1RAdWov7kU,8195,8422,464-120-1357-1013,25.0,1920,1080,train +k1RAdWov7kU,8575,8823,487-45-1442-1000,25.0,1920,1080,train +k1RAdWov7kU,8946,9302,353-9-1364-1020,25.0,1920,1080,train +k1RAdWov7kU,9832,10097,493-34-1418-959,25.0,1920,1080,train +k1RAdWov7kU,10226,10360,503-70-1378-945,25.0,1920,1080,train +k1RAdWov7kU,10468,11282,597-85-1522-1010,25.0,1920,1080,train +NXPySOFaBf0,1491,1630,128-45-550-467,29.97,854,480,train +29FLWPEOCMw,5779,6424,195-15-643-463,29.97,854,480,train +29FLWPEOCMw,7448,8472,165-3-625-463,29.97,854,480,train +29FLWPEOCMw,8472,8725,168-34-587-453,29.97,854,480,train +29FLWPEOCMw,8725,9065,174-12-625-463,29.97,854,480,train +29FLWPEOCMw,9369,9892,210-0-665-455,29.97,854,480,train +FzJJ_bWLzcA,315,450,540-216-1386-1062,29.97,1920,1080,train +FzJJ_bWLzcA,1950,2096,575-123-1466-1014,29.97,1920,1080,train +FzJJ_bWLzcA,2502,2805,752-95-1647-990,29.97,1920,1080,train +FzJJ_bWLzcA,3462,3600,452-57-1346-951,29.97,1920,1080,train +FzJJ_bWLzcA,3829,4122,182-56-1031-905,29.97,1920,1080,train +d_kA4yIakbs,4185,4886,282-114-608-440,29.97,854,480,train 
+d_kA4yIakbs,5023,5303,275-114-579-418,29.97,854,480,train +d_kA4yIakbs,5445,5583,226-125-517-416,29.97,854,480,train +d_kA4yIakbs,5599,5775,312-142-598-428,29.97,854,480,train +d_kA4yIakbs,6152,6282,284-155-560-431,29.97,854,480,train +d_kA4yIakbs,7188,7406,329-147-630-448,29.97,854,480,train +d_kA4yIakbs,7502,7726,347-93-696-442,29.97,854,480,train +7cHsPUybKGg,2452,2582,143-101-429-387,25.0,640,480,train +7cHsPUybKGg,2621,2814,157-68-480-391,25.0,640,480,train +7cHsPUybKGg,2852,2980,179-93-474-388,25.0,640,480,train +7cHsPUybKGg,3235,3386,141-97-429-385,25.0,640,480,train +7cHsPUybKGg,3411,3568,136-52-475-391,25.0,640,480,train +7cHsPUybKGg,3640,3782,178-97-474-393,25.0,640,480,train +7cHsPUybKGg,3827,4164,156-81-470-395,25.0,640,480,train +7cHsPUybKGg,4512,4664,120-22-503-405,25.0,640,480,train +7cHsPUybKGg,4700,4874,137-70-472-405,25.0,640,480,train +7cHsPUybKGg,5587,5721,138-101-478-441,25.0,640,480,train +7cHsPUybKGg,5940,6115,141-83-476-418,25.0,640,480,train +7cHsPUybKGg,6159,6437,120-77-453-410,25.0,640,480,train +LZ2llkUGrsw,3300,3431,58-38-464-444,29.97,640,480,train +LZ2llkUGrsw,4052,4360,121-6-581-466,29.97,640,480,train +LZ2llkUGrsw,4522,4687,129-20-566-457,29.97,640,480,train +LZ2llkUGrsw,4769,4910,128-0-595-467,29.97,640,480,train +LZ2llkUGrsw,6142,6320,165-44-588-467,29.97,640,480,train +LZ2llkUGrsw,6509,6865,130-42-543-455,29.97,640,480,train +LZ2llkUGrsw,7286,7415,89-69-473-453,29.97,640,480,train +LZ2llkUGrsw,8448,8749,140-100-515-475,29.97,640,480,train +LZ2llkUGrsw,8751,9081,130-52-551-473,29.97,640,480,train +LZ2llkUGrsw,9420,10071,84-62-491-469,29.97,640,480,train +LZ2llkUGrsw,10108,10260,137-27-575-465,29.97,640,480,train +LZ2llkUGrsw,11207,11425,93-58-504-469,29.97,640,480,train +LZ2llkUGrsw,11428,11676,100-13-560-473,29.97,640,480,train +LZ2llkUGrsw,12120,12297,151-37-580-466,29.97,640,480,train +LZ2llkUGrsw,12382,12553,114-25-569-480,29.97,640,480,train +LZ2llkUGrsw,12554,12742,126-22-574-470,29.97,640,480,train 
+Q52BKOPKOA0,381,1405,43-56-675-688,25.0,1280,720,train +Q52BKOPKOA0,1405,2281,248-133-752-637,25.0,1280,720,train +Q52BKOPKOA0,2388,2571,385-154-842-611,25.0,1280,720,train +Q52BKOPKOA0,2571,2703,384-174-842-632,25.0,1280,720,train +Q52BKOPKOA0,2845,3063,341-126-856-641,25.0,1280,720,train +Q52BKOPKOA0,3104,3288,514-144-996-626,25.0,1280,720,train +Q52BKOPKOA0,3289,3519,458-144-951-637,25.0,1280,720,train +Q52BKOPKOA0,3631,3762,716-160-1165-609,25.0,1280,720,train +Q52BKOPKOA0,3972,4138,531-112-1096-677,25.0,1280,720,train +Q52BKOPKOA0,4138,4281,547-70-1158-681,25.0,1280,720,train +MEPsADK9Vmk,466,703,170-167-443-440,29.97,640,480,test +MEPsADK9Vmk,898,1426,191-137-502-448,29.97,640,480,test +MEPsADK9Vmk,1611,1887,241-115-572-446,29.97,640,480,test +MEPsADK9Vmk,2049,2189,157-127-491-461,29.97,640,480,test +MEPsADK9Vmk,2208,2422,198-169-483-454,29.97,640,480,test +MEPsADK9Vmk,2585,2786,110-119-447-456,29.97,640,480,test +MEPsADK9Vmk,2786,2966,166-127-491-452,29.97,640,480,test +MEPsADK9Vmk,4351,4602,204-133-517-446,29.97,640,480,test +MEPsADK9Vmk,4654,4834,142-144-434-436,29.97,640,480,test +9b1jcHFhVKg,588,727,147-107-476-436,25.0,600,480,train +9b1jcHFhVKg,820,1133,129-89-491-451,25.0,600,480,train +9b1jcHFhVKg,2160,2321,167-85-525-443,25.0,600,480,train +9b1jcHFhVKg,2322,2488,152-82-522-452,25.0,600,480,train +9b1jcHFhVKg,2489,2688,145-94-502-451,25.0,600,480,train +9b1jcHFhVKg,2690,3649,132-93-491-452,25.0,600,480,train +9b1jcHFhVKg,3655,3919,88-9-529-450,25.0,600,480,train +9b1jcHFhVKg,4429,4616,180-85-542-447,25.0,600,480,train +9b1jcHFhVKg,5354,6378,130-96-481-447,25.0,600,480,train +9b1jcHFhVKg,6378,7402,132-89-491-448,25.0,600,480,train +9b1jcHFhVKg,7402,7550,129-89-493-453,25.0,600,480,train +9b1jcHFhVKg,7560,7840,133-90-490-447,25.0,600,480,train +9b1jcHFhVKg,7938,8115,133-89-501-457,25.0,600,480,train +9b1jcHFhVKg,8238,8370,134-90-493-449,25.0,600,480,train +9b1jcHFhVKg,8370,8687,134-90-493-449,25.0,600,480,train 
+9b1jcHFhVKg,8688,9213,115-50-517-452,25.0,600,480,train +9b1jcHFhVKg,9213,9614,91-11-532-452,25.0,600,480,train +oEtFlwUMyvY,2897,3921,118-45-525-452,29.97,640,480,train +oEtFlwUMyvY,3921,4896,113-52-513-452,29.97,640,480,train +oEtFlwUMyvY,5677,5828,113-49-524-460,29.97,640,480,train +oEtFlwUMyvY,5844,6125,108-18-550-460,29.97,640,480,train +FpLOIJJMXhg,117,652,98-4-561-467,25.0,640,480,train +FpLOIJJMXhg,665,1505,115-32-561-478,25.0,640,480,train +FpLOIJJMXhg,1731,2326,81-25-509-453,25.0,640,480,train +FpLOIJJMXhg,2363,2588,77-21-519-463,25.0,640,480,train +FpLOIJJMXhg,2661,3274,57-2-523-468,25.0,640,480,train +FpLOIJJMXhg,3596,3769,177-126-506-455,25.0,640,480,train +FpLOIJJMXhg,4268,4552,103-85-472-454,25.0,640,480,train +FpLOIJJMXhg,4939,5673,89-68-478-457,25.0,640,480,train +FpLOIJJMXhg,6731,7031,94-63-498-467,25.0,640,480,train +nUDf0Gv_GJ4,530,1554,0-73-334-408,25.0,640,480,train +nUDf0Gv_GJ4,1638,1808,34-97-343-406,25.0,640,480,train +nUDf0Gv_GJ4,1914,2285,9-102-312-405,25.0,640,480,train +nUDf0Gv_GJ4,2424,3169,44-95-356-407,25.0,640,480,train +nUDf0Gv_GJ4,3461,3816,4-97-320-413,25.0,640,480,train +nUDf0Gv_GJ4,3909,4148,10-104-315-409,25.0,640,480,train +nUDf0Gv_GJ4,4148,4625,10-90-330-410,25.0,640,480,train +nUDf0Gv_GJ4,4724,5514,10-86-339-415,25.0,640,480,train +nUDf0Gv_GJ4,5648,5900,48-97-369-418,25.0,640,480,train +nUDf0Gv_GJ4,5947,6692,12-64-373-425,25.0,640,480,train +nUDf0Gv_GJ4,6786,6960,3-81-347-425,25.0,640,480,train +nUDf0Gv_GJ4,6960,7198,2-64-371-433,25.0,640,480,train +nUDf0Gv_GJ4,7290,7475,94-72-450-428,25.0,640,480,train +nUDf0Gv_GJ4,7689,7890,79-78-436-435,25.0,640,480,train +nUDf0Gv_GJ4,7890,8044,27-80-373-426,25.0,640,480,train +nUDf0Gv_GJ4,8044,8310,55-62-418-425,25.0,640,480,train +QBqpVrjyeYM,3660,3843,146-118-499-471,25.0,640,480,train +QBqpVrjyeYM,4690,5063,186-181-476-471,25.0,640,480,train +QBqpVrjyeYM,6048,6213,194-188-481-475,25.0,640,480,train +QBqpVrjyeYM,6216,6347,194-195-462-463,25.0,640,480,train 
+AdEkWqdDlp8,0,216,1072-335-1762-1025,25.0,1920,1080,train +AdEkWqdDlp8,292,749,1093-301-1807-1015,25.0,1920,1080,train +AdEkWqdDlp8,1203,2227,234-261-1046-1073,25.0,1920,1080,train +AdEkWqdDlp8,2227,3251,331-260-1123-1052,25.0,1920,1080,train +AdEkWqdDlp8,3251,3610,409-307-1107-1005,25.0,1920,1080,train +AdEkWqdDlp8,3659,3891,553-314-1197-958,25.0,1920,1080,train +AdEkWqdDlp8,3920,4110,408-302-1107-1001,25.0,1920,1080,train +AdEkWqdDlp8,4110,4342,631-323-1293-985,25.0,1920,1080,train +AdEkWqdDlp8,4342,5056,658-290-1350-982,25.0,1920,1080,train +JdiIQg47Wc4,835,1096,173-195-1000-1022,25.0,1920,1080,train +JdiIQg47Wc4,2117,2248,63-141-981-1059,25.0,1920,1080,train +JdiIQg47Wc4,3301,3563,243-73-1183-1013,25.0,1920,1080,train +JdiIQg47Wc4,3603,3733,329-191-1171-1033,25.0,1920,1080,train +JdiIQg47Wc4,3801,3959,160-152-1049-1041,25.0,1920,1080,train +aOZmneMZICQ,0,219,257-120-540-403,24.0,854,480,train +aOZmneMZICQ,291,1256,289-139-556-406,24.0,854,480,train +aOZmneMZICQ,1256,1919,292-125-573-406,24.0,854,480,train +aOZmneMZICQ,1919,2496,298-108-598-408,24.0,854,480,train +aOZmneMZICQ,2496,2767,284-116-574-406,24.0,854,480,train +aOZmneMZICQ,3246,3456,288-129-561-402,24.0,854,480,train +aOZmneMZICQ,3480,3677,273-80-593-400,24.0,854,480,train +aOZmneMZICQ,3752,3951,282-122-566-406,24.0,854,480,train +aOZmneMZICQ,3980,4124,251-56-601-406,24.0,854,480,train +aOZmneMZICQ,4257,4452,281-120-564-403,24.0,854,480,train +aOZmneMZICQ,4472,4864,261-71-594-404,24.0,854,480,train +aOZmneMZICQ,4880,5180,284-102-585-403,24.0,854,480,train +aOZmneMZICQ,5180,5334,269-87-585-403,24.0,854,480,train +aOZmneMZICQ,5338,6362,245-49-605-409,24.0,854,480,train +aOZmneMZICQ,6413,6809,303-116-590-403,24.0,854,480,train +RCiy2FYViEg,382,1051,185-80-511-406,29.97,640,480,train +RCiy2FYViEg,1740,2161,178-76-502-400,29.97,640,480,train +RCiy2FYViEg,2238,2446,194-65-506-377,29.97,640,480,train +RCiy2FYViEg,3171,3310,208-84-510-386,29.97,640,480,train 
+RCiy2FYViEg,4437,4805,200-97-477-374,29.97,640,480,train +67If7DVAWNE,1170,1327,142-51-549-458,30.0,640,480,train +67If7DVAWNE,1327,1472,136-38-562-464,30.0,640,480,train +67If7DVAWNE,1472,2005,141-41-564-464,30.0,640,480,train +gaccfn5JB4Y,1713,1845,169-28-566-425,29.97,640,480,test +gaccfn5JB4Y,7950,8100,90-20-497-427,29.97,640,480,test +VBevstnH4Ds,482,738,184-126-459-401,25.0,640,480,train +VBevstnH4Ds,1057,1578,168-92-475-399,25.0,640,480,train +VBevstnH4Ds,1700,2163,165-90-475-400,25.0,640,480,train +VBevstnH4Ds,2269,2688,168-96-473-401,25.0,640,480,train +VBevstnH4Ds,2702,3267,152-64-491-403,25.0,640,480,train +VBevstnH4Ds,3390,4414,158-76-483-401,25.0,640,480,train +VBevstnH4Ds,4414,5438,186-124-456-394,25.0,640,480,train +VBevstnH4Ds,5438,5759,191-130-459-398,25.0,640,480,train +VBevstnH4Ds,5759,6783,186-112-476-402,25.0,640,480,train +VBevstnH4Ds,6783,7807,181-129-454-402,25.0,640,480,train +VBevstnH4Ds,7807,8583,181-123-455-397,25.0,640,480,train +gDF0gW-jo9E,635,829,382-105-903-626,29.97,1280,720,train +gDF0gW-jo9E,830,977,379-102-902-625,29.97,1280,720,train +gDF0gW-jo9E,1068,1261,365-73-914-622,29.97,1280,720,train +gDF0gW-jo9E,1497,1685,251-115-812-676,29.97,1280,720,train +gDF0gW-jo9E,2196,2402,321-115-834-628,29.97,1280,720,train +gDF0gW-jo9E,2547,2830,335-78-869-612,29.97,1280,720,train +gDF0gW-jo9E,2830,2963,199-48-750-599,29.97,1280,720,train +gDF0gW-jo9E,3252,3451,138-49-793-704,29.97,1280,720,train +gDF0gW-jo9E,3451,3681,109-49-765-705,29.97,1280,720,train +gDF0gW-jo9E,3681,3879,123-54-768-699,29.97,1280,720,train +gDF0gW-jo9E,3904,4333,152-74-752-674,29.97,1280,720,train +gDF0gW-jo9E,4336,4520,241-82-788-629,29.97,1280,720,train +gDF0gW-jo9E,4520,4706,231-68-790-627,29.97,1280,720,train +gDF0gW-jo9E,4729,4917,194-97-757-660,29.97,1280,720,train +gDF0gW-jo9E,5663,5864,274-97-863-686,29.97,1280,720,train +gDF0gW-jo9E,6151,6473,344-80-941-677,29.97,1280,720,train +IXMiGVYeTJ4,934,1146,149-124-418-393,29.97,640,480,train 
+IXMiGVYeTJ4,1164,1293,131-128-393-390,29.97,640,480,train +IXMiGVYeTJ4,2749,2926,106-123-381-398,29.97,640,480,train +IXMiGVYeTJ4,3645,3833,147-130-407-390,29.97,640,480,train +IXMiGVYeTJ4,4400,4553,204-131-470-397,29.97,640,480,train +IXMiGVYeTJ4,4557,4727,179-120-456-397,29.97,640,480,train +IXMiGVYeTJ4,5128,5438,154-101-448-395,29.97,640,480,train +5Z8D9WwcETQ,124,357,368-103-847-582,25.0,1280,720,train +5Z8D9WwcETQ,357,500,371-107-846-582,25.0,1280,720,train +5Z8D9WwcETQ,979,1228,378-79-872-573,25.0,1280,720,train +5Z8D9WwcETQ,3241,3390,284-86-810-612,25.0,1280,720,train +5Z8D9WwcETQ,3795,3937,235-22-842-629,25.0,1280,720,train +5Z8D9WwcETQ,3942,4165,158-15-812-669,25.0,1280,720,train +5Z8D9WwcETQ,5695,5947,306-0-974-668,25.0,1280,720,train +5Z8D9WwcETQ,5995,6145,141-6-760-625,25.0,1280,720,train +5Z8D9WwcETQ,10158,10356,276-20-844-588,25.0,1280,720,train +5Z8D9WwcETQ,10530,10835,366-34-959-627,25.0,1280,720,train +q1KCN0rDiAE,0,406,604-254-1172-822,25.0,1920,1080,train +q1KCN0rDiAE,406,898,658-267-1222-831,25.0,1920,1080,train +q1KCN0rDiAE,898,1105,535-307-1046-818,25.0,1920,1080,train +q1KCN0rDiAE,1413,1616,469-320-1002-853,25.0,1920,1080,train +q1KCN0rDiAE,1616,1772,547-292-1136-881,25.0,1920,1080,train +q1KCN0rDiAE,1772,2022,577-260-1188-871,25.0,1920,1080,train +q1KCN0rDiAE,2228,2475,415-304-982-871,25.0,1920,1080,train +q1KCN0rDiAE,2558,2771,412-310-980-878,25.0,1920,1080,train +q1KCN0rDiAE,2987,3151,250-177-941-868,25.0,1920,1080,train +q1KCN0rDiAE,3175,3366,263-293-938-968,25.0,1920,1080,train +q1KCN0rDiAE,4387,4893,703-292-1274-863,25.0,1920,1080,train +q1KCN0rDiAE,7136,7376,622-341-1098-817,25.0,1920,1080,train +q1KCN0rDiAE,7387,7523,671-304-1219-852,25.0,1920,1080,train +q1KCN0rDiAE,7523,7733,676-272-1241-837,25.0,1920,1080,train +q1KCN0rDiAE,7733,7876,588-306-1119-837,25.0,1920,1080,train +4rknb1u9kAQ,513,1091,258-111-788-641,29.97,1280,720,train +4rknb1u9kAQ,1292,1580,226-163-724-661,29.97,1280,720,train 
+4rknb1u9kAQ,2059,2348,310-141-842-673,29.97,1280,720,train +4rknb1u9kAQ,2607,2789,399-169-877-647,29.97,1280,720,train +4rknb1u9kAQ,2900,3041,435-146-955-666,29.97,1280,720,train +4rknb1u9kAQ,3537,3683,384-125-920-661,29.97,1280,720,train +4rknb1u9kAQ,4230,4481,608-146-1083-621,29.97,1280,720,train +4rknb1u9kAQ,4505,4661,562-121-1033-592,29.97,1280,720,train +4rknb1u9kAQ,5879,6017,446-134-938-626,29.97,1280,720,train +4rknb1u9kAQ,6622,6907,171-58-810-697,29.97,1280,720,train +4rknb1u9kAQ,8458,8625,220-150-726-656,29.97,1280,720,train +4rknb1u9kAQ,13070,13211,297-192-714-609,29.97,1280,720,train +4rknb1u9kAQ,13237,13385,206-164-679-637,29.97,1280,720,train +4rknb1u9kAQ,13449,13691,276-182-706-612,29.97,1280,720,train +4rknb1u9kAQ,13887,14183,282-140-753-611,29.97,1280,720,train +gmSa_wwIhfE,4297,4451,136-10-590-464,29.97,640,480,train +gmSa_wwIhfE,4734,4885,65-30-513-478,29.97,640,480,train +gmSa_wwIhfE,4904,5032,139-53-553-467,29.97,640,480,train +gmSa_wwIhfE,5033,5218,145-49-550-454,29.97,640,480,train +gmSa_wwIhfE,5550,5727,164-66-563-465,29.97,640,480,train +gmSa_wwIhfE,9143,9366,214-52-611-449,29.97,640,480,train +gmSa_wwIhfE,10057,10203,177-37-587-447,29.97,640,480,train +gmSa_wwIhfE,11205,11372,183-82-546-445,29.97,640,480,train +gmSa_wwIhfE,11524,11672,184-66-568-450,29.97,640,480,train +WlDYrq8K6nk,2253,2686,260-18-938-696,29.97,1280,720,test +WlDYrq8K6nk,2959,3094,353-46-990-683,29.97,1280,720,test +WlDYrq8K6nk,3247,3381,324-52-980-708,29.97,1280,720,test +WlDYrq8K6nk,3984,4245,341-116-938-713,29.97,1280,720,test +WlDYrq8K6nk,4875,5151,389-9-1096-716,29.97,1280,720,test +WlDYrq8K6nk,5264,5403,297-17-959-679,29.97,1280,720,test +WlDYrq8K6nk,5943,6135,261-23-942-704,29.97,1280,720,test +WlDYrq8K6nk,7058,7241,337-4-1009-676,29.97,1280,720,test +WlDYrq8K6nk,8186,8512,276-7-974-705,29.97,1280,720,test +WlDYrq8K6nk,8520,8777,326-75-953-702,29.97,1280,720,test +y4egTMS9H1k,382,640,107-62-963-918,25.0,1920,1080,train 
+y4egTMS9H1k,640,917,146-51-1032-937,25.0,1920,1080,train +y4egTMS9H1k,978,1117,443-111-1272-940,25.0,1920,1080,train +y4egTMS9H1k,1139,1357,579-120-1453-994,25.0,1920,1080,train +y4egTMS9H1k,3217,3350,507-24-1464-981,25.0,1920,1080,train +y4egTMS9H1k,3469,3730,353-37-1308-992,25.0,1920,1080,train +y4egTMS9H1k,4163,4389,519-45-1426-952,25.0,1920,1080,train +y4egTMS9H1k,4612,4800,532-60-1428-956,25.0,1920,1080,train +y4egTMS9H1k,4800,5009,492-16-1437-961,25.0,1920,1080,train +y4egTMS9H1k,5330,5476,192-123-1038-969,25.0,1920,1080,train +y4egTMS9H1k,5508,5704,123-167-910-954,25.0,1920,1080,train +y4egTMS9H1k,5922,6063,322-93-1246-1017,25.0,1920,1080,train +y4egTMS9H1k,6103,6370,410-87-1372-1049,25.0,1920,1080,train +y4egTMS9H1k,6370,6514,700-137-1517-954,25.0,1920,1080,train +y4egTMS9H1k,7018,7169,782-129-1677-1024,25.0,1920,1080,train +y4egTMS9H1k,7388,7705,874-46-1869-1041,25.0,1920,1080,train +y4egTMS9H1k,8168,8332,433-46-1357-970,25.0,1920,1080,train +y4egTMS9H1k,8944,9083,431-95-1295-959,25.0,1920,1080,train +y4egTMS9H1k,9083,9280,476-52-1369-945,25.0,1920,1080,train +y4egTMS9H1k,9280,9458,484-138-1335-989,25.0,1920,1080,train +y4egTMS9H1k,9539,9751,664-141-1434-911,25.0,1920,1080,train +y4egTMS9H1k,9751,9943,572-104-1363-895,25.0,1920,1080,train +y4egTMS9H1k,10173,10417,256-80-1165-989,25.0,1920,1080,train +y4egTMS9H1k,10452,10675,298-104-1181-987,25.0,1920,1080,train +y4egTMS9H1k,10706,10835,512-109-1449-1046,25.0,1920,1080,train +y4egTMS9H1k,10859,11061,609-129-1527-1047,25.0,1920,1080,train +y4egTMS9H1k,11342,11706,595-24-1616-1045,25.0,1920,1080,train +P9nWs-Haf_I,267,1291,184-140-490-446,25.0,648,480,train +P9nWs-Haf_I,1599,1740,194-154-489-449,25.0,648,480,train +P9nWs-Haf_I,1850,2113,199-140-514-455,25.0,648,480,train +4a0Gro197W4,23,312,64-3-527-466,29.97,640,480,train +4a0Gro197W4,2755,2910,186-37-530-381,29.97,640,480,train +4a0Gro197W4,3130,3285,138-13-515-390,29.97,640,480,train +eQqUScz9rLw,76,580,165-101-468-404,25.0,640,480,train 
+eQqUScz9rLw,580,904,132-123-426-417,25.0,640,480,train +eQqUScz9rLw,2620,2923,124-72-489-437,25.0,640,480,train +eQqUScz9rLw,3587,3735,155-71-526-442,25.0,640,480,train +jVIhp_8EWIE,302,524,115-36-497-418,29.97,640,480,train +jVIhp_8EWIE,4912,5048,215-59-515-359,29.97,640,480,train +Jgx22uN5xzQ,249,600,267-111-558-402,25.0,640,480,train +Jgx22uN5xzQ,951,1200,134-124-415-405,25.0,640,480,train +Jgx22uN5xzQ,1261,1391,133-132-403-402,25.0,640,480,train +Jgx22uN5xzQ,1527,1725,125-134-389-398,25.0,640,480,train +Jgx22uN5xzQ,2151,2387,120-131-379-390,25.0,640,480,train +Jgx22uN5xzQ,2410,2675,175-127-443-395,25.0,640,480,train +Jgx22uN5xzQ,2887,3311,17-28-455-466,25.0,640,480,train +Jgx22uN5xzQ,3436,3605,43-80-423-460,25.0,640,480,train +Jgx22uN5xzQ,4505,4714,105-82-502-479,25.0,640,480,train +rjfmhJ6GCwM,1218,1634,315-46-983-714,29.97,1280,720,train +rjfmhJ6GCwM,1801,2000,383-51-1049-717,29.97,1280,720,train +rjfmhJ6GCwM,2188,2406,316-69-940-693,29.97,1280,720,train +rjfmhJ6GCwM,2976,3114,360-113-911-664,29.97,1280,720,train +rjfmhJ6GCwM,3225,3430,406-62-1025-681,29.97,1280,720,train +rjfmhJ6GCwM,6472,6687,347-92-951-696,29.97,1280,720,train +rjfmhJ6GCwM,6689,6881,354-104-950-700,29.97,1280,720,train +rjfmhJ6GCwM,7235,7685,343-43-990-690,29.97,1280,720,train +P0aQKRrvpt8,1107,1244,117-132-383-398,25.0,640,480,train +P0aQKRrvpt8,3501,3681,119-87-407-375,25.0,640,480,train +S_aTWQUxylE,162,566,131-17-573-459,25.0,640,480,train +qVoJJj13PaY,1167,1486,116-42-518-444,29.97,640,480,train +qVoJJj13PaY,6070,6213,150-74-523-447,29.97,640,480,train +qV6PjN55Lb4,785,941,423-68-1345-990,29.97,1920,1080,train +qV6PjN55Lb4,946,1875,425-73-1343-991,29.97,1920,1080,train +qV6PjN55Lb4,1949,2088,725-205-1498-978,29.97,1920,1080,train +qV6PjN55Lb4,2373,2706,582-118-1452-988,29.97,1920,1080,train +qV6PjN55Lb4,2727,3057,466-21-1443-998,29.97,1920,1080,train +qV6PjN55Lb4,3251,3419,727-173-1508-954,29.97,1920,1080,train +qV6PjN55Lb4,3566,3742,721-127-1607-1013,29.97,1920,1080,train 
+qV6PjN55Lb4,3905,4135,607-109-1467-969,29.97,1920,1080,train +qV6PjN55Lb4,4135,4296,387-167-1208-988,29.97,1920,1080,train +qV6PjN55Lb4,4336,4491,466-162-1289-985,29.97,1920,1080,train +qV6PjN55Lb4,4491,4876,437-97-1333-993,29.97,1920,1080,train +qV6PjN55Lb4,6370,6521,497-194-1254-951,29.97,1920,1080,train +qV6PjN55Lb4,7456,7733,615-179-1389-953,29.97,1920,1080,train +qV6PjN55Lb4,7944,8378,546-146-1336-936,29.97,1920,1080,train +qV6PjN55Lb4,8681,8902,708-113-1516-921,29.97,1920,1080,train +qV6PjN55Lb4,8924,9094,763-107-1572-916,29.97,1920,1080,train +qV6PjN55Lb4,9582,9794,458-53-1401-996,29.97,1920,1080,train +qV6PjN55Lb4,10285,10464,653-140-1488-975,29.97,1920,1080,train +qV6PjN55Lb4,10464,10594,515-175-1337-997,29.97,1920,1080,train +qV6PjN55Lb4,10769,10965,620-128-1487-995,29.97,1920,1080,train +qV6PjN55Lb4,11040,11178,438-121-1326-1009,29.97,1920,1080,train +qV6PjN55Lb4,11350,11534,512-146-1381-1015,29.97,1920,1080,train +qV6PjN55Lb4,11680,12002,534-65-1495-1026,29.97,1920,1080,train +mAmvStH5k4g,3132,3284,150-38-554-442,29.97,640,480,train +mAmvStH5k4g,3765,3971,173-34-592-453,29.97,640,480,train +mAmvStH5k4g,4096,4426,140-40-555-455,29.97,640,480,train +mAmvStH5k4g,4471,4866,71-36-507-472,29.97,640,480,train +mAmvStH5k4g,5670,5820,119-44-539-464,29.97,640,480,train +pbznqs21OvQ,577,1601,311-101-794-584,50.0,1280,720,train +pbznqs21OvQ,1684,2013,418-130-873-585,50.0,1280,720,train +pbznqs21OvQ,2070,2226,259-120-742-603,50.0,1280,720,train +pbznqs21OvQ,2278,2486,284-169-752-637,50.0,1280,720,train +pbznqs21OvQ,2567,3591,302-98-884-680,50.0,1280,720,train +pbznqs21OvQ,3867,4371,181-152-687-658,50.0,1280,720,train +pbznqs21OvQ,4484,4685,295-174-782-661,50.0,1280,720,train +pbznqs21OvQ,4733,5022,164-106-697-639,50.0,1280,720,train +pbznqs21OvQ,5022,5166,184-114-737-667,50.0,1280,720,train +pbznqs21OvQ,5166,5341,101-83-671-653,50.0,1280,720,train +pbznqs21OvQ,5631,5785,305-178-785-658,50.0,1280,720,train +pbznqs21OvQ,5785,5946,302-151-814-663,50.0,1280,720,train 
+pbznqs21OvQ,6089,6377,342-151-846-655,50.0,1280,720,train +pbznqs21OvQ,6811,6967,487-17-1121-651,50.0,1280,720,train +pbznqs21OvQ,6967,7157,488-110-1057-679,50.0,1280,720,train +pbznqs21OvQ,7540,7724,436-16-1139-719,50.0,1280,720,train +pbznqs21OvQ,8385,9137,220-97-817-694,50.0,1280,720,train +pbznqs21OvQ,9137,10050,123-71-679-627,50.0,1280,720,train +pbznqs21OvQ,10050,10519,253-147-726-620,50.0,1280,720,train +pbznqs21OvQ,10879,11158,451-156-910-615,50.0,1280,720,train +pbznqs21OvQ,11158,11595,445-134-916-605,50.0,1280,720,train +pbznqs21OvQ,11867,12075,287-145-803-661,50.0,1280,720,train +pbznqs21OvQ,12075,12266,267-119-806-658,50.0,1280,720,train +pbznqs21OvQ,12312,13142,267-94-840-667,50.0,1280,720,train +pbznqs21OvQ,13295,13877,286-72-907-693,50.0,1280,720,train +pbznqs21OvQ,13877,14421,253-43-900-690,50.0,1280,720,train +N8yfhFc25c8,0,1024,454-277-1173-996,29.97,1920,1080,train +N8yfhFc25c8,1557,1836,668-339-1312-983,29.97,1920,1080,train +N8yfhFc25c8,1836,1995,558-329-1224-995,29.97,1920,1080,train +N8yfhFc25c8,2082,2265,497-321-1166-990,29.97,1920,1080,train +N8yfhFc25c8,2309,2876,669-318-1338-987,29.97,1920,1080,train +N8yfhFc25c8,3000,3524,653-341-1309-997,29.97,1920,1080,train +N8yfhFc25c8,3524,4357,528-342-1193-1007,29.97,1920,1080,train +N8yfhFc25c8,4669,4863,537-350-1180-993,29.97,1920,1080,train +N8yfhFc25c8,4878,5546,660-287-1347-974,29.97,1920,1080,train +Bm82HLNjkKw,241,830,166-223-422-479,30.0,640,480,train +Bm82HLNjkKw,1528,2552,165-202-443-480,30.0,640,480,train +Bm82HLNjkKw,6236,7260,181-203-457-479,30.0,640,480,train +Bm82HLNjkKw,7338,7690,182-201-461-480,30.0,640,480,train +MvupxvDqZ9M,155,1104,106-6-563-463,29.97,640,480,train +MvupxvDqZ9M,1905,2823,137-5-579-447,29.97,640,480,train +MvupxvDqZ9M,3202,3832,120-13-558-451,29.97,640,480,train +MvupxvDqZ9M,4250,4436,100-6-535-441,29.97,640,480,train +MvupxvDqZ9M,4436,5020,95-5-555-465,29.97,640,480,train +MvupxvDqZ9M,5709,6733,103-26-532-455,29.97,640,480,train 
+MvupxvDqZ9M,6972,7811,141-16-580-455,29.97,640,480,train +MvupxvDqZ9M,8962,9544,165-21-609-465,29.97,640,480,train +MvupxvDqZ9M,9566,10120,123-24-555-456,29.97,640,480,train +MvupxvDqZ9M,10463,11410,89-10-536-457,29.97,640,480,train +MvupxvDqZ9M,11410,11671,94-32-517-455,29.97,640,480,train +MvupxvDqZ9M,11759,12308,102-10-548-456,29.97,640,480,train +VhprHat04dk,1321,1459,458-125-1287-954,29.97,1920,1080,train +VhprHat04dk,4550,4740,426-96-1261-931,29.97,1920,1080,train +mtXOJ5vBkLw,180,497,215-45-620-450,29.97,854,480,train +mtXOJ5vBkLw,1302,1449,292-94-660-462,29.97,854,480,train +mtXOJ5vBkLw,4527,4703,247-70-637-460,29.97,854,480,train +NjAu1n_-r0A,318,596,635-329-1359-1053,25.0,1920,1080,train +NjAu1n_-r0A,596,945,625-291-1383-1049,25.0,1920,1080,train +NjAu1n_-r0A,1043,1176,621-326-1328-1033,25.0,1920,1080,train +NjAu1n_-r0A,1208,1476,536-166-1419-1049,25.0,1920,1080,train +NjAu1n_-r0A,1530,1701,638-310-1377-1049,25.0,1920,1080,train +NjAu1n_-r0A,1736,1943,538-344-1233-1039,25.0,1920,1080,train +NjAu1n_-r0A,2095,2297,650-283-1407-1040,25.0,1920,1080,train +NjAu1n_-r0A,2363,2548,602-289-1353-1040,25.0,1920,1080,train +NjAu1n_-r0A,2800,2953,535-381-1183-1029,25.0,1920,1080,train +NjAu1n_-r0A,3220,3412,742-288-1481-1027,25.0,1920,1080,train +NjAu1n_-r0A,3493,3701,640-345-1332-1037,25.0,1920,1080,train +NjAu1n_-r0A,4925,5081,797-329-1519-1051,25.0,1920,1080,train +NjAu1n_-r0A,5155,5355,617-196-1481-1060,25.0,1920,1080,train +NjAu1n_-r0A,5674,6187,594-155-1499-1060,25.0,1920,1080,train +NjAu1n_-r0A,6817,7302,641-282-1398-1039,25.0,1920,1080,train +NjAu1n_-r0A,7741,8169,583-260-1355-1032,25.0,1920,1080,train +NjAu1n_-r0A,8170,8311,595-285-1353-1043,25.0,1920,1080,train +NjAu1n_-r0A,8355,8749,517-158-1413-1054,25.0,1920,1080,train +NjAu1n_-r0A,8977,9148,539-182-1406-1049,25.0,1920,1080,train +NjAu1n_-r0A,9318,9468,487-349-1192-1054,25.0,1920,1080,train +NjAu1n_-r0A,10494,10665,557-293-1292-1028,25.0,1920,1080,train 
+wtFqCHVGA44,2596,2867,199-24-648-473,29.97,854,480,train +wtFqCHVGA44,3700,3904,304-35-693-424,29.97,854,480,train +wtFqCHVGA44,3904,4136,302-20-698-416,29.97,854,480,train +wtFqCHVGA44,4311,4536,352-50-752-450,29.97,854,480,train +wtFqCHVGA44,4536,4849,325-30-755-460,29.97,854,480,train +wtFqCHVGA44,4849,5250,334-42-738-446,29.97,854,480,train +wtFqCHVGA44,5250,5428,333-52-726-445,29.97,854,480,train +wtFqCHVGA44,5428,5631,339-34-726-421,29.97,854,480,train +wtFqCHVGA44,5855,6021,290-36-704-450,29.97,854,480,train +wtFqCHVGA44,6021,6623,279-47-702-470,29.97,854,480,train +wtFqCHVGA44,6641,6810,241-23-667-449,29.97,854,480,train +wtFqCHVGA44,8312,8460,222-4-688-470,29.97,854,480,train +wtFqCHVGA44,8469,9108,189-27-627-465,29.97,854,480,train +wtFqCHVGA44,9108,9262,181-7-607-433,29.97,854,480,train +P4vKV-G3hGE,108,351,573-307-1290-1024,29.97,1920,1080,train +P4vKV-G3hGE,440,991,635-223-1341-929,29.97,1920,1080,train +P4vKV-G3hGE,1024,1225,553-285-1226-958,29.97,1920,1080,train +P4vKV-G3hGE,1225,1410,493-325-1156-988,29.97,1920,1080,train +P4vKV-G3hGE,1410,1616,424-293-1115-984,29.97,1920,1080,train +P4vKV-G3hGE,1830,2163,415-337-1090-1012,29.97,1920,1080,train +P4vKV-G3hGE,2805,3003,383-128-1122-867,29.97,1920,1080,train +P4vKV-G3hGE,3071,3458,394-135-1153-894,29.97,1920,1080,train +P4vKV-G3hGE,3541,3759,439-121-1186-868,29.97,1920,1080,train +P4vKV-G3hGE,3814,4012,321-52-1063-794,29.97,1920,1080,train +P4vKV-G3hGE,4592,4973,503-160-1239-896,29.97,1920,1080,train +P4vKV-G3hGE,6213,6361,357-138-1198-979,29.97,1920,1080,train +P4vKV-G3hGE,6381,6654,396-189-1245-1038,29.97,1920,1080,train +P4vKV-G3hGE,6877,7122,375-180-1215-1020,29.97,1920,1080,train +P4vKV-G3hGE,7122,7250,238-172-1087-1021,29.97,1920,1080,train +P4vKV-G3hGE,7251,7541,190-146-1030-986,29.97,1920,1080,train +P4vKV-G3hGE,7782,8012,208-67-1179-1038,29.97,1920,1080,train +P4vKV-G3hGE,8735,8939,472-124-1397-1049,29.97,1920,1080,train +P4vKV-G3hGE,8939,9583,200-172-1071-1043,29.97,1920,1080,train 
+P4vKV-G3hGE,9707,9896,331-115-1149-933,29.97,1920,1080,train +P4vKV-G3hGE,10209,10491,440-116-1296-972,29.97,1920,1080,train +P4vKV-G3hGE,10529,10754,361-178-1168-985,29.97,1920,1080,train +P4vKV-G3hGE,10953,11209,23-165-920-1062,29.97,1920,1080,train +P4vKV-G3hGE,11209,11419,86-122-998-1034,29.97,1920,1080,train +P4vKV-G3hGE,11505,11735,204-111-1154-1061,29.97,1920,1080,train +P4vKV-G3hGE,11841,12014,337-119-1256-1038,29.97,1920,1080,train +P4vKV-G3hGE,12014,12430,272-124-1220-1072,29.97,1920,1080,train +P4vKV-G3hGE,12871,13029,253-103-1226-1076,29.97,1920,1080,train +P4vKV-G3hGE,13615,13753,270-91-1256-1077,29.97,1920,1080,train +P4vKV-G3hGE,13912,14056,199-96-1183-1080,29.97,1920,1080,train +P4vKV-G3hGE,14659,14813,107-57-1119-1069,29.97,1920,1080,train +P4vKV-G3hGE,15277,15599,133-17-1142-1026,29.97,1920,1080,train +P4vKV-G3hGE,15744,15999,254-31-1234-1011,29.97,1920,1080,train +P4vKV-G3hGE,16759,16906,114-48-955-889,29.97,1920,1080,train +P4vKV-G3hGE,18069,18488,99-98-969-968,29.97,1920,1080,train +P4vKV-G3hGE,19055,19352,253-139-1085-971,29.97,1920,1080,train +P4vKV-G3hGE,19352,19586,177-183-965-971,29.97,1920,1080,train +P4vKV-G3hGE,19890,20019,106-96-855-845,29.97,1920,1080,train +P4vKV-G3hGE,20160,20364,422-68-1213-859,29.97,1920,1080,train +P4vKV-G3hGE,20517,20810,423-165-1203-945,29.97,1920,1080,train +P4vKV-G3hGE,20851,20984,373-84-1201-912,29.97,1920,1080,train +P4vKV-G3hGE,21140,21382,117-63-949-895,29.97,1920,1080,train +P4vKV-G3hGE,21722,21912,174-142-982-950,29.97,1920,1080,train +8hLvlQrXI6U,7700,7984,132-38-546-452,29.97,640,480,test +8hLvlQrXI6U,8247,8392,155-43-560-448,29.97,640,480,test +8hLvlQrXI6U,8872,9001,97-24-536-463,29.97,640,480,test +Rrwo_EH5k30,0,348,524-251-1324-1051,29.97,1920,1080,train +Rrwo_EH5k30,348,1372,556-207-1406-1057,29.97,1920,1080,train +Rrwo_EH5k30,1372,2189,562-217-1388-1043,29.97,1920,1080,train +Rrwo_EH5k30,2203,2561,408-60-1418-1070,29.97,1920,1080,train 
+Rrwo_EH5k30,2622,3069,463-95-1432-1064,29.97,1920,1080,train +Rrwo_EH5k30,3069,3294,484-36-1510-1062,29.97,1920,1080,train +Rrwo_EH5k30,3423,4447,457-24-1499-1066,29.97,1920,1080,train +Rrwo_EH5k30,4508,5532,441-41-1469-1069,29.97,1920,1080,train +Rrwo_EH5k30,5708,6732,404-128-1343-1067,29.97,1920,1080,train +Rrwo_EH5k30,6732,7756,559-268-1339-1048,29.97,1920,1080,train +c3ORapk0EH8,101,253,654-322-1241-909,25.0,1920,1080,train +c3ORapk0EH8,388,747,386-317-1021-952,25.0,1920,1080,train +c3ORapk0EH8,747,1031,393-262-1091-960,25.0,1920,1080,train +c3ORapk0EH8,1073,1208,635-307-1293-965,25.0,1920,1080,train +c3ORapk0EH8,1286,1488,646-234-1440-1028,25.0,1920,1080,train +c3ORapk0EH8,1538,1784,864-252-1611-999,25.0,1920,1080,train +c3ORapk0EH8,1869,2027,1152-302-1877-1027,25.0,1920,1080,train +c3ORapk0EH8,2224,2546,718-300-1368-950,25.0,1920,1080,train +c3ORapk0EH8,2717,2886,1055-369-1559-873,25.0,1920,1080,train +c3ORapk0EH8,2886,3031,1132-355-1655-878,25.0,1920,1080,train +c3ORapk0EH8,3066,3206,1059-371-1534-846,25.0,1920,1080,train +c3ORapk0EH8,3228,3361,1007-371-1475-839,25.0,1920,1080,train +c3ORapk0EH8,3565,3709,977-380-1445-848,25.0,1920,1080,train +c3ORapk0EH8,4362,4568,572-244-1335-1007,25.0,1920,1080,train +c3ORapk0EH8,4568,4707,416-301-1138-1023,25.0,1920,1080,train +c3ORapk0EH8,4716,4958,491-196-1336-1041,25.0,1920,1080,train +c3ORapk0EH8,7026,7156,609-116-1547-1054,25.0,1920,1080,train +c3ORapk0EH8,10140,10328,258-89-1228-1059,25.0,1920,1080,train +c3ORapk0EH8,10447,10713,688-237-1468-1017,25.0,1920,1080,train +c3ORapk0EH8,10713,10856,775-200-1576-1001,25.0,1920,1080,train +c3ORapk0EH8,10856,11314,617-252-1339-974,25.0,1920,1080,train +c3ORapk0EH8,11314,11502,641-216-1402-977,25.0,1920,1080,train +c3ORapk0EH8,11502,11764,802-260-1514-972,25.0,1920,1080,train +c3ORapk0EH8,12636,12805,361-6-1424-1069,25.0,1920,1080,train +c3ORapk0EH8,20720,20904,347-110-1183-946,25.0,1920,1080,train +c3ORapk0EH8,20904,21110,606-159-1291-844,25.0,1920,1080,train 
+5HNVxg4Vwzc,1629,1825,318-19-1013-714,29.97,1280,720,train +5HNVxg4Vwzc,2312,2467,339-69-975-705,29.97,1280,720,train +5HNVxg4Vwzc,3628,3817,314-49-978-713,29.97,1280,720,train +5HNVxg4Vwzc,4936,5141,298-26-963-691,29.97,1280,720,train +5HNVxg4Vwzc,7606,7739,417-95-944-622,29.97,1280,720,train +5HNVxg4Vwzc,8400,8540,467-82-1022-637,29.97,1280,720,train +5HNVxg4Vwzc,13192,13408,450-112-989-651,29.97,1280,720,train +5HNVxg4Vwzc,13902,14081,428-95-1016-683,29.97,1280,720,train +5HNVxg4Vwzc,14081,14349,453-66-1024-637,29.97,1280,720,train +5HNVxg4Vwzc,17828,18041,388-41-1061-714,29.97,1280,720,train +5HNVxg4Vwzc,18320,18615,261-11-955-705,29.97,1280,720,train +PjpHBRjIjpo,359,1026,569-167-1419-1017,29.97,1920,1080,train +PjpHBRjIjpo,1500,1679,853-180-1739-1066,29.97,1920,1080,train +PjpHBRjIjpo,1730,2519,856-151-1783-1078,29.97,1920,1080,train +PjpHBRjIjpo,2759,3318,851-216-1652-1017,29.97,1920,1080,train +PjpHBRjIjpo,3318,3981,655-174-1467-986,29.97,1920,1080,train +PjpHBRjIjpo,4199,4439,590-203-1406-1019,29.97,1920,1080,train +PjpHBRjIjpo,5039,5212,574-153-1417-996,29.97,1920,1080,train +4rv26XZJJsc,947,1131,80-64-447-431,25.0,640,480,train +7KzG5fmhdRE,208,655,195-23-613-441,30.0,854,480,train +7KzG5fmhdRE,655,1255,157-3-603-449,30.0,854,480,train +7KzG5fmhdRE,1255,2075,135-29-579-473,30.0,854,480,train +7KzG5fmhdRE,2075,2370,116-5-568-457,30.0,854,480,train +7KzG5fmhdRE,2370,2781,84-27-534-477,30.0,854,480,train +7KzG5fmhdRE,2781,2972,217-35-623-441,30.0,854,480,train +7KzG5fmhdRE,2972,3461,214-16-635-437,30.0,854,480,train +7KzG5fmhdRE,3461,4485,201-25-633-457,30.0,854,480,train +7KzG5fmhdRE,4485,4898,204-37-612-445,30.0,854,480,train +7KzG5fmhdRE,4898,5559,321-45-719-443,30.0,854,480,train +7KzG5fmhdRE,5559,6212,367-26-789-448,30.0,854,480,train +7KzG5fmhdRE,7236,7375,468-41-850-423,30.0,854,480,train +eR67awnEM2Q,0,551,941-315-1695-1069,30.0,1920,1080,train +eR67awnEM2Q,552,1125,996-339-1700-1043,30.0,1920,1080,train 
+eR67awnEM2Q,1125,1455,833-339-1547-1053,30.0,1920,1080,train +eR67awnEM2Q,1455,1768,1004-325-1717-1038,30.0,1920,1080,train +eR67awnEM2Q,1768,1964,800-352-1468-1020,30.0,1920,1080,train +eR67awnEM2Q,1964,2219,621-333-1319-1031,30.0,1920,1080,train +eR67awnEM2Q,2220,2390,530-343-1194-1007,30.0,1920,1080,train +eR67awnEM2Q,2390,2738,420-304-1135-1019,30.0,1920,1080,train +eR67awnEM2Q,2861,3017,444-288-1204-1048,30.0,1920,1080,train +eR67awnEM2Q,3017,3308,438-249-1234-1045,30.0,1920,1080,train +eR67awnEM2Q,3362,3659,523-317-1251-1045,30.0,1920,1080,train +eR67awnEM2Q,3713,3853,618-382-1277-1041,30.0,1920,1080,train +eR67awnEM2Q,3918,4070,555-262-1343-1050,30.0,1920,1080,train +eR67awnEM2Q,4281,4518,620-312-1370-1062,30.0,1920,1080,train +eR67awnEM2Q,4664,5329,330-290-1071-1031,30.0,1920,1080,train +eR67awnEM2Q,5466,5627,293-309-1014-1030,30.0,1920,1080,train +eR67awnEM2Q,5724,5872,319-295-1042-1018,30.0,1920,1080,train +eR67awnEM2Q,5985,6233,557-313-1290-1046,30.0,1920,1080,train +eR67awnEM2Q,6233,6436,729-323-1450-1044,30.0,1920,1080,train +eR67awnEM2Q,6497,6657,875-317-1614-1056,30.0,1920,1080,train +eR67awnEM2Q,6731,6923,690-309-1427-1046,30.0,1920,1080,train +eR67awnEM2Q,7368,7703,600-314-1320-1034,30.0,1920,1080,train +eR67awnEM2Q,7711,8132,551-189-1372-1010,30.0,1920,1080,train +eR67awnEM2Q,8144,8800,554-188-1374-1008,30.0,1920,1080,train +eR67awnEM2Q,8866,9143,946-335-1649-1038,30.0,1920,1080,train +eR67awnEM2Q,9245,9457,804-326-1521-1043,30.0,1920,1080,train +eR67awnEM2Q,9781,9973,963-306-1702-1045,30.0,1920,1080,train +eR67awnEM2Q,10377,10642,512-190-1387-1065,30.0,1920,1080,train +eR67awnEM2Q,10642,10838,830-328-1573-1071,30.0,1920,1080,train +eR67awnEM2Q,10838,11058,823-291-1600-1068,30.0,1920,1080,train +eR67awnEM2Q,11064,11557,760-190-1643-1073,30.0,1920,1080,train +agjeMK0otQA,1570,2233,330-57-744-471,29.97,854,480,train +agjeMK0otQA,2843,3867,298-46-726-474,29.97,854,480,train +agjeMK0otQA,3867,4891,272-47-699-474,29.97,854,480,train 
+agjeMK0otQA,4891,5341,282-45-696-459,29.97,854,480,train +agjeMK0otQA,5381,5681,256-43-640-427,29.97,854,480,train +agjeMK0otQA,5681,6235,295-25-702-432,29.97,854,480,train +agjeMK0otQA,6235,6425,354-37-770-453,29.97,854,480,train +agjeMK0otQA,6425,6654,351-60-764-473,29.97,854,480,train +agjeMK0otQA,6654,7658,254-39-695-480,29.97,854,480,train +agjeMK0otQA,7660,7885,261-56-634-429,29.97,854,480,train +agjeMK0otQA,7885,8133,274-36-686-448,29.97,854,480,train +agjeMK0otQA,8133,8350,339-60-746-467,29.97,854,480,train +agjeMK0otQA,9042,9256,230-59-598-427,29.97,854,480,train +agjeMK0otQA,9300,9576,210-40-607-437,29.97,854,480,train +agjeMK0otQA,9576,9761,251-37-671-457,29.97,854,480,train +agjeMK0otQA,9761,9961,277-57-697-477,29.97,854,480,train +agjeMK0otQA,10698,10877,204-53-580-429,29.97,854,480,train +agjeMK0otQA,10877,11400,166-33-566-433,29.97,854,480,train +Yqt573H689c,26,451,552-103-1389-940,29.97,1920,1080,train +Yqt573H689c,451,1475,527-120-1388-981,29.97,1920,1080,train +Yqt573H689c,1475,2499,558-114-1416-972,29.97,1920,1080,train +Yqt573H689c,2499,2748,390-118-1263-991,29.97,1920,1080,train +Yqt573H689c,2748,3053,632-127-1486-981,29.97,1920,1080,train +Yqt573H689c,3053,3505,510-181-1305-976,29.97,1920,1080,train +Yqt573H689c,3632,4437,368-93-1305-1030,29.97,1920,1080,train +Yqt573H689c,4437,4955,593-157-1422-986,29.97,1920,1080,train +Yqt573H689c,5076,5278,368-178-1213-1023,29.97,1920,1080,train +Yqt573H689c,5280,6000,441-139-1294-992,29.97,1920,1080,train +Yqt573H689c,6316,6655,569-139-1452-1022,29.97,1920,1080,train +Yqt573H689c,6655,7166,502-143-1378-1019,29.97,1920,1080,train +Yqt573H689c,7216,7770,295-143-1154-1002,29.97,1920,1080,train +Yqt573H689c,7770,8169,170-141-1037-1008,29.97,1920,1080,train +Yqt573H689c,8169,8931,285-99-1197-1011,29.97,1920,1080,train +Yqt573H689c,8932,9127,366-99-1253-986,29.97,1920,1080,train +Yqt573H689c,9249,9501,207-74-1166-1033,29.97,1920,1080,train +Yqt573H689c,9872,10034,635-107-1471-943,29.97,1920,1080,train 
+Yqt573H689c,10190,10701,454-92-1392-1030,29.97,1920,1080,train +Yqt573H689c,10809,10959,329-119-1262-1052,29.97,1920,1080,train +Yqt573H689c,10987,11199,369-37-1388-1056,29.97,1920,1080,train +Yqt573H689c,11954,12168,544-64-1528-1048,29.97,1920,1080,train +Yqt573H689c,12291,12578,317-59-1270-1012,29.97,1920,1080,train +Yqt573H689c,12651,12823,517-108-1442-1033,29.97,1920,1080,train +Yqt573H689c,12860,13884,745-81-1715-1051,29.97,1920,1080,train +Yqt573H689c,13884,14560,613-142-1474-1003,29.97,1920,1080,train +Yqt573H689c,14560,14973,470-90-1397-1017,29.97,1920,1080,train +Yqt573H689c,14973,15474,547-44-1549-1046,29.97,1920,1080,train +Yqt573H689c,15475,15624,845-72-1755-982,29.97,1920,1080,train +Yqt573H689c,15624,16648,759-77-1707-1025,29.97,1920,1080,train +Yqt573H689c,16720,17044,777-134-1595-952,29.97,1920,1080,train +Yqt573H689c,17044,17241,871-93-1714-936,29.97,1920,1080,train +Yqt573H689c,17241,17782,675-101-1488-914,29.97,1920,1080,train +Yqt573H689c,17827,17967,902-146-1624-868,29.97,1920,1080,train +Yqt573H689c,18096,18306,718-77-1563-922,29.97,1920,1080,train +Yqt573H689c,18306,18617,516-109-1358-951,29.97,1920,1080,train +Yqt573H689c,18617,19217,678-104-1565-991,29.97,1920,1080,train +Yqt573H689c,19217,19855,638-47-1581-990,29.97,1920,1080,train +Yqt573H689c,19865,20797,802-73-1701-972,29.97,1920,1080,train +-p3QMYa_buo,150,938,81-5-537-461,29.97,640,480,train +-p3QMYa_buo,1203,1654,110-18-539-447,29.97,640,480,train +-p3QMYa_buo,1912,2095,120-24-547-451,29.97,640,480,train +-p3QMYa_buo,2100,2289,141-30-567-456,29.97,640,480,train +-p3QMYa_buo,2479,2808,127-14-568-455,29.97,640,480,train +-p3QMYa_buo,3029,3329,129-18-569-458,29.97,640,480,train +-p3QMYa_buo,3482,3819,120-33-537-450,29.97,640,480,train +-p3QMYa_buo,4004,4138,106-30-530-454,29.97,640,480,train +-p3QMYa_buo,4148,4302,134-26-569-461,29.97,640,480,train +-p3QMYa_buo,4503,4832,119-30-544-455,29.97,640,480,train +-p3QMYa_buo,5060,6084,110-27-538-455,29.97,640,480,train 
+-p3QMYa_buo,6084,6735,85-40-503-458,29.97,640,480,train +-p3QMYa_buo,6960,7340,135-25-563-453,29.97,640,480,train +-p3QMYa_buo,7522,7686,142-34-558-450,29.97,640,480,train +-p3QMYa_buo,7686,8423,114-41-543-470,29.97,640,480,train +-p3QMYa_buo,9274,9472,146-38-574-466,29.97,640,480,train +-p3QMYa_buo,9656,9945,137-25-569-457,29.97,640,480,train +-p3QMYa_buo,10107,10466,111-39-532-460,29.97,640,480,train +-p3QMYa_buo,10648,10799,96-0-530-434,29.97,640,480,train +-p3QMYa_buo,10799,11285,95-12-537-454,29.97,640,480,train +-p3QMYa_buo,11285,11772,78-27-526-475,29.97,640,480,train +E93J3Gtrxhs,122,445,933-277-1711-1055,29.97,1920,1080,train +E93J3Gtrxhs,593,744,917-283-1623-989,29.97,1920,1080,train +E93J3Gtrxhs,1344,1780,935-236-1742-1043,29.97,1920,1080,train +E93J3Gtrxhs,1979,2334,752-188-1622-1058,29.97,1920,1080,train +E93J3Gtrxhs,2334,2594,944-247-1724-1027,29.97,1920,1080,train +E93J3Gtrxhs,3390,3530,646-21-1585-960,29.97,1920,1080,train +E93J3Gtrxhs,5189,5360,332-34-1310-1012,29.97,1920,1080,train +E93J3Gtrxhs,5360,5537,324-141-1238-1055,29.97,1920,1080,train +ytT4iU7h-A8,1778,2049,93-28-1078-1013,29.97,1920,1080,train +ytT4iU7h-A8,2049,2389,309-80-1286-1057,29.97,1920,1080,train +ytT4iU7h-A8,2563,2880,70-43-1049-1022,29.97,1920,1080,train +ytT4iU7h-A8,2880,3044,71-116-891-936,29.97,1920,1080,train +ytT4iU7h-A8,3439,3635,206-70-1172-1036,29.97,1920,1080,train +ytT4iU7h-A8,3664,3887,245-24-1282-1061,29.97,1920,1080,train +ytT4iU7h-A8,3990,4119,383-21-1422-1060,29.97,1920,1080,train +ytT4iU7h-A8,4127,4291,215-78-1212-1075,29.97,1920,1080,train +ytT4iU7h-A8,4865,5048,397-27-1440-1070,29.97,1920,1080,train +ytT4iU7h-A8,6173,6517,424-8-1491-1075,29.97,1920,1080,train +ytT4iU7h-A8,6884,7149,407-125-1346-1064,29.97,1920,1080,train +ytT4iU7h-A8,10804,11024,262-47-1287-1072,29.97,1920,1080,train +sjzfA4dYMfg,1769,2003,310-10-1014-714,25.0,1280,720,train +sjzfA4dYMfg,2303,2447,283-15-982-714,25.0,1280,720,train +MHWHMZ1N8rk,0,1024,133-50-470-387,30.0,640,480,train 
+MHWHMZ1N8rk,1024,1191,140-48-475-383,30.0,640,480,train +MHWHMZ1N8rk,1914,2938,138-48-474-384,30.0,640,480,train +MHWHMZ1N8rk,2938,3962,133-42-475-384,30.0,640,480,train +MHWHMZ1N8rk,3962,4666,142-49-475-382,30.0,640,480,train +MHWHMZ1N8rk,4759,5246,149-54-477-382,30.0,640,480,train +MHWHMZ1N8rk,5246,6153,124-37-473-386,30.0,640,480,train +MHWHMZ1N8rk,6293,6513,131-47-467-383,30.0,640,480,train +MHWHMZ1N8rk,7196,7434,131-36-480-385,30.0,640,480,train +MHWHMZ1N8rk,7445,8469,133-42-475-384,30.0,640,480,train +MHWHMZ1N8rk,8469,8848,137-40-476-379,30.0,640,480,train +MHWHMZ1N8rk,8857,9562,138-33-487-382,30.0,640,480,train +cEOS2zoyQw4,2285,2500,276-24-949-697,25.0,1280,720,train +cEOS2zoyQw4,2587,2818,276-28-957-709,25.0,1280,720,train +cEOS2zoyQw4,2831,2976,298-42-955-699,25.0,1280,720,train +cEOS2zoyQw4,3492,3670,322-37-1000-715,25.0,1280,720,train +cEOS2zoyQw4,3724,4006,327-17-1001-691,25.0,1280,720,train +cEOS2zoyQw4,5678,6155,423-54-1087-718,25.0,1280,720,train +cEOS2zoyQw4,7372,7525,320-42-978-700,25.0,1280,720,train +cEOS2zoyQw4,7536,7742,308-21-1006-719,25.0,1280,720,train +cEOS2zoyQw4,8918,9068,269-25-921-677,25.0,1280,720,train +cEOS2zoyQw4,9648,10267,299-15-977-693,25.0,1280,720,train +cEOS2zoyQw4,10820,11648,298-4-977-683,25.0,1280,720,train +cEOS2zoyQw4,12796,13029,194-58-856-720,25.0,1280,720,train +cEOS2zoyQw4,13575,13853,284-12-986-714,25.0,1280,720,train +cEOS2zoyQw4,13965,14104,285-19-977-711,25.0,1280,720,train +cEOS2zoyQw4,15149,15819,300-15-989-704,25.0,1280,720,train +cEOS2zoyQw4,16021,16153,259-14-954-709,25.0,1280,720,train +jxAD7rOqDZs,323,483,320-80-817-577,29.97,1280,720,train +jxAD7rOqDZs,483,743,316-96-821-601,29.97,1280,720,train +jxAD7rOqDZs,961,1178,191-71-743-623,29.97,1280,720,train +jxAD7rOqDZs,1376,1661,121-152-598-629,29.97,1280,720,train +jxAD7rOqDZs,1705,1862,2-117-505-620,29.97,1280,720,train +jxAD7rOqDZs,2057,2234,253-128-748-623,29.97,1280,720,train +58m6HTXpBz8,1172,1382,576-137-1364-925,25.0,1920,1080,train 
+58m6HTXpBz8,1552,1772,508-128-1331-951,25.0,1920,1080,train +58m6HTXpBz8,2203,2352,873-37-1721-885,25.0,1920,1080,train +aOk2Diz9C9g,3328,3464,139-48-486-395,25.0,640,480,train +ZBVzUnF9eXU,31,658,157-99-505-447,30.0,640,480,train +ZBVzUnF9eXU,678,889,161-125-496-460,30.0,640,480,train +ZBVzUnF9eXU,955,1200,114-69-517-472,30.0,640,480,train +ZBVzUnF9eXU,1397,1610,104-156-396-448,30.0,640,480,train +ZBVzUnF9eXU,1803,2097,68-120-399-451,30.0,640,480,train +bXz9ZSVlSKo,149,484,160-96-514-450,25.0,600,480,train +bXz9ZSVlSKo,789,1596,149-82-519-452,25.0,600,480,train +bXz9ZSVlSKo,1607,1761,140-44-552-456,25.0,600,480,train +bXz9ZSVlSKo,2458,2612,165-107-508-450,25.0,600,480,train +bXz9ZSVlSKo,2824,2986,168-113-506-451,25.0,600,480,train +bXz9ZSVlSKo,2986,3118,159-96-513-450,25.0,600,480,train +bXz9ZSVlSKo,3268,3578,157-93-516-452,25.0,600,480,train +bXz9ZSVlSKo,3793,4046,158-93-516-451,25.0,600,480,train +bXz9ZSVlSKo,4169,5047,141-58-538-455,25.0,600,480,train +bXz9ZSVlSKo,5047,5254,137-35-561-459,25.0,600,480,train +bXz9ZSVlSKo,5322,5477,155-85-521-451,25.0,600,480,train +bXz9ZSVlSKo,5995,6214,155-89-518-452,25.0,600,480,train +bXz9ZSVlSKo,6315,6591,166-110-509-453,25.0,600,480,train +bXz9ZSVlSKo,6819,7523,159-95-516-452,25.0,600,480,train +bXz9ZSVlSKo,7530,8444,139-55-540-456,25.0,600,480,train +bXz9ZSVlSKo,8444,8973,127-30-561-464,25.0,600,480,train +bXz9ZSVlSKo,9083,9241,167-110-504-447,25.0,600,480,train +bXz9ZSVlSKo,9241,10265,150-87-521-458,25.0,600,480,train +bXz9ZSVlSKo,10265,10536,161-95-515-449,25.0,600,480,train +5dsMA7KF_b0,150,1095,510-144-1245-879,29.97,1920,1080,train +5dsMA7KF_b0,1297,1490,922-217-1549-844,29.97,1920,1080,train +5dsMA7KF_b0,1758,2065,832-212-1476-856,29.97,1920,1080,train +5dsMA7KF_b0,2479,2648,987-324-1525-862,29.97,1920,1080,train +5dsMA7KF_b0,2712,2928,944-305-1504-865,29.97,1920,1080,train +5dsMA7KF_b0,3152,3338,706-298-1295-887,29.97,1920,1080,train +5dsMA7KF_b0,3890,4052,779-236-1385-842,29.97,1920,1080,train 
+5dsMA7KF_b0,4646,4972,773-221-1368-816,29.97,1920,1080,train +5dsMA7KF_b0,5095,5246,796-235-1372-811,29.97,1920,1080,train +5dsMA7KF_b0,5306,5472,793-207-1405-819,29.97,1920,1080,train +5dsMA7KF_b0,6092,6255,978-316-1535-873,29.97,1920,1080,train +5dsMA7KF_b0,6368,6561,1026-306-1587-867,29.97,1920,1080,train +5dsMA7KF_b0,6561,6739,1101-359-1634-892,29.97,1920,1080,train +5dsMA7KF_b0,7949,8091,278-176-951-849,29.97,1920,1080,train +5dsMA7KF_b0,8632,8819,531-178-1231-878,29.97,1920,1080,train +5dsMA7KF_b0,8920,9234,836-194-1513-871,29.97,1920,1080,train +5dsMA7KF_b0,9347,9648,782-175-1489-882,29.97,1920,1080,train +5dsMA7KF_b0,9648,9940,736-139-1479-882,29.97,1920,1080,train +8xSkbMUpegs,1680,1920,322-89-923-690,29.97,1280,720,train +8xSkbMUpegs,2185,2400,221-75-866-720,29.97,1280,720,train +8xSkbMUpegs,2926,3144,294-14-946-666,29.97,1280,720,train +8xSkbMUpegs,3160,3323,414-78-998-662,29.97,1280,720,train +8xSkbMUpegs,3476,3697,330-56-946-672,29.97,1280,720,train +8xSkbMUpegs,3840,4000,475-90-1015-630,29.97,1280,720,train +8xSkbMUpegs,4406,4565,671-106-1149-584,29.97,1280,720,train +8xSkbMUpegs,4665,4800,620-10-1151-541,29.97,1280,720,train +8xSkbMUpegs,5365,5640,516-89-1017-590,29.97,1280,720,train +8xSkbMUpegs,8569,8907,254-32-868-646,29.97,1280,720,train +8xSkbMUpegs,9625,9880,393-65-983-655,29.97,1280,720,train +8xSkbMUpegs,11849,12000,383-70-988-675,29.97,1280,720,train +aJjr_OD8M4A,17249,18273,487-168-1397-1078,29.97,1920,1080,train +A-0smsWdrog,0,480,569-143-1343-917,30.0,1920,1080,train +A-0smsWdrog,1500,1679,857-45-1658-846,30.0,1920,1080,train +A-0smsWdrog,2998,3130,706-107-1468-869,30.0,1920,1080,train +A-0smsWdrog,4667,4854,485-97-1399-1011,30.0,1920,1080,train +A-0smsWdrog,5370,5700,370-106-1287-1023,30.0,1920,1080,train +A-0smsWdrog,6405,6555,266-162-1023-919,30.0,1920,1080,train +A-0smsWdrog,6922,7079,646-135-1439-928,30.0,1920,1080,train +A-0smsWdrog,7980,8162,448-31-1443-1026,30.0,1920,1080,train 
+A-0smsWdrog,10538,10674,172-129-960-917,30.0,1920,1080,train +A-0smsWdrog,11346,11477,595-223-1293-921,30.0,1920,1080,train +w2awOCDRtrc,1729,2009,120-40-509-429,25.0,640,480,test +w2awOCDRtrc,3059,3190,139-32-540-433,25.0,640,480,test +sSNvvVqnd_8,46,262,75-79-458-462,25.0,640,480,train +sSNvvVqnd_8,1286,2310,130-77-518-465,25.0,640,480,train +sSNvvVqnd_8,2397,3014,195-71-587-463,25.0,640,480,train +sSNvvVqnd_8,3099,3708,102-79-491-468,25.0,640,480,train +sSNvvVqnd_8,3708,4423,202-96-565-459,25.0,640,480,train +sSNvvVqnd_8,4423,5042,144-72-527-455,25.0,640,480,train +sSNvvVqnd_8,5208,5376,194-91-548-445,25.0,640,480,train +sSNvvVqnd_8,5376,5577,182-89-547-454,25.0,640,480,train +sSNvvVqnd_8,5700,6078,139-84-501-446,25.0,640,480,train +sSNvvVqnd_8,6188,6990,119-87-494-462,25.0,640,480,train +sSNvvVqnd_8,7131,7297,99-49-527-477,25.0,640,480,train +sSNvvVqnd_8,7297,8321,93-78-488-473,25.0,640,480,train +sSNvvVqnd_8,8321,8851,95-93-461-459,25.0,640,480,train +sSNvvVqnd_8,8863,9145,165-89-543-467,25.0,640,480,train +sSNvvVqnd_8,9704,9982,194-70-599-475,25.0,640,480,train +sSNvvVqnd_8,9982,10197,105-105-461-461,25.0,640,480,train +sSNvvVqnd_8,10197,10363,63-87-431-455,25.0,640,480,train +sSNvvVqnd_8,10363,10503,164-96-537-469,25.0,640,480,train +sSNvvVqnd_8,10503,10878,185-95-552-462,25.0,640,480,train +AYrUqlwE6n8,0,1024,372-66-999-693,29.97,1280,720,train +AYrUqlwE6n8,1024,2048,354-71-981-698,29.97,1280,720,train +AYrUqlwE6n8,2069,3093,458-125-901-568,29.97,1280,720,train +AYrUqlwE6n8,3093,4117,464-126-903-565,29.97,1280,720,train +AYrUqlwE6n8,4117,5141,375-136-801-562,29.97,1280,720,train +AYrUqlwE6n8,5141,6165,439-138-869-568,29.97,1280,720,train +AYrUqlwE6n8,6165,6328,459-144-898-583,29.97,1280,720,train +AYrUqlwE6n8,6328,6814,510-149-965-604,29.97,1280,720,train +AYrUqlwE6n8,6814,7800,423-138-908-623,29.97,1280,720,train +AYrUqlwE6n8,7800,7965,285-132-800-647,29.97,1280,720,train +AYrUqlwE6n8,7965,8449,366-130-900-664,29.97,1280,720,train 
+AYrUqlwE6n8,8662,8951,352-94-919-661,29.97,1280,720,train +AYrUqlwE6n8,8985,9166,424-110-892-578,29.97,1280,720,train +AYrUqlwE6n8,9166,10190,443-111-933-601,29.97,1280,720,train +AYrUqlwE6n8,10190,10622,499-144-961-606,29.97,1280,720,train +AYrUqlwE6n8,10677,11366,285-104-836-655,29.97,1280,720,train +AYrUqlwE6n8,11573,12431,401-113-934-646,29.97,1280,720,train +AYrUqlwE6n8,12487,13329,423-112-925-614,29.97,1280,720,train +AYrUqlwE6n8,13329,13960,433-123-947-637,29.97,1280,720,train +AYrUqlwE6n8,14060,14200,278-124-803-649,29.97,1280,720,train +AYrUqlwE6n8,14200,14467,384-139-886-641,29.97,1280,720,train +AYrUqlwE6n8,14531,15555,331-99-893-661,29.97,1280,720,train +AYrUqlwE6n8,15555,16177,433-134-963-664,29.97,1280,720,train +AYrUqlwE6n8,16218,16904,409-124-946-661,29.97,1280,720,train +AYrUqlwE6n8,16904,17928,411-97-981-667,29.97,1280,720,train +AYrUqlwE6n8,17928,18468,371-102-910-641,29.97,1280,720,train +AYrUqlwE6n8,18470,19494,466-118-915-567,29.97,1280,720,train +AYrUqlwE6n8,19494,20243,386-146-817-577,29.97,1280,720,train +AYrUqlwE6n8,20243,20397,436-158-875-597,29.97,1280,720,train +AYrUqlwE6n8,20425,20884,496-100-1004-608,29.97,1280,720,train +AYrUqlwE6n8,20936,21373,444-133-953-642,29.97,1280,720,train +AYrUqlwE6n8,21564,22588,386-115-959-688,29.97,1280,720,train +AYrUqlwE6n8,22667,23691,372-137-875-640,29.97,1280,720,train +AYrUqlwE6n8,23691,23952,344-128-871-655,29.97,1280,720,train +AYrUqlwE6n8,23952,24976,360-96-906-642,29.97,1280,720,train +AYrUqlwE6n8,24976,25214,398-146-872-620,29.97,1280,720,train +AYrUqlwE6n8,25405,25694,388-126-936-674,29.97,1280,720,train +AYrUqlwE6n8,26008,26995,355-70-971-686,29.97,1280,720,train +KrsQLNWK9Jc,1022,1224,508-135-1074-701,22.0,1280,720,train +KrsQLNWK9Jc,1422,1572,582-140-1132-690,22.0,1280,720,train +KrsQLNWK9Jc,1572,1747,504-146-1045-687,22.0,1280,720,train +KrsQLNWK9Jc,1747,1963,353-151-906-704,22.0,1280,720,train +KrsQLNWK9Jc,1963,2106,232-156-775-699,22.0,1280,720,train 
+KrsQLNWK9Jc,2243,2493,208-144-765-701,22.0,1280,720,train +KrsQLNWK9Jc,2864,3057,362-128-927-693,22.0,1280,720,train +KrsQLNWK9Jc,3057,3240,482-138-1025-681,22.0,1280,720,train +KrsQLNWK9Jc,3258,3455,329-109-903-683,22.0,1280,720,train +KrsQLNWK9Jc,4401,4669,379-153-939-713,22.0,1280,720,train +KrsQLNWK9Jc,4769,5021,321-103-918-700,22.0,1280,720,train +ItJsAxi-vME,5870,6028,239-172-636-569,29.97,1280,720,train +ItJsAxi-vME,7041,7281,375-161-769-555,29.97,1280,720,train +ItJsAxi-vME,8469,8631,703-135-1002-434,29.97,1280,720,train +ItJsAxi-vME,9463,9608,352-41-814-503,29.97,1280,720,train +ItJsAxi-vME,9764,9911,228-64-637-473,29.97,1280,720,train +ItJsAxi-vME,10068,10212,149-91-661-603,29.97,1280,720,train +ItJsAxi-vME,10709,10923,90-161-585-656,29.97,1280,720,train +ItJsAxi-vME,11152,11322,159-157-613-611,29.97,1280,720,train +ItJsAxi-vME,11975,12127,261-126-728-593,29.97,1280,720,train +ItJsAxi-vME,13625,13900,38-72-592-626,29.97,1280,720,train +ItJsAxi-vME,14683,14828,137-133-557-553,29.97,1280,720,train +ItJsAxi-vME,15953,16769,638-139-1160-661,29.97,1280,720,train +ItJsAxi-vME,17286,17440,616-147-1049-580,29.97,1280,720,train +ItJsAxi-vME,18233,18396,178-162-657-641,29.97,1280,720,train +ItJsAxi-vME,19329,19556,315-180-806-671,29.97,1280,720,train +ItJsAxi-vME,19847,20146,370-144-910-684,29.97,1280,720,train +ItJsAxi-vME,20251,20435,571-124-1094-647,29.97,1280,720,train +ItJsAxi-vME,21527,21664,634-153-1036-555,29.97,1280,720,train +ItJsAxi-vME,24016,24192,386-138-888-640,29.97,1280,720,train +ItJsAxi-vME,25569,25723,569-200-894-525,29.97,1280,720,train +ItJsAxi-vME,25924,26119,670-182-1021-533,29.97,1280,720,train +ItJsAxi-vME,26119,26272,605-163-986-544,29.97,1280,720,train +DdscFyAZurI,20,380,125-127-400-402,25.0,640,480,train +DdscFyAZurI,500,648,152-142-459-449,25.0,640,480,train +DdscFyAZurI,648,864,175-175-452-452,25.0,640,480,train +DdscFyAZurI,1999,2159,144-59-550-465,25.0,640,480,train +DdscFyAZurI,4384,4536,196-71-582-457,25.0,640,480,train 
+DdscFyAZurI,4627,5001,280-106-600-426,25.0,640,480,train +DdscFyAZurI,5004,5184,262-99-581-418,25.0,640,480,train +DdscFyAZurI,5499,5663,153-88-534-469,25.0,640,480,train +O3aNcFLu-98,6102,6322,244-25-770-551,29.97,1280,720,train +O3aNcFLu-98,8088,8228,169-98-696-625,29.97,1280,720,train +O3aNcFLu-98,9710,9877,159-115-616-572,29.97,1280,720,train +O3aNcFLu-98,10465,10632,607-45-1173-611,29.97,1280,720,train +O3aNcFLu-98,11283,11453,127-71-652-596,29.97,1280,720,train +O3aNcFLu-98,11654,11905,161-102-662-603,29.97,1280,720,train +O3aNcFLu-98,12747,12888,349-44-904-599,29.97,1280,720,train +WvpZGirYxi0,299,894,394-217-1161-984,24.0,1920,1080,train +WvpZGirYxi0,957,1087,120-208-897-985,24.0,1920,1080,train +WvpZGirYxi0,1122,1589,285-222-1139-1076,24.0,1920,1080,train +WvpZGirYxi0,1615,2185,94-199-886-991,24.0,1920,1080,train +WvpZGirYxi0,2258,2808,289-166-1152-1029,24.0,1920,1080,train +WvpZGirYxi0,2869,3043,633-231-1402-1000,24.0,1920,1080,train +WvpZGirYxi0,3273,3460,1047-180-1862-995,24.0,1920,1080,train +WvpZGirYxi0,3607,3803,1034-126-1889-981,24.0,1920,1080,train +zHsLRAPJkMM,0,200,826-534-1324-1032,29.97,1920,1080,train +zHsLRAPJkMM,322,617,832-543-1326-1037,29.97,1920,1080,train +zHsLRAPJkMM,5037,5166,992-497-1573-1078,29.97,1920,1080,train +zHsLRAPJkMM,5442,5645,1008-477-1609-1078,29.97,1920,1080,train +zHsLRAPJkMM,6089,6218,959-466-1490-997,29.97,1920,1080,train +zHsLRAPJkMM,8064,8382,696-172-1569-1045,29.97,1920,1080,train +zHsLRAPJkMM,8638,8853,601-337-1199-935,29.97,1920,1080,train +zHsLRAPJkMM,10071,10220,826-496-1377-1047,29.97,1920,1080,train +zHsLRAPJkMM,10896,11027,905-511-1417-1023,29.97,1920,1080,train +zHsLRAPJkMM,12402,12685,939-445-1509-1015,29.97,1920,1080,train +zHsLRAPJkMM,14839,14978,603-468-1081-946,29.97,1920,1080,train +zHsLRAPJkMM,17023,17198,471-536-997-1062,29.97,1920,1080,train +zHsLRAPJkMM,17754,17912,158-463-723-1028,29.97,1920,1080,train +zHsLRAPJkMM,19418,19631,486-520-1003-1037,29.97,1920,1080,train 
+zHsLRAPJkMM,20948,21279,664-371-1366-1073,29.97,1920,1080,train +zHsLRAPJkMM,22932,23113,1082-460-1673-1051,29.97,1920,1080,train +zHsLRAPJkMM,23113,23307,1032-518-1559-1045,29.97,1920,1080,train +zHsLRAPJkMM,23519,23658,617-451-1243-1077,29.97,1920,1080,train +zHsLRAPJkMM,25274,25419,847-466-1411-1030,29.97,1920,1080,train +zHsLRAPJkMM,25581,25755,708-512-1236-1040,29.97,1920,1080,train +zHsLRAPJkMM,25880,26011,870-474-1476-1080,29.97,1920,1080,train +zHsLRAPJkMM,26291,26430,787-546-1258-1017,29.97,1920,1080,train +zHsLRAPJkMM,26438,26583,832-506-1317-991,29.97,1920,1080,train +zHsLRAPJkMM,26846,27055,1108-477-1544-913,29.97,1920,1080,train +l_faFvmpDrM,366,725,377-35-1044-702,30.0,1280,720,train +xMYVjC4Ip3I,433,1457,156-84-484-412,25.0,640,480,train +xMYVjC4Ip3I,1476,2500,163-89-480-406,25.0,640,480,train +xMYVjC4Ip3I,4829,5142,190-121-469-400,25.0,640,480,train +xMYVjC4Ip3I,5208,5373,182-97-487-402,25.0,640,480,train +xMYVjC4Ip3I,5477,6501,153-65-493-405,25.0,640,480,train +xMYVjC4Ip3I,6501,6931,180-109-474-403,25.0,640,480,train +xMYVjC4Ip3I,6931,7125,184-95-492-403,25.0,640,480,train +xMYVjC4Ip3I,7125,7294,158-78-490-410,25.0,640,480,train +xMYVjC4Ip3I,7294,8318,156-62-496-402,25.0,640,480,train +xMYVjC4Ip3I,8318,8973,168-78-493-403,25.0,640,480,train +42P58QDIgno,87,603,310-119-827-636,25.0,1280,720,train +42P58QDIgno,908,1041,244-153-778-687,25.0,1280,720,train +42P58QDIgno,1042,1187,231-87-826-682,25.0,1280,720,train +42P58QDIgno,1614,1758,330-111-896-677,25.0,1280,720,train +42P58QDIgno,1808,2164,395-94-950-649,25.0,1280,720,train +42P58QDIgno,2364,2529,442-142-912-612,25.0,1280,720,train +42P58QDIgno,2529,2772,468-198-903-633,25.0,1280,720,train +42P58QDIgno,3174,3372,409-145-942-678,25.0,1280,720,train +42P58QDIgno,3372,3564,469-115-1038-684,25.0,1280,720,train +42P58QDIgno,4120,4262,186-74-817-705,25.0,1280,720,train +42P58QDIgno,5174,5345,341-112-905-676,25.0,1280,720,train +42P58QDIgno,5791,5925,330-92-923-685,25.0,1280,720,train 
+42P58QDIgno,6079,6215,436-108-986-658,25.0,1280,720,train +42P58QDIgno,8471,8646,390-79-1017-706,25.0,1280,720,train +42P58QDIgno,8690,9030,220-15-919-714,25.0,1280,720,train +uCR05ckHiFE,72,1096,719-202-1367-850,29.97,1920,1080,train +uCR05ckHiFE,1275,1916,790-238-1328-776,29.97,1920,1080,train +uCR05ckHiFE,2238,2734,699-216-1274-791,29.97,1920,1080,train +uCR05ckHiFE,2897,3045,673-208-1290-825,29.97,1920,1080,train +uCR05ckHiFE,3267,3428,796-213-1355-772,29.97,1920,1080,train +uCR05ckHiFE,4254,4432,616-209-1198-791,29.97,1920,1080,train +uCR05ckHiFE,4432,4651,638-223-1233-818,29.97,1920,1080,train +uCR05ckHiFE,4659,4842,669-152-1371-854,29.97,1920,1080,train +uCR05ckHiFE,5001,5163,703-207-1380-884,29.97,1920,1080,train +uCR05ckHiFE,5204,5333,681-186-1330-835,29.97,1920,1080,train +uCR05ckHiFE,5606,5751,687-251-1307-871,29.97,1920,1080,train +uCR05ckHiFE,5828,5990,784-227-1365-808,29.97,1920,1080,train +uCR05ckHiFE,6206,6474,768-318-1327-877,29.97,1920,1080,train +uCR05ckHiFE,6906,7176,614-267-1301-954,29.97,1920,1080,train +_64NZbqcISg,0,199,141-101-426-386,25.0,640,480,train +cgcuo2lfdEc,0,163,270-151-780-661,29.97,1280,720,train +cgcuo2lfdEc,163,328,282-123-823-664,29.97,1280,720,train +cgcuo2lfdEc,360,500,236-65-822-651,29.97,1280,720,train +cgcuo2lfdEc,500,733,215-32-818-635,29.97,1280,720,train +cgcuo2lfdEc,774,1020,185-138-733-686,29.97,1280,720,train +cgcuo2lfdEc,1103,1363,295-141-833-679,29.97,1280,720,train +cgcuo2lfdEc,1363,1888,292-115-833-656,29.97,1280,720,train +cgcuo2lfdEc,1978,2250,225-22-876-673,29.97,1280,720,train +cgcuo2lfdEc,2501,2672,407-171-911-675,29.97,1280,720,train +cgcuo2lfdEc,2738,2890,354-114-924-684,29.97,1280,720,train +qBdCNti65Wc,1422,1929,97-43-528-474,29.97,640,480,train +qBdCNti65Wc,1929,2250,122-22-568-468,29.97,640,480,train +qBdCNti65Wc,2432,2700,70-15-527-472,29.97,640,480,train +qBdCNti65Wc,3119,3358,104-34-541-471,29.97,640,480,train +qBdCNti65Wc,3360,3536,108-68-511-471,29.97,640,480,train 
+qBdCNti65Wc,3895,4063,108-79-497-468,29.97,640,480,train +qBdCNti65Wc,5096,5385,104-57-512-465,29.97,640,480,train +qBdCNti65Wc,5385,6409,93-36-519-462,29.97,640,480,train +qBdCNti65Wc,6409,6778,111-85-489-463,29.97,640,480,train +qBdCNti65Wc,6877,7363,103-77-485-459,29.97,640,480,train +qBdCNti65Wc,7521,8545,110-48-528-466,29.97,640,480,train +qBdCNti65Wc,8667,9016,76-12-524-460,29.97,640,480,train +n1ic2Kxv0lo,600,750,115-71-522-478,30.0,640,480,train +n1ic2Kxv0lo,1650,1800,162-80-528-446,30.0,640,480,train +n1ic2Kxv0lo,1950,2100,178-82-548-452,30.0,640,480,train +n1ic2Kxv0lo,2400,2550,155-89-541-475,30.0,640,480,train +n1ic2Kxv0lo,2700,2850,155-85-534-464,30.0,640,480,train +n1ic2Kxv0lo,3450,3600,145-26-560-441,30.0,640,480,train +n1ic2Kxv0lo,3794,4050,131-37-560-466,30.0,640,480,train +n1ic2Kxv0lo,4502,4650,129-35-571-477,30.0,640,480,train +n1ic2Kxv0lo,4650,4800,103-28-537-462,30.0,640,480,train +n1ic2Kxv0lo,4848,5019,91-1-555-465,30.0,640,480,train +n1ic2Kxv0lo,5400,5597,121-27-569-475,30.0,640,480,train +n1ic2Kxv0lo,7820,7972,122-36-544-458,30.0,640,480,train +n1ic2Kxv0lo,8408,8550,131-66-536-471,30.0,640,480,train +n1ic2Kxv0lo,8700,9150,97-23-546-472,30.0,640,480,train +LfBbfOEVV3c,29,282,626-411-1153-938,25.0,1920,1080,train +LfBbfOEVV3c,593,746,530-399-1121-990,25.0,1920,1080,train +LfBbfOEVV3c,841,1175,616-431-1162-977,25.0,1920,1080,train +LfBbfOEVV3c,3916,4071,618-474-1138-994,25.0,1920,1080,train +LfBbfOEVV3c,4275,4429,624-482-1106-964,25.0,1920,1080,train +HOsiZya4deQ,3227,3396,98-1-559-462,25.0,640,480,train +HOsiZya4deQ,8561,8691,84-1-562-479,25.0,640,480,train +HOsiZya4deQ,8796,8924,96-5-543-452,25.0,640,480,train +e4VIw41R-PU,274,680,364-50-972-658,29.97,1280,720,train +e4VIw41R-PU,905,1121,330-93-946-709,29.97,1280,720,train +e4VIw41R-PU,1121,1433,267-57-887-677,29.97,1280,720,train +e4VIw41R-PU,1819,1969,225-64-867-706,29.97,1280,720,train +e4VIw41R-PU,2127,2262,155-78-794-717,29.97,1280,720,train 
+e4VIw41R-PU,2262,2508,282-49-897-664,29.97,1280,720,train +e4VIw41R-PU,2508,2701,209-101-807-699,29.97,1280,720,train +e4VIw41R-PU,2987,3140,203-55-834-686,29.97,1280,720,train +e4VIw41R-PU,5155,5286,364-69-962-667,29.97,1280,720,train +e4VIw41R-PU,5499,6211,355-50-1000-695,29.97,1280,720,train +e4VIw41R-PU,6225,6474,239-79-815-655,29.97,1280,720,train +e4VIw41R-PU,6602,7020,11-20-632-641,29.97,1280,720,train +e4VIw41R-PU,7194,7385,40-17-625-602,29.97,1280,720,train +e4VIw41R-PU,8660,8877,348-47-1004-703,29.97,1280,720,train +e4VIw41R-PU,9338,9537,302-102-854-654,29.97,1280,720,train +e4VIw41R-PU,9537,9785,390-127-921-658,29.97,1280,720,train +e4VIw41R-PU,9785,10060,477-118-1017-658,29.97,1280,720,train +e4VIw41R-PU,10085,10243,347-63-935-651,29.97,1280,720,train +j_UJ-M6KnLA,576,762,115-41-521-447,30.0,640,480,train +j_UJ-M6KnLA,1602,1755,85-56-481-452,30.0,640,480,train +j_UJ-M6KnLA,1779,1995,143-58-530-445,30.0,640,480,train +j_UJ-M6KnLA,2897,3026,183-40-590-447,30.0,640,480,train +j_UJ-M6KnLA,3273,3434,118-56-505-443,30.0,640,480,train +j_UJ-M6KnLA,3994,4201,149-84-500-435,30.0,640,480,train +j_UJ-M6KnLA,4647,4805,189-96-538-445,30.0,640,480,train +j_UJ-M6KnLA,9125,9276,122-90-455-423,30.0,640,480,train +j_UJ-M6KnLA,12123,12348,192-80-529-417,30.0,640,480,train +j_UJ-M6KnLA,12706,12892,167-87-507-427,30.0,640,480,train +uXjfq4otSPg,108,318,102-59-492-449,29.97,640,480,train +Z3pAsUWV5d8,669,1049,137-64-520-447,29.97,720,480,train +Z3pAsUWV5d8,1049,1215,86-54-473-441,29.97,720,480,train +Z3pAsUWV5d8,1215,1400,178-69-565-456,29.97,720,480,train +Z3pAsUWV5d8,1400,1738,129-48-536-455,29.97,720,480,train +Z3pAsUWV5d8,1738,2240,126-25-568-467,29.97,720,480,train +Z3pAsUWV5d8,2240,2403,180-68-571-459,29.97,720,480,train +Z3pAsUWV5d8,2403,2790,202-46-605-449,29.97,720,480,train +Z3pAsUWV5d8,3118,4142,115-50-528-463,29.97,720,480,train +Z3pAsUWV5d8,4142,4281,178-56-570-448,29.97,720,480,train +Z3pAsUWV5d8,4281,4414,185-35-607-457,29.97,720,480,train 
+kgx5w20u0k4,385,957,362-96-983-717,25.0,1280,720,train +kgx5w20u0k4,957,1204,324-99-943-718,25.0,1280,720,train +kgx5w20u0k4,1803,2117,338-92-962-716,25.0,1280,720,train +kgx5w20u0k4,3594,3778,346-95-954-703,25.0,1280,720,train +ZAycgaoKipI,107,496,152-9-623-480,30.0,640,480,train +ZAycgaoKipI,3778,3967,206-41-637-472,30.0,640,480,train +ZAycgaoKipI,5284,5413,62-63-478-479,30.0,640,480,train +m4ccN_yYKlA,3039,3220,396-112-990-706,29.97,1280,720,train +m4ccN_yYKlA,6281,7305,338-30-1021-713,29.97,1280,720,train +m4ccN_yYKlA,7393,8417,339-22-1027-710,29.97,1280,720,train +m4ccN_yYKlA,8417,9143,347-18-1041-712,29.97,1280,720,train +m4ccN_yYKlA,9143,9753,416-25-1107-716,29.97,1280,720,train +m4ccN_yYKlA,12409,13433,326-16-1028-718,29.97,1280,720,train +m4ccN_yYKlA,13433,13814,316-13-1015-712,29.97,1280,720,train +m4ccN_yYKlA,13860,14884,316-31-999-714,29.97,1280,720,train +m4ccN_yYKlA,15047,15483,246-30-932-716,29.97,1280,720,train +m4ccN_yYKlA,15528,16552,331-29-1010-708,29.97,1280,720,train +m4ccN_yYKlA,16552,17524,375-57-1016-698,29.97,1280,720,train +m4ccN_yYKlA,17524,18548,328-24-1013-709,29.97,1280,720,train +m4ccN_yYKlA,18548,18718,376-98-980-702,29.97,1280,720,train +m4ccN_yYKlA,18743,19202,353-32-1030-709,29.97,1280,720,train +KYdyIdusD0g,426,876,150-70-541-461,25.0,640,480,train +KYdyIdusD0g,876,1641,143-69-535-461,25.0,640,480,train +KYdyIdusD0g,2665,3001,77-56-497-476,25.0,640,480,train +KYdyIdusD0g,3126,3282,145-83-518-456,25.0,640,480,train +KYdyIdusD0g,3322,3841,109-64-509-464,25.0,640,480,train +KYdyIdusD0g,3925,4124,213-65-618-470,25.0,640,480,train +KYdyIdusD0g,4124,4626,109-53-523-467,25.0,640,480,train +KYdyIdusD0g,4728,4931,223-45-629-451,25.0,640,480,train +KYdyIdusD0g,4931,5402,224-49-610-435,25.0,640,480,train +KYdyIdusD0g,5530,5676,172-35-567-430,25.0,640,480,train +kgFEyCiX1To,0,1024,76-41-505-470,29.97,640,480,train +kgFEyCiX1To,1094,1238,59-45-494-480,29.97,640,480,train +kgFEyCiX1To,6720,7041,86-11-523-448,29.97,640,480,train 
+kgFEyCiX1To,8330,8458,107-0-574-467,29.97,640,480,train +kgFEyCiX1To,9911,10056,134-51-548-465,29.97,640,480,train +-nRYJnnnTuk,523,688,744-292-1364-912,29.97,1440,1080,train +-nRYJnnnTuk,3891,4024,743-423-1239-919,29.97,1440,1080,train +-nRYJnnnTuk,4025,4229,726-408-1230-912,29.97,1440,1080,train +-nRYJnnnTuk,5368,5543,566-340-1140-914,29.97,1440,1080,train +-nRYJnnnTuk,6217,6377,702-438-1191-927,29.97,1440,1080,train +-nRYJnnnTuk,6411,6621,678-421-1196-939,29.97,1440,1080,train +-nRYJnnnTuk,6957,7093,723-315-1328-920,29.97,1440,1080,train +KtVCCLlkcKg,72,1096,587-93-1444-950,23.97,1920,1080,train +KtVCCLlkcKg,1096,2120,577-90-1412-925,23.97,1920,1080,train +KtVCCLlkcKg,2120,2393,570-83-1419-932,23.97,1920,1080,train +KtVCCLlkcKg,2399,3423,566-80-1413-927,23.97,1920,1080,train +KtVCCLlkcKg,3423,4115,576-110-1395-929,23.97,1920,1080,train +KtVCCLlkcKg,4115,4472,564-49-1469-954,23.97,1920,1080,train +KtVCCLlkcKg,4472,5496,550-3-1510-963,23.97,1920,1080,train +KtVCCLlkcKg,5496,5754,610-123-1434-947,23.97,1920,1080,train +a6qZnwdir0c,60,502,141-363-788-1010,30.0,1920,1080,train +a6qZnwdir0c,502,875,192-332-854-994,30.0,1920,1080,train +a6qZnwdir0c,1106,1280,186-239-832-885,30.0,1920,1080,train +a6qZnwdir0c,1572,1709,288-248-851-811,30.0,1920,1080,train +a6qZnwdir0c,2559,2892,85-299-785-999,30.0,1920,1080,train +a6qZnwdir0c,2892,3074,207-351-844-988,30.0,1920,1080,train +a6qZnwdir0c,3468,3731,471-292-1175-996,30.0,1920,1080,train +a6qZnwdir0c,4830,5021,790-369-1472-1051,30.0,1920,1080,train +a6qZnwdir0c,5326,5472,877-292-1638-1053,30.0,1920,1080,train +kgvcI9oe3NI,386,520,160-35-592-467,25.0,640,480,test +kgvcI9oe3NI,1578,1763,133-29-556-452,25.0,640,480,test +kgvcI9oe3NI,1818,2842,102-5-571-474,25.0,640,480,test +kgvcI9oe3NI,2890,3413,60-27-507-474,25.0,640,480,test +Ppk7BAngN_A,141,315,219-53-616-450,29.97,640,480,train +Ppk7BAngN_A,4433,4569,150-93-481-424,29.97,640,480,train +Ppk7BAngN_A,9338,9492,194-56-559-421,29.97,640,480,train 
+OgChhgaq8Zw,1441,1695,288-26-952-690,29.97,1280,720,train +OgChhgaq8Zw,2192,2648,251-15-913-677,29.97,1280,720,train +OgChhgaq8Zw,3125,3308,116-71-736-691,29.97,1280,720,train +OgChhgaq8Zw,5529,6019,299-9-990-700,29.97,1280,720,train +OgChhgaq8Zw,6019,6162,3-8-687-692,29.97,1280,720,train +OgChhgaq8Zw,6749,6960,60-38-732-710,29.97,1280,720,train +OgChhgaq8Zw,7304,8328,288-1-988-701,29.97,1280,720,train +2aDZU_WgtoY,0,395,126-143-401-418,25.0,640,480,train +2aDZU_WgtoY,6225,6498,175-98-462-385,25.0,640,480,train +MEtdtJwdfwg,127,704,350-86-976-712,25.0,1280,720,train +MEtdtJwdfwg,2795,3497,430-104-1041-715,25.0,1280,720,train +4h845uDRDR8,481,1205,620-113-1570-1063,29.97,1920,1080,train +4h845uDRDR8,1205,2229,887-174-1792-1079,29.97,1920,1080,train +4h845uDRDR8,2229,2362,831-202-1669-1040,29.97,1920,1080,train +4h845uDRDR8,2362,2579,949-183-1811-1045,29.97,1920,1080,train +4h845uDRDR8,2579,2809,746-183-1578-1015,29.97,1920,1080,train +4h845uDRDR8,3913,4552,507-152-1431-1076,29.97,1920,1080,train +4h845uDRDR8,6488,6831,528-141-1444-1057,29.97,1920,1080,train +4h845uDRDR8,7549,8104,409-77-1400-1068,29.97,1920,1080,train +4h845uDRDR8,8105,8313,354-121-1245-1012,29.97,1920,1080,train +4h845uDRDR8,8418,8720,608-126-1542-1060,29.97,1920,1080,train +4h845uDRDR8,8800,9824,457-124-1413-1080,29.97,1920,1080,train +4h845uDRDR8,9857,10554,541-178-1404-1041,29.97,1920,1080,train +4h845uDRDR8,10743,11552,180-38-1209-1067,29.97,1920,1080,train +4h845uDRDR8,11659,12669,480-106-1443-1069,29.97,1920,1080,train +4h845uDRDR8,12669,12942,245-124-1153-1032,29.97,1920,1080,train +4h845uDRDR8,13287,13586,440-9-1485-1054,29.97,1920,1080,train +qgWnTgmgi0Q,274,630,126-48-510-432,25.0,640,480,train +qgWnTgmgi0Q,630,818,155-90-496-431,25.0,640,480,train +qgWnTgmgi0Q,829,1006,98-77-448-427,25.0,640,480,train +qgWnTgmgi0Q,2700,2988,119-89-451-421,25.0,640,480,train +qgWnTgmgi0Q,2988,3132,133-96-451-414,25.0,640,480,train +qgWnTgmgi0Q,3923,4080,195-91-531-427,25.0,640,480,train 
+qgWnTgmgi0Q,4889,5039,178-79-517-418,25.0,640,480,train +qgWnTgmgi0Q,6132,6427,231-87-574-430,25.0,640,480,train +qgWnTgmgi0Q,6562,6706,165-65-530-430,25.0,640,480,train +qgWnTgmgi0Q,10358,10496,132-78-458-404,25.0,640,480,train +VMSqvTE90hk,165,530,100-85-454-439,29.97,640,480,test +VMSqvTE90hk,743,887,114-89-467-442,29.97,640,480,test +VMSqvTE90hk,2955,3097,181-71-559-449,29.97,640,480,test +VMSqvTE90hk,3198,3327,200-76-552-428,29.97,640,480,test +VMSqvTE90hk,3327,3476,157-54-524-421,29.97,640,480,test +VMSqvTE90hk,4248,4621,150-66-533-449,29.97,640,480,test +VMSqvTE90hk,6598,6749,78-91-414-427,29.97,640,480,test +VMSqvTE90hk,7168,7312,77-46-482-451,29.97,640,480,test +LHoIPFDYBCc,3922,4349,230-106-834-710,29.97,1280,720,train +LHoIPFDYBCc,7529,7681,458-51-1110-703,29.97,1280,720,train +LHoIPFDYBCc,7681,7892,424-17-1107-700,29.97,1280,720,train +LHoIPFDYBCc,7893,8187,343-62-985-704,29.97,1280,720,train +LHoIPFDYBCc,11054,11260,535-11-1244-720,29.97,1280,720,train +LHoIPFDYBCc,19928,20078,372-27-1014-669,29.97,1280,720,train +Tlou0HOrOE4,227,1042,179-77-507-405,29.97,640,480,train +Tlou0HOrOE4,1306,1504,191-52-562-423,29.97,640,480,train +Tlou0HOrOE4,2613,2755,193-124-457-388,29.97,640,480,train +Tlou0HOrOE4,4709,4862,214-105-501-392,29.97,640,480,train +Tlou0HOrOE4,5273,5506,213-104-480-371,29.97,640,480,train +Tlou0HOrOE4,5554,5700,243-82-525-364,29.97,640,480,train +Tlou0HOrOE4,6157,6335,218-122-486-390,29.97,640,480,train +Tlou0HOrOE4,6423,6603,182-102-470-390,29.97,640,480,train +Tlou0HOrOE4,6905,7118,190-62-524-396,29.97,640,480,train +Tlou0HOrOE4,8654,8811,224-122-491-389,29.97,640,480,train +Tlou0HOrOE4,9114,9252,117-107-431-421,29.97,640,480,train +rfXnvX5eTAM,2,621,469-28-1374-933,25.0,1920,1080,train +rfXnvX5eTAM,809,1162,484-75-1481-1072,25.0,1920,1080,train +rfXnvX5eTAM,1285,1440,584-68-1532-1016,25.0,1920,1080,train +rfXnvX5eTAM,1622,1853,512-48-1505-1041,25.0,1920,1080,train +rfXnvX5eTAM,1882,2027,621-130-1523-1032,25.0,1920,1080,train 
+rfXnvX5eTAM,3124,3336,319-62-1292-1035,25.0,1920,1080,train +rfXnvX5eTAM,3946,4085,325-68-1336-1079,25.0,1920,1080,train +eD0O4ZrSjjQ,199,1223,270-60-597-387,25.0,854,480,train +eD0O4ZrSjjQ,1223,1418,319-73-628-382,25.0,854,480,train +eD0O4ZrSjjQ,1418,1797,294-70-622-398,25.0,854,480,train +eD0O4ZrSjjQ,1802,1940,343-71-647-375,25.0,854,480,train +eD0O4ZrSjjQ,1942,2315,303-85-595-377,25.0,854,480,train +eD0O4ZrSjjQ,2315,3140,302-70-613-381,25.0,854,480,train +eD0O4ZrSjjQ,3140,3830,220-76-523-379,25.0,854,480,train +eD0O4ZrSjjQ,3870,4469,143-76-457-390,25.0,854,480,train +eD0O4ZrSjjQ,4628,4758,295-83-602-390,25.0,854,480,train +eD0O4ZrSjjQ,4935,5125,255-82-578-405,25.0,854,480,train +eD0O4ZrSjjQ,5125,5779,308-66-647-405,25.0,854,480,train +eD0O4ZrSjjQ,5877,6901,201-65-528-392,25.0,854,480,train +eD0O4ZrSjjQ,6901,7386,234-80-529-375,25.0,854,480,train +eD0O4ZrSjjQ,7387,7548,278-84-574-380,25.0,854,480,train +eD0O4ZrSjjQ,7548,7696,201-83-490-372,25.0,854,480,train +eD0O4ZrSjjQ,7702,7917,240-94-522-376,25.0,854,480,train +eD0O4ZrSjjQ,7943,8180,265-93-542-370,25.0,854,480,train +eD0O4ZrSjjQ,8240,8451,311-86-610-385,25.0,854,480,train +eD0O4ZrSjjQ,8451,9475,294-69-614-389,25.0,854,480,train +eD0O4ZrSjjQ,9560,9988,315-79-601-365,25.0,854,480,train +eD0O4ZrSjjQ,10002,10312,257-48-579-370,25.0,854,480,train +eD0O4ZrSjjQ,10313,10553,287-95-559-367,25.0,854,480,train +X7Q04MoQxXo,51,272,273-162-535-424,29.97,854,480,train +X7Q04MoQxXo,440,577,268-170-553-455,29.97,854,480,train +X7Q04MoQxXo,661,791,211-183-485-457,29.97,854,480,train +X7Q04MoQxXo,834,985,217-174-496-453,29.97,854,480,train +X7Q04MoQxXo,1379,1606,287-132-563-408,29.97,854,480,train +X7Q04MoQxXo,4037,4187,327-161-590-424,29.97,854,480,train +X7Q04MoQxXo,4480,4689,298-152-568-422,29.97,854,480,train +X7Q04MoQxXo,6739,6869,276-110-552-386,29.97,854,480,train +JHaOu5Z2ScI,1302,1530,344-103-714-473,29.97,854,480,train +JHaOu5Z2ScI,4591,4752,263-62-680-479,29.97,854,480,train 
+JHaOu5Z2ScI,5431,5562,280-70-677-467,29.97,854,480,train +JHaOu5Z2ScI,6634,6769,212-70-617-475,29.97,854,480,train +R108l6J4N1o,634,767,381-189-1108-916,30.0,1920,1080,train +R108l6J4N1o,1547,1727,378-41-1183-846,30.0,1920,1080,train +R108l6J4N1o,3244,3387,398-23-1192-817,30.0,1920,1080,train +R108l6J4N1o,9079,9210,1484-138-1904-558,30.0,1920,1080,train +R108l6J4N1o,9666,10014,510-105-1314-909,30.0,1920,1080,train +R108l6J4N1o,10596,10732,1333-82-1760-509,30.0,1920,1080,train +R108l6J4N1o,10742,10968,1422-121-1851-550,30.0,1920,1080,train +wbwUCsr9SR8,183,347,316-91-887-662,29.97,1280,720,train +wbwUCsr9SR8,3634,3857,432-97-976-641,29.97,1280,720,train +wbwUCsr9SR8,3896,4104,424-188-840-604,29.97,1280,720,train +KjMxdYJOwqI,0,470,31-19-652-640,25.0,1280,720,train +KjMxdYJOwqI,543,747,121-28-695-602,25.0,1280,720,train +KjMxdYJOwqI,1240,1599,307-1-962-656,25.0,1280,720,train +KjMxdYJOwqI,1599,2068,391-13-1070-692,25.0,1280,720,train +KjMxdYJOwqI,2687,2970,239-23-902-686,25.0,1280,720,train +KjMxdYJOwqI,3782,4050,57-38-662-643,25.0,1280,720,train +KjMxdYJOwqI,4050,4644,16-10-624-618,25.0,1280,720,train +KjMxdYJOwqI,4866,5084,272-36-832-596,25.0,1280,720,train +s3b6B6Vw2ZQ,494,858,184-64-661-541,29.97,1280,720,train +s3b6B6Vw2ZQ,2285,2468,438-93-888-543,29.97,1280,720,train +s3b6B6Vw2ZQ,3262,3440,379-77-851-549,29.97,1280,720,train +s3b6B6Vw2ZQ,3498,3808,292-96-749-553,29.97,1280,720,train +s3b6B6Vw2ZQ,3876,4009,202-82-672-552,29.97,1280,720,train +s3b6B6Vw2ZQ,4146,4371,292-110-726-544,29.97,1280,720,train +s3b6B6Vw2ZQ,4407,4562,275-98-715-538,29.97,1280,720,train +s3b6B6Vw2ZQ,4635,4943,286-108-723-545,29.97,1280,720,train +s3b6B6Vw2ZQ,4943,5075,357-88-796-527,29.97,1280,720,train +s3b6B6Vw2ZQ,5320,5573,290-79-751-540,29.97,1280,720,train +s3b6B6Vw2ZQ,6256,6458,410-95-851-536,29.97,1280,720,train +s3b6B6Vw2ZQ,6458,6604,498-88-938-528,29.97,1280,720,train +s3b6B6Vw2ZQ,7023,7281,497-87-939-529,29.97,1280,720,train 
+s3b6B6Vw2ZQ,7731,7916,551-79-1006-534,29.97,1280,720,train +s3b6B6Vw2ZQ,7943,8160,612-57-1077-522,29.97,1280,720,train +s3b6B6Vw2ZQ,8459,8640,278-31-827-580,29.97,1280,720,train +s3b6B6Vw2ZQ,9087,9262,86-62-628-604,29.97,1280,720,train +s3b6B6Vw2ZQ,9364,9500,185-99-664-578,29.97,1280,720,train +s3b6B6Vw2ZQ,9846,9982,134-110-592-568,29.97,1280,720,train +s3b6B6Vw2ZQ,10339,10639,327-113-791-577,29.97,1280,720,train +s3b6B6Vw2ZQ,10913,11278,287-67-791-571,29.97,1280,720,train +36SpweR9O3s,8618,8748,98-139-380-421,25.0,600,480,train +nAQEOC1Z10M,7750,7956,569-105-1130-666,59.94,1280,720,test +nAQEOC1Z10M,7999,8438,414-89-997-672,59.94,1280,720,test +nAQEOC1Z10M,8438,8572,339-61-925-647,59.94,1280,720,test +nAQEOC1Z10M,8572,8848,411-64-998-651,59.94,1280,720,test +nAQEOC1Z10M,8848,9055,445-36-1052-643,59.94,1280,720,test +nAQEOC1Z10M,9055,9434,370-96-919-645,59.94,1280,720,test +nAQEOC1Z10M,9434,9587,251-92-805-646,59.94,1280,720,test +nAQEOC1Z10M,10446,10614,309-92-868-651,59.94,1280,720,test +nAQEOC1Z10M,10776,10955,542-102-1085-645,59.94,1280,720,test +nAQEOC1Z10M,11006,11200,712-115-1258-661,59.94,1280,720,test +nAQEOC1Z10M,11244,11869,363-118-916-671,59.94,1280,720,test +nAQEOC1Z10M,11869,12265,259-123-784-648,59.94,1280,720,test +nAQEOC1Z10M,12265,12835,330-101-905-676,59.94,1280,720,test +nAQEOC1Z10M,12926,13159,315-112-852-649,59.94,1280,720,test +nAQEOC1Z10M,13257,13665,359-102-921-664,59.94,1280,720,test +nAQEOC1Z10M,13665,13832,215-102-770-657,59.94,1280,720,test +nAQEOC1Z10M,13891,14101,151-95-714-658,59.94,1280,720,test +nAQEOC1Z10M,14101,14461,48-104-605-661,59.94,1280,720,test +nAQEOC1Z10M,14730,15436,88-98-631-641,59.94,1280,720,test +nAQEOC1Z10M,15442,15583,19-108-522-611,59.94,1280,720,test +nAQEOC1Z10M,15624,15783,94-120-570-596,59.94,1280,720,test +nAQEOC1Z10M,16313,16524,266-151-761-646,59.94,1280,720,test +nAQEOC1Z10M,16558,16877,370-104-882-616,59.94,1280,720,test +nAQEOC1Z10M,17094,17364,594-177-1029-612,59.94,1280,720,test 
+nAQEOC1Z10M,17670,18082,419-131-931-643,59.94,1280,720,test +nAQEOC1Z10M,18236,18704,655-125-1192-662,59.94,1280,720,test +nAQEOC1Z10M,18798,19102,610-130-1136-656,59.94,1280,720,test +nAQEOC1Z10M,19102,19658,513-135-1050-672,59.94,1280,720,test +nAQEOC1Z10M,19658,19817,411-133-930-652,59.94,1280,720,test +nAQEOC1Z10M,19858,20177,556-141-1073-658,59.94,1280,720,test +nAQEOC1Z10M,20177,20600,492-115-1038-661,59.94,1280,720,test +nAQEOC1Z10M,20600,20777,557-116-1128-687,59.94,1280,720,test +5SRTl6JcQ8g,42,808,229-101-787-659,30.0,960,720,train +5SRTl6JcQ8g,927,1081,307-156-750-599,30.0,960,720,train +5SRTl6JcQ8g,2322,2464,180-143-679-642,30.0,960,720,train +5SRTl6JcQ8g,2730,2859,301-139-793-631,30.0,960,720,train +5SRTl6JcQ8g,5958,6251,219-80-785-646,30.0,960,720,train +5SRTl6JcQ8g,8002,8158,205-112-768-675,30.0,960,720,train +5SRTl6JcQ8g,9419,9551,227-117-768-658,30.0,960,720,train +5SRTl6JcQ8g,9661,9828,176-46-779-649,30.0,960,720,train +vNfhp02w9s0,0,275,92-30-521-459,25.0,640,480,train +vNfhp02w9s0,275,473,99-9-558-468,25.0,640,480,train +vNfhp02w9s0,526,957,136-16-580-460,25.0,640,480,train +vNfhp02w9s0,1115,1296,149-21-586-458,25.0,640,480,train +vNfhp02w9s0,1303,1586,165-13-626-474,25.0,640,480,train +vNfhp02w9s0,1676,1905,129-10-586-467,25.0,640,480,train +vNfhp02w9s0,2087,2349,113-9-571-467,25.0,640,480,train +vNfhp02w9s0,2452,2825,128-23-559-454,25.0,640,480,train +vNfhp02w9s0,3382,3925,54-49-454-449,25.0,640,480,train +vNfhp02w9s0,4066,4442,70-67-462-459,25.0,640,480,train +vNfhp02w9s0,4648,5130,87-29-514-456,25.0,640,480,train +vNfhp02w9s0,5130,5331,115-61-510-456,25.0,640,480,train +vNfhp02w9s0,5415,5670,121-11-579-469,25.0,640,480,train +vNfhp02w9s0,5670,5852,89-16-528-455,25.0,640,480,train +vNfhp02w9s0,5965,6412,137-17-585-465,25.0,640,480,train +vNfhp02w9s0,6732,6866,88-38-512-462,25.0,640,480,train +vNfhp02w9s0,6866,7091,138-14-578-454,25.0,640,480,train +vNfhp02w9s0,7133,7426,127-0-597-469,25.0,640,480,train 
+EaPwYQC-5pE,989,1191,377-57-985-665,29.97,1280,720,train +EaPwYQC-5pE,1191,1328,357-62-990-695,29.97,1280,720,train +EaPwYQC-5pE,1328,2300,497-86-1105-694,29.97,1280,720,train +EaPwYQC-5pE,2322,3346,370-117-874-621,29.97,1280,720,train +EaPwYQC-5pE,3677,3928,333-63-939-669,29.97,1280,720,train +EaPwYQC-5pE,3939,4526,292-58-887-653,29.97,1280,720,train +EaPwYQC-5pE,4562,4692,282-73-875-666,29.97,1280,720,train +EaPwYQC-5pE,4692,4849,445-75-1029-659,29.97,1280,720,train +EaPwYQC-5pE,4870,5075,385-77-968-660,29.97,1280,720,train +EaPwYQC-5pE,5282,5592,489-135-1014-660,29.97,1280,720,train +EaPwYQC-5pE,5602,6151,420-71-1017-668,29.97,1280,720,train +EaPwYQC-5pE,6238,6601,447-126-970-649,29.97,1280,720,train +EaPwYQC-5pE,6631,7182,479-107-1039-667,29.97,1280,720,train +EaPwYQC-5pE,7251,7407,398-60-996-658,29.97,1280,720,train +irQNFGmGRQQ,1350,1533,376-105-657-386,25.0,854,480,train +irQNFGmGRQQ,1559,1918,345-106-639-400,25.0,854,480,train +irQNFGmGRQQ,1918,2100,359-91-665-397,25.0,854,480,train +irQNFGmGRQQ,2184,2348,315-95-623-403,25.0,854,480,train +irQNFGmGRQQ,2379,2525,376-105-662-391,25.0,854,480,train +irQNFGmGRQQ,2662,2790,365-108-623-366,25.0,854,480,train +irQNFGmGRQQ,2819,2956,418-84-707-373,25.0,854,480,train +irQNFGmGRQQ,3122,3324,434-88-709-363,25.0,854,480,train +irQNFGmGRQQ,3818,4112,195-78-495-378,25.0,854,480,train +irQNFGmGRQQ,4422,4633,343-127-600-384,25.0,854,480,train +irQNFGmGRQQ,4667,5691,299-87-600-388,25.0,854,480,train +irQNFGmGRQQ,5990,6150,338-114-603-379,25.0,854,480,train +irQNFGmGRQQ,6986,7123,403-105-669-371,25.0,854,480,train +irQNFGmGRQQ,8158,8325,422-40-744-362,25.0,854,480,train +irQNFGmGRQQ,8536,8757,478-88-749-359,25.0,854,480,train +irQNFGmGRQQ,8776,8913,495-53-794-352,25.0,854,480,train +irQNFGmGRQQ,8916,9099,497-74-771-348,25.0,854,480,train +irQNFGmGRQQ,9320,9546,332-53-658-379,25.0,854,480,train +irQNFGmGRQQ,10512,10767,168-73-515-420,25.0,854,480,train +irQNFGmGRQQ,10877,11167,114-53-470-409,25.0,854,480,train 
+irQNFGmGRQQ,11405,11767,274-90-603-419,25.0,854,480,train +irQNFGmGRQQ,11887,12038,301-96-614-409,25.0,854,480,train +irQNFGmGRQQ,12068,12345,253-64-597-408,25.0,854,480,train +MGYF1aDwUKg,90,294,142-122-459-439,25.0,640,480,train +xlHGjYHyjmM,3102,3428,244-12-901-669,30.0,1280,720,train +xlHGjYHyjmM,3430,3580,389-58-1037-706,30.0,1280,720,train +xlHGjYHyjmM,3638,3771,328-42-972-686,30.0,1280,720,train +xlHGjYHyjmM,3771,4079,309-9-977-677,30.0,1280,720,train +xlHGjYHyjmM,4103,4242,403-9-1081-687,30.0,1280,720,train +xlHGjYHyjmM,4279,4496,326-33-974-681,30.0,1280,720,train +xlHGjYHyjmM,4719,5353,330-36-980-686,30.0,1280,720,train +xlHGjYHyjmM,5640,6064,295-17-966-688,30.0,1280,720,train +xlHGjYHyjmM,6321,6456,298-35-959-696,30.0,1280,720,train +xlHGjYHyjmM,7579,7762,317-23-983-689,30.0,1280,720,train +xlHGjYHyjmM,7762,7956,268-30-958-720,30.0,1280,720,train +xlHGjYHyjmM,8604,9595,339-15-1008-684,30.0,1280,720,train +xlHGjYHyjmM,11581,11725,321-26-999-704,30.0,1280,720,train +lybYf6hakUs,2593,2722,227-27-897-697,25.0,1280,720,train +lybYf6hakUs,4230,4438,250-8-932-690,25.0,1280,720,train +aAwbJ9MO91I,1200,2224,400-177-830-607,29.97,1280,720,train +aAwbJ9MO91I,2224,2711,402-178-826-602,29.97,1280,720,train +aAwbJ9MO91I,2804,2953,387-179-823-615,29.97,1280,720,train +aAwbJ9MO91I,3061,3208,391-182-821-612,29.97,1280,720,train +aAwbJ9MO91I,3318,3486,392-185-822-615,29.97,1280,720,train +aAwbJ9MO91I,3569,4029,385-180-826-621,29.97,1280,720,train +aAwbJ9MO91I,4770,4990,398-178-822-602,29.97,1280,720,train +aAwbJ9MO91I,5137,5465,384-173-824-613,29.97,1280,720,train +aAwbJ9MO91I,5508,5662,390-172-831-613,29.97,1280,720,train +aAwbJ9MO91I,5920,6137,405-188-818-601,29.97,1280,720,train +aAwbJ9MO91I,6240,6400,498-256-826-584,29.97,1280,720,train +aAwbJ9MO91I,6453,6812,473-213-849-589,29.97,1280,720,train +aAwbJ9MO91I,6812,6974,461-234-841-614,29.97,1280,720,train +aAwbJ9MO91I,7044,7230,473-242-817-586,29.97,1280,720,train 
+aAwbJ9MO91I,7287,7466,469-231-834-596,29.97,1280,720,train +aAwbJ9MO91I,7529,7674,476-241-822-587,29.97,1280,720,train +aAwbJ9MO91I,7790,8065,477-233-833-589,29.97,1280,720,train +aAwbJ9MO91I,8440,8607,480-261-808-589,29.97,1280,720,train +aAwbJ9MO91I,8899,9160,471-234-828-591,29.97,1280,720,train +aAwbJ9MO91I,9201,9423,453-230-830-607,29.97,1280,720,train +aAwbJ9MO91I,9458,10482,380-133-850-603,29.97,1280,720,train +aAwbJ9MO91I,10482,10993,400-184-824-608,29.97,1280,720,train +aAwbJ9MO91I,11043,11203,386-181-812-607,29.97,1280,720,train +aAwbJ9MO91I,11253,11381,397-180-818-601,29.97,1280,720,train +aAwbJ9MO91I,11443,11941,375-127-855-607,29.97,1280,720,train +aAwbJ9MO91I,11943,12093,361-127-834-600,29.97,1280,720,train +aAwbJ9MO91I,12147,12279,368-124-860-616,29.97,1280,720,train +aAwbJ9MO91I,13619,13771,452-238-802-588,29.97,1280,720,train +aAwbJ9MO91I,13785,13917,429-251-764-586,29.97,1280,720,train +aAwbJ9MO91I,14197,14338,415-237-767-589,29.97,1280,720,train +aAwbJ9MO91I,14498,14720,456-260-784-588,29.97,1280,720,train +aAwbJ9MO91I,14720,14925,494-257-826-589,29.97,1280,720,train +aAwbJ9MO91I,15040,15199,457-260-781-584,29.97,1280,720,train +aAwbJ9MO91I,15574,15767,500-263-822-585,29.97,1280,720,train +aAwbJ9MO91I,15795,16046,455-256-785-586,29.97,1280,720,train +aAwbJ9MO91I,16195,16490,491-260-818-587,29.97,1280,720,train +aAwbJ9MO91I,16490,16679,490-256-821-587,29.97,1280,720,train +aAwbJ9MO91I,16740,16950,460-256-787-583,29.97,1280,720,train +aAwbJ9MO91I,16951,17234,471-255-801-585,29.97,1280,720,train +aAwbJ9MO91I,18514,18665,401-163-835-597,29.97,1280,720,train +aAwbJ9MO91I,20234,20480,409-174-874-639,29.97,1280,720,train +aAwbJ9MO91I,21511,21660,481-255-833-607,29.97,1280,720,train +aAwbJ9MO91I,21788,21958,507-259-837-589,29.97,1280,720,train +aAwbJ9MO91I,23001,23143,482-255-834-607,29.97,1280,720,train +aAwbJ9MO91I,24614,24768,401-190-812-601,29.97,1280,720,train +aAwbJ9MO91I,24768,25270,388-169-818-599,29.97,1280,720,train 
+aAwbJ9MO91I,25271,25439,405-177-827-599,29.97,1280,720,train +aAwbJ9MO91I,28293,28430,478-257-803-582,29.97,1280,720,train +aAwbJ9MO91I,29027,29482,493-255-826-588,29.97,1280,720,train +aAwbJ9MO91I,29640,29779,460-248-810-598,29.97,1280,720,train +aAwbJ9MO91I,30757,30967,473-249-811-587,29.97,1280,720,train +aAwbJ9MO91I,31022,31562,476-251-810-585,29.97,1280,720,train +aAwbJ9MO91I,31613,31879,395-188-804-597,29.97,1280,720,train +aAwbJ9MO91I,32005,32251,397-183-809-595,29.97,1280,720,train +aAwbJ9MO91I,32252,33024,396-190-804-598,29.97,1280,720,train +aAwbJ9MO91I,33215,33410,401-183-812-594,29.97,1280,720,train +aAwbJ9MO91I,33432,34082,401-183-808-590,29.97,1280,720,train +aAwbJ9MO91I,34083,34548,402-182-807-587,29.97,1280,720,train +aAwbJ9MO91I,35071,35261,402-187-811-596,29.97,1280,720,train +aAwbJ9MO91I,35327,36351,405-182-822-599,29.97,1280,720,train +aAwbJ9MO91I,36351,36772,400-177-820-597,29.97,1280,720,train +aAwbJ9MO91I,39234,39398,407-180-816-589,29.97,1280,720,train +aAwbJ9MO91I,39966,40163,404-178-817-591,29.97,1280,720,train +aAwbJ9MO91I,40340,40640,406-179-817-590,29.97,1280,720,train +aAwbJ9MO91I,40664,40934,404-178-819-593,29.97,1280,720,train +aAwbJ9MO91I,40972,41876,403-178-820-595,29.97,1280,720,train +3M5VGsUtw_Q,1600,1750,244-55-896-707,29.97,1280,720,train +3M5VGsUtw_Q,2100,2250,354-54-1002-702,29.97,1280,720,train +3M5VGsUtw_Q,2522,2650,415-58-1075-718,29.97,1280,720,train +3M5VGsUtw_Q,2650,2841,328-55-988-715,29.97,1280,720,train +3M5VGsUtw_Q,8100,8240,420-135-996-711,29.97,1280,720,train +3M5VGsUtw_Q,11044,11220,484-191-1009-716,29.97,1280,720,train +3M5VGsUtw_Q,12038,12175,556-177-1083-704,29.97,1280,720,train +3M5VGsUtw_Q,12625,12784,308-193-831-716,29.97,1280,720,train +w8YcIAuUIys,613,844,363-112-899-648,25.0,1280,720,train +w8YcIAuUIys,844,1234,298-113-860-675,25.0,1280,720,train +4SgduoCb2_8,151,1175,491-117-1220-846,29.97,1920,1080,train +4SgduoCb2_8,1426,1565,532-186-1190-844,29.97,1920,1080,train 
+4SgduoCb2_8,1734,1894,553-181-1269-897,29.97,1920,1080,train +4SgduoCb2_8,2110,2466,515-204-1144-833,29.97,1920,1080,train +4SgduoCb2_8,2986,3737,519-156-1235-872,29.97,1920,1080,train +4SgduoCb2_8,4154,4302,508-190-1208-890,29.97,1920,1080,train +4SgduoCb2_8,4302,4480,564-148-1256-840,29.97,1920,1080,train +4SgduoCb2_8,4481,4916,513-191-1154-832,29.97,1920,1080,train +4SgduoCb2_8,5530,5834,518-145-1226-853,29.97,1920,1080,train +4SgduoCb2_8,5834,6016,439-135-1156-852,29.97,1920,1080,train +yKYjtfmyVGU,228,360,556-135-1425-1004,30.0,1920,1080,train +yKYjtfmyVGU,360,625,544-131-1413-1000,30.0,1920,1080,train +yKYjtfmyVGU,817,961,522-135-1405-1018,30.0,1920,1080,train +yKYjtfmyVGU,973,1105,463-118-1334-989,30.0,1920,1080,train +yKYjtfmyVGU,1429,1982,478-125-1377-1024,30.0,1920,1080,train +yKYjtfmyVGU,2097,2246,496-100-1412-1016,30.0,1920,1080,train +yKYjtfmyVGU,2258,2423,508-131-1374-997,30.0,1920,1080,train +yKYjtfmyVGU,2423,2943,614-107-1497-990,30.0,1920,1080,train +yKYjtfmyVGU,3063,3229,561-120-1466-1025,30.0,1920,1080,train +yKYjtfmyVGU,3351,3603,561-105-1441-985,30.0,1920,1080,train +yKYjtfmyVGU,3603,3749,493-138-1358-1003,30.0,1920,1080,train +yKYjtfmyVGU,3798,4072,653-80-1556-983,30.0,1920,1080,train +yKYjtfmyVGU,4264,4655,708-132-1522-946,30.0,1920,1080,train +yKYjtfmyVGU,4685,4865,638-107-1489-958,30.0,1920,1080,train +yKYjtfmyVGU,4973,5946,546-118-1382-954,30.0,1920,1080,train +yKYjtfmyVGU,6466,6606,658-108-1474-924,30.0,1920,1080,train +yKYjtfmyVGU,6727,6915,468-149-1257-938,30.0,1920,1080,train +yKYjtfmyVGU,6915,7327,628-100-1461-933,30.0,1920,1080,train +yKYjtfmyVGU,7447,7695,572-133-1375-936,30.0,1920,1080,train +yKYjtfmyVGU,7808,7988,635-131-1419-915,30.0,1920,1080,train +yKYjtfmyVGU,7988,8156,613-134-1419-940,30.0,1920,1080,train +yKYjtfmyVGU,8156,8348,574-107-1419-952,30.0,1920,1080,train +yKYjtfmyVGU,8468,8695,539-117-1364-942,30.0,1920,1080,train +yKYjtfmyVGU,8695,9033,716-111-1554-949,30.0,1920,1080,train 
+yKYjtfmyVGU,9405,9630,610-111-1419-920,30.0,1920,1080,train +yKYjtfmyVGU,9638,9790,485-123-1296-934,30.0,1920,1080,train +yKYjtfmyVGU,9790,9970,510-134-1298-922,30.0,1920,1080,train +yKYjtfmyVGU,10270,10450,568-121-1376-929,30.0,1920,1080,train +yKYjtfmyVGU,11167,11331,523-158-1338-973,30.0,1920,1080,train +yKYjtfmyVGU,11471,11712,504-140-1325-961,30.0,1920,1080,train +yKYjtfmyVGU,11789,12012,436-144-1268-976,30.0,1920,1080,train +yKYjtfmyVGU,12012,12142,617-152-1431-966,30.0,1920,1080,train +yKYjtfmyVGU,12145,12560,635-171-1413-949,30.0,1920,1080,train +yKYjtfmyVGU,12560,12718,739-132-1558-951,30.0,1920,1080,train +yKYjtfmyVGU,12979,13144,537-126-1352-941,30.0,1920,1080,train +yKYjtfmyVGU,13861,14510,647-87-1590-1030,30.0,1920,1080,train +yKYjtfmyVGU,14522,14943,648-142-1480-974,30.0,1920,1080,train +yKYjtfmyVGU,14955,15123,685-131-1524-970,30.0,1920,1080,train +yKYjtfmyVGU,15171,15314,666-114-1512-960,30.0,1920,1080,train +yKYjtfmyVGU,15533,15706,640-152-1439-951,30.0,1920,1080,train +yKYjtfmyVGU,16577,16745,628-128-1485-985,30.0,1920,1080,train +yKYjtfmyVGU,16997,17360,648-155-1436-943,30.0,1920,1080,train +yKYjtfmyVGU,17537,17838,545-117-1380-952,30.0,1920,1080,train +xmwGBXYofEE,705,960,259-114-859-714,29.97,1280,720,train +xmwGBXYofEE,960,1120,294-123-884-713,29.97,1280,720,train +xmwGBXYofEE,1180,1384,294-136-858-700,29.97,1280,720,train +xmwGBXYofEE,1920,2080,342-136-904-698,29.97,1280,720,train +xmwGBXYofEE,2162,2400,305-128-892-715,29.97,1280,720,train +xmwGBXYofEE,2560,2720,424-168-963-707,29.97,1280,720,train +xmwGBXYofEE,2720,2867,419-194-905-680,29.97,1280,720,train +xmwGBXYofEE,3040,3198,421-187-938-704,29.97,1280,720,train +xmwGBXYofEE,3384,3520,422-182-939-699,29.97,1280,720,train +xmwGBXYofEE,4640,4791,441-225-908-692,29.97,1280,720,train +xmwGBXYofEE,6497,6715,213-161-758-706,29.97,1280,720,train +xmwGBXYofEE,7313,7459,282-154-826-698,29.97,1280,720,train +xmwGBXYofEE,7459,7680,333-127-897-691,29.97,1280,720,train 
+xmwGBXYofEE,7680,8160,302-132-879-709,29.97,1280,720,train +xmwGBXYofEE,9445,9618,411-157-967-713,29.97,1280,720,train +xmwGBXYofEE,10405,10575,544-265-966-687,29.97,1280,720,train +xmwGBXYofEE,12164,12341,415-200-928-713,29.97,1280,720,train +xmwGBXYofEE,12820,12960,445-159-1002-716,29.97,1280,720,train +xmwGBXYofEE,13096,13264,334-140-895-701,29.97,1280,720,train +xmwGBXYofEE,13381,13631,214-163-761-710,29.97,1280,720,train +xmwGBXYofEE,13760,13920,231-66-856-691,29.97,1280,720,train +xmwGBXYofEE,13920,14056,228-105-816-693,29.97,1280,720,train +xmwGBXYofEE,14992,15188,316-159-870-713,29.97,1280,720,train +xmwGBXYofEE,15761,15942,392-136-942-686,29.97,1280,720,train +xmwGBXYofEE,15942,16144,421-162-938-679,29.97,1280,720,train +xmwGBXYofEE,16455,16676,400-209-877-686,29.97,1280,720,train +xmwGBXYofEE,16703,16880,498-185-998-685,29.97,1280,720,train +xmwGBXYofEE,17034,17280,541-231-982-672,29.97,1280,720,train +xmwGBXYofEE,17280,17423,591-212-1037-658,29.97,1280,720,train +xmwGBXYofEE,19640,19840,446-115-1025-694,29.97,1280,720,train +xmwGBXYofEE,20042,20205,501-201-973-673,29.97,1280,720,train +xmwGBXYofEE,20507,20674,445-189-969-713,29.97,1280,720,train +xmwGBXYofEE,20986,21170,446-168-998-720,29.97,1280,720,train +xmwGBXYofEE,21646,21806,271-153-832-714,29.97,1280,720,train +xmwGBXYofEE,21806,21945,306-125-885-704,29.97,1280,720,train +0JKfDKo-3DM,0,1024,376-188-861-673,29.97,1280,720,train +0JKfDKo-3DM,1024,2048,373-203-840-670,29.97,1280,720,train +0JKfDKo-3DM,2048,3072,332-194-807-669,29.97,1280,720,train +0JKfDKo-3DM,3072,4096,352-211-819-678,29.97,1280,720,train +0JKfDKo-3DM,4096,5120,357-209-823-675,29.97,1280,720,train +0JKfDKo-3DM,5120,6144,353-206-820-673,29.97,1280,720,train +0JKfDKo-3DM,6144,6500,359-205-823-669,29.97,1280,720,train +0JKfDKo-3DM,6500,7028,265-217-729-681,29.97,1280,720,train +0JKfDKo-3DM,7028,7522,351-202-826-677,29.97,1280,720,train +0JKfDKo-3DM,7522,8546,241-200-724-683,29.97,1280,720,train 
+0JKfDKo-3DM,8608,8862,236-206-710-680,29.97,1280,720,train +0JKfDKo-3DM,8862,9089,318-214-789-685,29.97,1280,720,train +0JKfDKo-3DM,9089,9631,186-221-647-682,29.97,1280,720,train +0JKfDKo-3DM,9631,10567,68-222-543-697,29.97,1280,720,train +0JKfDKo-3DM,10567,10874,202-228-655-681,29.97,1280,720,train +0JKfDKo-3DM,10874,11515,316-206-780-670,29.97,1280,720,train +0JKfDKo-3DM,11537,12300,349-211-812-674,29.97,1280,720,train +0JKfDKo-3DM,12302,12473,362-211-819-668,29.97,1280,720,train +0JKfDKo-3DM,12473,13497,222-217-680-675,29.97,1280,720,train +0JKfDKo-3DM,13654,14029,337-218-786-667,29.97,1280,720,train +0JKfDKo-3DM,14143,14588,208-224-650-666,29.97,1280,720,train +0JKfDKo-3DM,14627,15338,352-233-779-660,29.97,1280,720,train +0JKfDKo-3DM,15338,15549,276-236-705-665,29.97,1280,720,train +0JKfDKo-3DM,15682,16706,350-227-781-658,29.97,1280,720,train +0JKfDKo-3DM,16782,17147,300-232-745-677,29.97,1280,720,train +0JKfDKo-3DM,17147,18170,225-239-661-675,29.97,1280,720,train +0JKfDKo-3DM,18170,19101,291-223-744-676,29.97,1280,720,train +0JKfDKo-3DM,19101,20125,204-232-649-677,29.97,1280,720,train +0JKfDKo-3DM,20125,20374,225-229-665-669,29.97,1280,720,train +rKilyW_awUo,1458,1774,435-162-1349-1076,29.97,1920,1080,train +rKilyW_awUo,1780,1949,515-163-1395-1043,29.97,1920,1080,train +rKilyW_awUo,2228,2389,439-93-1314-968,29.97,1920,1080,train +rKilyW_awUo,2440,2823,585-108-1547-1070,29.97,1920,1080,train +rKilyW_awUo,2823,3063,421-149-1294-1022,29.97,1920,1080,train +rKilyW_awUo,3121,3381,422-134-1341-1053,29.97,1920,1080,train +rKilyW_awUo,3802,4043,464-132-1368-1036,29.97,1920,1080,train +rKilyW_awUo,4302,4673,471-127-1417-1073,29.97,1920,1080,train +rKilyW_awUo,6925,7605,467-4-1539-1076,29.97,1920,1080,train +rKilyW_awUo,7622,7778,467-85-1458-1076,29.97,1920,1080,train +rKilyW_awUo,7849,8024,513-126-1455-1068,29.97,1920,1080,train +rKilyW_awUo,8024,8209,350-157-1229-1036,29.97,1920,1080,train +rKilyW_awUo,8483,8795,475-135-1390-1050,29.97,1920,1080,train 
+rKilyW_awUo,9031,9180,370-80-1319-1029,29.97,1920,1080,train +rKilyW_awUo,9832,9968,524-167-1393-1036,29.97,1920,1080,train +rKilyW_awUo,10043,10191,359-157-1263-1061,29.97,1920,1080,train +rKilyW_awUo,10853,11314,596-122-1548-1074,29.97,1920,1080,train +jvwHcYbcyEU,16862,17082,458-107-1417-1066,29.97,1920,1080,train +ZjN0kL_SVJw,484,1026,486-99-1448-1061,29.97,1920,1080,train +ZjN0kL_SVJw,1430,1791,499-105-1453-1059,29.97,1920,1080,train +ZjN0kL_SVJw,1854,2092,496-87-1441-1032,29.97,1920,1080,train +ZjN0kL_SVJw,2608,2902,483-138-1423-1078,29.97,1920,1080,train +ZjN0kL_SVJw,4107,4453,501-116-1428-1043,29.97,1920,1080,train +ZjN0kL_SVJw,4640,4787,592-132-1446-986,29.97,1920,1080,train +ZjN0kL_SVJw,5007,5400,580-104-1516-1040,29.97,1920,1080,train +ZjN0kL_SVJw,5504,5765,625-139-1489-1003,29.97,1920,1080,train +ZjN0kL_SVJw,6068,6560,555-110-1460-1015,29.97,1920,1080,train +ZjN0kL_SVJw,6560,6708,557-64-1496-1003,29.97,1920,1080,train +ZjN0kL_SVJw,6978,7219,409-64-1398-1053,29.97,1920,1080,train +ZjN0kL_SVJw,7726,7867,428-169-1252-993,29.97,1920,1080,train +ZjN0kL_SVJw,8080,8216,521-166-1345-990,29.97,1920,1080,train +ZjN0kL_SVJw,8216,8506,586-123-1450-987,29.97,1920,1080,train +ZjN0kL_SVJw,9341,10027,523-128-1419-1024,29.97,1920,1080,train +ZjN0kL_SVJw,10645,10851,653-132-1555-1034,29.97,1920,1080,train +ZjN0kL_SVJw,11510,11645,406-135-1307-1036,29.97,1920,1080,train +ZjN0kL_SVJw,11818,12033,498-108-1365-975,29.97,1920,1080,train +ZjN0kL_SVJw,12558,12813,618-121-1502-1005,29.97,1920,1080,train +ZjN0kL_SVJw,13887,14539,466-102-1400-1036,29.97,1920,1080,train +ZjN0kL_SVJw,14632,14877,509-69-1478-1038,29.97,1920,1080,train +ZjN0kL_SVJw,15009,15188,467-112-1329-974,29.97,1920,1080,train +ZjN0kL_SVJw,15790,16165,488-52-1488-1052,29.97,1920,1080,train +ZjN0kL_SVJw,16515,16797,575-88-1481-994,29.97,1920,1080,train +ZjN0kL_SVJw,16918,17090,481-109-1382-1010,29.97,1920,1080,train +ZjN0kL_SVJw,17706,18370,477-109-1388-1020,29.97,1920,1080,train 
+ZjN0kL_SVJw,18864,19106,628-182-1450-1004,29.97,1920,1080,train +ZjN0kL_SVJw,19248,19483,626-187-1431-992,29.97,1920,1080,train +ZjN0kL_SVJw,19574,19705,700-154-1523-977,29.97,1920,1080,train +ZjN0kL_SVJw,19995,20148,613-200-1412-999,29.97,1920,1080,train +ZjN0kL_SVJw,20148,20438,596-214-1423-1041,29.97,1920,1080,train +ZjN0kL_SVJw,20754,21251,526-182-1375-1031,29.97,1920,1080,train +ZjN0kL_SVJw,21463,21675,575-203-1396-1024,29.97,1920,1080,train +ZjN0kL_SVJw,21978,22118,565-206-1431-1072,29.97,1920,1080,train +ZjN0kL_SVJw,22364,22549,564-144-1419-999,29.97,1920,1080,train +ZjN0kL_SVJw,22627,22805,430-123-1344-1037,29.97,1920,1080,train +ZjN0kL_SVJw,23025,23480,440-94-1351-1005,29.97,1920,1080,train +ZjN0kL_SVJw,23672,24011,490-60-1457-1027,29.97,1920,1080,train +ZjN0kL_SVJw,24369,24544,500-150-1382-1032,29.97,1920,1080,train +ZjN0kL_SVJw,25051,25218,498-149-1376-1027,29.97,1920,1080,train +ZjN0kL_SVJw,26621,26807,473-111-1367-1005,29.97,1920,1080,train +ZjN0kL_SVJw,27127,27435,347-60-1277-990,29.97,1920,1080,train +ZjN0kL_SVJw,27740,28028,574-84-1531-1041,29.97,1920,1080,train +ZjN0kL_SVJw,28375,28601,423-77-1311-965,29.97,1920,1080,train +ZjN0kL_SVJw,28832,28963,319-64-1310-1055,29.97,1920,1080,train +ZjN0kL_SVJw,29329,29545,371-83-1337-1049,29.97,1920,1080,train +ZjN0kL_SVJw,29652,29809,543-75-1518-1050,29.97,1920,1080,train +ZjN0kL_SVJw,30249,30753,469-75-1428-1034,29.97,1920,1080,train +ZjN0kL_SVJw,31021,31151,551-84-1470-1003,29.97,1920,1080,train +ZjN0kL_SVJw,31176,31444,449-60-1400-1011,29.97,1920,1080,train +ZjN0kL_SVJw,31616,31947,386-94-1339-1047,29.97,1920,1080,train +ZjN0kL_SVJw,31977,32248,671-75-1618-1022,29.97,1920,1080,train +ZjN0kL_SVJw,32391,32575,485-140-1359-1014,29.97,1920,1080,train +ZjN0kL_SVJw,32575,32830,468-142-1383-1057,29.97,1920,1080,train +ZjN0kL_SVJw,32830,33120,480-99-1405-1024,29.97,1920,1080,train +ZjN0kL_SVJw,33497,33680,414-102-1315-1003,29.97,1920,1080,train +ZjN0kL_SVJw,33975,34175,571-177-1418-1024,29.97,1920,1080,train 
+ZjN0kL_SVJw,34260,34450,461-172-1345-1056,29.97,1920,1080,train +ZjN0kL_SVJw,34592,35099,470-96-1401-1027,29.97,1920,1080,train +ZjN0kL_SVJw,35586,35735,436-93-1387-1044,29.97,1920,1080,train +ZjN0kL_SVJw,35735,35919,655-156-1533-1034,29.97,1920,1080,train +ZjN0kL_SVJw,36098,36243,569-177-1425-1033,29.97,1920,1080,train +ZjN0kL_SVJw,36631,36898,426-109-1392-1075,29.97,1920,1080,train +ZjN0kL_SVJw,37391,37543,407-86-1373-1052,29.97,1920,1080,train +ZjN0kL_SVJw,37842,38109,464-103-1420-1059,29.97,1920,1080,train +ZjN0kL_SVJw,38109,38345,416-59-1369-1012,29.97,1920,1080,train +ZjN0kL_SVJw,39026,39156,540-174-1410-1044,29.97,1920,1080,train +ZjN0kL_SVJw,39519,39951,511-160-1351-1000,29.97,1920,1080,train +ZjN0kL_SVJw,40298,40634,500-155-1339-994,29.97,1920,1080,train +ZjN0kL_SVJw,40681,40819,525-129-1408-1012,29.97,1920,1080,train +ZjN0kL_SVJw,41084,41399,600-69-1536-1005,29.97,1920,1080,train +ZjN0kL_SVJw,42242,42392,550-143-1439-1032,29.97,1920,1080,train +ZjN0kL_SVJw,42392,42947,437-101-1348-1012,29.97,1920,1080,train +ZjN0kL_SVJw,42947,43160,464-122-1405-1063,29.97,1920,1080,train +ZjN0kL_SVJw,43560,43764,499-131-1394-1026,29.97,1920,1080,train +ZjN0kL_SVJw,44320,44604,446-65-1420-1039,29.97,1920,1080,train +ZjN0kL_SVJw,44822,44998,441-106-1380-1045,29.97,1920,1080,train +ZjN0kL_SVJw,45217,45352,474-25-1468-1019,29.97,1920,1080,train +ZjN0kL_SVJw,45463,45647,470-74-1425-1029,29.97,1920,1080,train +ZjN0kL_SVJw,45647,46118,516-27-1507-1018,29.97,1920,1080,train +ZjN0kL_SVJw,47084,47212,459-116-1422-1079,29.97,1920,1080,train +ZjN0kL_SVJw,47552,47970,411-59-1408-1056,29.97,1920,1080,train +ZjN0kL_SVJw,48147,48725,436-30-1445-1039,29.97,1920,1080,train +ZjN0kL_SVJw,48725,49050,682-81-1606-1005,29.97,1920,1080,train +ZjN0kL_SVJw,49753,49894,488-103-1403-1018,29.97,1920,1080,train +ZjN0kL_SVJw,49910,50138,467-56-1464-1053,29.97,1920,1080,train +ZjN0kL_SVJw,50507,50801,579-253-1280-954,29.97,1920,1080,train +ZjN0kL_SVJw,51795,52079,489-79-1412-1002,29.97,1920,1080,train 
+ZjN0kL_SVJw,52313,52812,478-177-1317-1016,29.97,1920,1080,train +ZjN0kL_SVJw,52968,53141,496-127-1334-965,29.97,1920,1080,train +ZjN0kL_SVJw,54177,54583,440-51-1442-1053,29.97,1920,1080,train +ZjN0kL_SVJw,54984,55128,543-97-1415-969,29.97,1920,1080,train +ZjN0kL_SVJw,55256,55425,520-190-1311-981,29.97,1920,1080,train +ZjN0kL_SVJw,56476,57029,471-56-1425-1010,29.97,1920,1080,train +ZjN0kL_SVJw,57533,57678,667-70-1622-1025,29.97,1920,1080,train +ZjN0kL_SVJw,57794,58094,500-63-1455-1018,29.97,1920,1080,train +ZjN0kL_SVJw,59433,59619,473-90-1394-1011,29.97,1920,1080,train +ZjN0kL_SVJw,60005,60236,445-41-1407-1003,29.97,1920,1080,train +ZjN0kL_SVJw,60236,60964,477-116-1411-1050,29.97,1920,1080,train +ZjN0kL_SVJw,61106,61308,558-107-1459-1008,29.97,1920,1080,train +ZjN0kL_SVJw,62100,62696,440-35-1476-1071,29.97,1920,1080,train +ZjN0kL_SVJw,63604,63847,534-125-1319-910,29.97,1920,1080,train +ZjN0kL_SVJw,64809,65833,483-70-1452-1039,29.97,1920,1080,train +ZjN0kL_SVJw,66046,66329,551-136-1383-968,29.97,1920,1080,train +ZjN0kL_SVJw,66965,67124,595-118-1481-1004,29.97,1920,1080,train +ZjN0kL_SVJw,67155,68081,526-117-1420-1011,29.97,1920,1080,train +ZjN0kL_SVJw,68179,68369,525-112-1376-963,29.97,1920,1080,train +ZjN0kL_SVJw,68494,68671,446-39-1445-1038,29.97,1920,1080,train +ZjN0kL_SVJw,68930,69343,434-46-1436-1048,29.97,1920,1080,train +ZjN0kL_SVJw,69642,69859,535-127-1446-1038,29.97,1920,1080,train +ZjN0kL_SVJw,70223,70396,640-199-1425-984,29.97,1920,1080,train +ZjN0kL_SVJw,72195,72374,598-186-1369-957,29.97,1920,1080,train +ZjN0kL_SVJw,72374,72640,589-146-1409-966,29.97,1920,1080,train +ZjN0kL_SVJw,73008,73137,526-212-1273-959,29.97,1920,1080,train +ZjN0kL_SVJw,73240,73380,673-267-1359-953,29.97,1920,1080,train +ZjN0kL_SVJw,73644,73779,556-164-1354-962,29.97,1920,1080,train +ZjN0kL_SVJw,74163,74487,502-156-1353-1007,29.97,1920,1080,train +ZjN0kL_SVJw,74693,74823,472-98-1355-981,29.97,1920,1080,train +ZjN0kL_SVJw,77783,78105,293-184-1024-915,29.97,1920,1080,train 
+ZjN0kL_SVJw,84420,84673,435-196-1163-924,29.97,1920,1080,train +ZjN0kL_SVJw,84901,85161,470-71-1437-1038,29.97,1920,1080,train +ZjN0kL_SVJw,85253,85513,423-19-1467-1063,29.97,1920,1080,train +ZjN0kL_SVJw,87050,88074,492-82-1417-1007,29.97,1920,1080,train +ZjN0kL_SVJw,88509,88751,529-161-1366-998,29.97,1920,1080,train +ZjN0kL_SVJw,88883,89056,579-176-1404-1001,29.97,1920,1080,train +ZjN0kL_SVJw,89056,89290,569-132-1437-1000,29.97,1920,1080,train +ZjN0kL_SVJw,89290,89565,537-177-1399-1039,29.97,1920,1080,train +ZjN0kL_SVJw,89567,89891,527-123-1433-1029,29.97,1920,1080,train +ZjN0kL_SVJw,90851,91195,570-178-1396-1004,29.97,1920,1080,train +ZjN0kL_SVJw,91442,91871,472-146-1326-1000,29.97,1920,1080,train +ZjN0kL_SVJw,92039,92215,581-120-1489-1028,29.97,1920,1080,train +ZjN0kL_SVJw,92223,92364,481-31-1450-1000,29.97,1920,1080,train +ZjN0kL_SVJw,93338,93652,464-52-1467-1055,29.97,1920,1080,train +ZjN0kL_SVJw,93653,93893,565-98-1479-1012,29.97,1920,1080,train +ZjN0kL_SVJw,93935,94135,468-109-1365-1006,29.97,1920,1080,train +ZjN0kL_SVJw,94183,94754,381-46-1352-1017,29.97,1920,1080,train +ZjN0kL_SVJw,94967,95169,468-101-1371-1004,29.97,1920,1080,train +ZjN0kL_SVJw,95169,95467,477-55-1446-1024,29.97,1920,1080,train +ZjN0kL_SVJw,95854,96270,481-82-1439-1040,29.97,1920,1080,train +ZjN0kL_SVJw,96581,97075,493-78-1456-1041,29.97,1920,1080,train +ZjN0kL_SVJw,97442,97937,457-67-1459-1069,29.97,1920,1080,train +ZjN0kL_SVJw,98039,98628,418-92-1362-1036,29.97,1920,1080,train +FBuF0xOal9M,2445,2573,381-299-1150-1068,59.94,1920,1080,test +FBuF0xOal9M,2697,2840,655-362-1362-1069,59.94,1920,1080,test +FBuF0xOal9M,5168,5451,549-253-1357-1061,59.94,1920,1080,test +FBuF0xOal9M,6989,7146,565-239-1404-1078,59.94,1920,1080,test +FBuF0xOal9M,8657,8880,497-321-1251-1075,59.94,1920,1080,test +FBuF0xOal9M,11184,11359,632-253-1452-1073,59.94,1920,1080,test +FBuF0xOal9M,12781,12914,569-338-1303-1072,59.94,1920,1080,test +FBuF0xOal9M,12914,13183,664-320-1399-1055,59.94,1920,1080,test 
+FBuF0xOal9M,13311,13440,584-363-1290-1069,59.94,1920,1080,test +FBuF0xOal9M,13521,13680,536-275-1333-1072,59.94,1920,1080,test +FBuF0xOal9M,14632,14977,459-87-1441-1069,59.94,1920,1080,test +FBuF0xOal9M,15166,15296,761-317-1507-1063,59.94,1920,1080,test +FBuF0xOal9M,16204,16425,614-198-1423-1007,59.94,1920,1080,test +FBuF0xOal9M,16845,17032,467-213-1255-1001,59.94,1920,1080,test +FBuF0xOal9M,18334,18480,510-240-1262-992,59.94,1920,1080,test +FBuF0xOal9M,18515,18662,643-279-1341-977,59.94,1920,1080,test +FBuF0xOal9M,18892,19332,476-208-1280-1012,59.94,1920,1080,test +FBuF0xOal9M,19516,19645,585-268-1347-1030,59.94,1920,1080,test +FBuF0xOal9M,19762,19920,494-242-1258-1006,59.94,1920,1080,test +FBuF0xOal9M,20546,20880,531-180-1359-1008,59.94,1920,1080,test +FBuF0xOal9M,21432,21717,629-231-1424-1026,59.94,1920,1080,test +FBuF0xOal9M,22427,22560,692-223-1518-1049,59.94,1920,1080,test +FBuF0xOal9M,23027,23171,617-250-1371-1004,59.94,1920,1080,test +FBuF0xOal9M,23520,23680,379-157-1268-1046,59.94,1920,1080,test +FBuF0xOal9M,25129,25327,427-112-1360-1045,59.94,1920,1080,test +FBuF0xOal9M,25690,25859,429-235-1223-1029,59.94,1920,1080,test +FBuF0xOal9M,25920,26069,397-237-1181-1021,59.94,1920,1080,test +FBuF0xOal9M,26990,27198,599-219-1436-1056,59.94,1920,1080,test +FBuF0xOal9M,27360,27641,466-353-1154-1041,59.94,1920,1080,test +FBuF0xOal9M,27676,27831,299-279-1063-1043,59.94,1920,1080,test +FBuF0xOal9M,27831,27985,525-314-1259-1048,59.94,1920,1080,test +FBuF0xOal9M,28148,28335,557-284-1309-1036,59.94,1920,1080,test +FBuF0xOal9M,28446,28683,597-311-1339-1053,59.94,1920,1080,test +FBuF0xOal9M,28903,29040,632-213-1490-1071,59.94,1920,1080,test +FBuF0xOal9M,29146,29407,515-252-1342-1079,59.94,1920,1080,test +FBuF0xOal9M,29520,29741,574-230-1357-1013,59.94,1920,1080,test +FBuF0xOal9M,30073,30214,458-228-1252-1022,59.94,1920,1080,test +FBuF0xOal9M,30241,30452,560-267-1307-1014,59.94,1920,1080,test +FBuF0xOal9M,30639,30929,443-244-1243-1044,59.94,1920,1080,test 
+FBuF0xOal9M,30929,31220,588-244-1358-1014,59.94,1920,1080,test +FBuF0xOal9M,31760,31920,535-127-1418-1010,59.94,1920,1080,test +FBuF0xOal9M,31920,32073,478-277-1209-1008,59.94,1920,1080,test +FBuF0xOal9M,32312,32523,609-299-1316-1006,59.94,1920,1080,test +FBuF0xOal9M,32611,32740,599-234-1371-1006,59.94,1920,1080,test +FBuF0xOal9M,32744,32946,469-235-1220-986,59.94,1920,1080,test +FBuF0xOal9M,33424,33581,680-241-1437-998,59.94,1920,1080,test +FBuF0xOal9M,33930,34077,919-208-1703-992,59.94,1920,1080,test +FBuF0xOal9M,37437,37599,560-209-1356-1005,59.94,1920,1080,test +FBuF0xOal9M,37737,37992,511-277-1244-1010,59.94,1920,1080,test +FBuF0xOal9M,38269,38400,540-234-1313-1007,59.94,1920,1080,test +FBuF0xOal9M,40260,40473,568-294-1284-1010,59.94,1920,1080,test +FBuF0xOal9M,42017,42216,462-132-1370-1040,59.94,1920,1080,test +FBuF0xOal9M,42959,43131,666-256-1430-1020,59.94,1920,1080,test +FBuF0xOal9M,43327,43575,529-151-1410-1032,59.94,1920,1080,test +FBuF0xOal9M,44165,44384,553-205-1388-1040,59.94,1920,1080,test +FBuF0xOal9M,45663,46051,487-178-1294-985,59.94,1920,1080,test +FBuF0xOal9M,46320,46458,620-191-1453-1024,59.94,1920,1080,test +FBuF0xOal9M,46555,46733,558-217-1340-999,59.94,1920,1080,test +FBuF0xOal9M,46824,47542,509-108-1421-1020,59.94,1920,1080,test diff --git a/firstordermodel/data/taichi256.csv b/firstordermodel/data/taichi256.csv new file mode 100644 index 0000000..da1ad51 --- /dev/null +++ b/firstordermodel/data/taichi256.csv @@ -0,0 +1,51 @@ +distance,source,driving,frame +3.54437869822485,ab28GAufK8o#000261#000596.mp4,aDyyTMUBoLE#000164#000351.mp4,0 +2.8639053254437887,DMEaUoA8EPE#000028#000354.mp4,0Q914by5A98#010440#010764.mp4,0 +2.153846153846153,L82WHgYRq6I#000021#000479.mp4,0Q914by5A98#010440#010764.mp4,0 +2.8994082840236666,oNkBx4CZuEg#000000#001024.mp4,DMEaUoA8EPE#000028#000354.mp4,0 +3.3905325443786998,ab28GAufK8o#000261#000596.mp4,uEqWZ9S_-Lw#000089#000581.mp4,0 +3.266272189349112,0Q914by5A98#010440#010764.mp4,ab28GAufK8o#000261#000596.mp4,0 
+2.7514792899408294,WlDYrq8K6nk#008186#008512.mp4,OiblkvkAHWM#014331#014459.mp4,0 +3.0177514792899407,oNkBx4CZuEg#001024#002048.mp4,aDyyTMUBoLE#000375#000518.mp4,0 +3.4792899408284064,aDyyTMUBoLE#000164#000351.mp4,w2awOCDRtrc#001729#002009.mp4,0 +2.769230769230769,oNkBx4CZuEg#000000#001024.mp4,L82WHgYRq6I#000021#000479.mp4,0 +3.8047337278106514,ab28GAufK8o#000261#000596.mp4,w2awOCDRtrc#001729#002009.mp4,0 +3.4260355029585763,w2awOCDRtrc#001729#002009.mp4,oNkBx4CZuEg#000000#001024.mp4,0 +3.313609467455621,DMEaUoA8EPE#000028#000354.mp4,WlDYrq8K6nk#005943#006135.mp4,0 +3.8402366863905333,oNkBx4CZuEg#001024#002048.mp4,ab28GAufK8o#000261#000596.mp4,0 +3.3254437869822504,aDyyTMUBoLE#000164#000351.mp4,oNkBx4CZuEg#000000#001024.mp4,0 +1.2485207100591724,0Q914by5A98#010440#010764.mp4,aDyyTMUBoLE#000164#000351.mp4,0 +3.804733727810652,OiblkvkAHWM#006251#006533.mp4,aDyyTMUBoLE#000375#000518.mp4,0 +3.662721893491124,uEqWZ9S_-Lw#000089#000581.mp4,DMEaUoA8EPE#000028#000354.mp4,0 +3.230769230769233,A3ZmT97hAWU#000095#000678.mp4,ab28GAufK8o#000261#000596.mp4,0 +3.3668639053254434,w81Tr0Dp1K8#015329#015485.mp4,WlDYrq8K6nk#008186#008512.mp4,0 +3.313609467455621,WlDYrq8K6nk#005943#006135.mp4,DMEaUoA8EPE#000028#000354.mp4,0 +2.7514792899408294,OiblkvkAHWM#014331#014459.mp4,WlDYrq8K6nk#008186#008512.mp4,0 +1.964497041420118,L82WHgYRq6I#000021#000479.mp4,DMEaUoA8EPE#000028#000354.mp4,0 +3.78698224852071,FBuF0xOal9M#046824#047542.mp4,lCb5w6n8kPs#011879#012014.mp4,0 +3.92307692307692,ab28GAufK8o#000261#000596.mp4,L82WHgYRq6I#000021#000479.mp4,0 +3.8402366863905333,ab28GAufK8o#000261#000596.mp4,oNkBx4CZuEg#001024#002048.mp4,0 +3.828402366863905,ab28GAufK8o#000261#000596.mp4,OiblkvkAHWM#006251#006533.mp4,0 +2.041420118343196,L82WHgYRq6I#000021#000479.mp4,aDyyTMUBoLE#000164#000351.mp4,0 +3.2485207100591724,0Q914by5A98#010440#010764.mp4,w2awOCDRtrc#001729#002009.mp4,0 +3.2485207100591746,oNkBx4CZuEg#000000#001024.mp4,0Q914by5A98#010440#010764.mp4,0 
+1.964497041420118,DMEaUoA8EPE#000028#000354.mp4,L82WHgYRq6I#000021#000479.mp4,0 +3.5266272189349115,kgvcI9oe3NI#001578#001763.mp4,lCb5w6n8kPs#004451#004631.mp4,0 +3.005917159763317,A3ZmT97hAWU#000095#000678.mp4,0Q914by5A98#010440#010764.mp4,0 +3.230769230769233,ab28GAufK8o#000261#000596.mp4,A3ZmT97hAWU#000095#000678.mp4,0 +3.5266272189349115,lCb5w6n8kPs#004451#004631.mp4,kgvcI9oe3NI#001578#001763.mp4,0 +2.769230769230769,L82WHgYRq6I#000021#000479.mp4,oNkBx4CZuEg#000000#001024.mp4,0 +3.165680473372782,WlDYrq8K6nk#005943#006135.mp4,w81Tr0Dp1K8#001375#001516.mp4,0 +2.8994082840236666,DMEaUoA8EPE#000028#000354.mp4,oNkBx4CZuEg#000000#001024.mp4,0 +2.4556213017751523,0Q914by5A98#010440#010764.mp4,mndSqTrxpts#000000#000175.mp4,0 +2.201183431952659,A3ZmT97hAWU#000095#000678.mp4,VMSqvTE90hk#007168#007312.mp4,0 +3.8047337278106514,w2awOCDRtrc#001729#002009.mp4,ab28GAufK8o#000261#000596.mp4,0 +3.769230769230769,uEqWZ9S_-Lw#000089#000581.mp4,0Q914by5A98#010440#010764.mp4,0 +3.6568047337278102,A3ZmT97hAWU#000095#000678.mp4,aDyyTMUBoLE#000164#000351.mp4,0 +3.7869822485207107,uEqWZ9S_-Lw#000089#000581.mp4,L82WHgYRq6I#000021#000479.mp4,0 +3.78698224852071,lCb5w6n8kPs#011879#012014.mp4,FBuF0xOal9M#046824#047542.mp4,0 +3.591715976331361,nAQEOC1Z10M#020177#020600.mp4,w81Tr0Dp1K8#004036#004218.mp4,0 +3.8757396449704156,uEqWZ9S_-Lw#000089#000581.mp4,aDyyTMUBoLE#000164#000351.mp4,0 +2.45562130177515,aDyyTMUBoLE#000164#000351.mp4,DMEaUoA8EPE#000028#000354.mp4,0 +3.5502958579881647,uEqWZ9S_-Lw#000089#000581.mp4,OiblkvkAHWM#006251#006533.mp4,0 +3.7928994082840224,aDyyTMUBoLE#000375#000518.mp4,ab28GAufK8o#000261#000596.mp4,0 diff --git a/firstordermodel/demo.ipynb b/firstordermodel/demo.ipynb new file mode 100644 index 0000000..00d0ec0 --- /dev/null +++ b/firstordermodel/demo.ipynb @@ -0,0 +1,543 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "first-order-model-demo", + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": 
"Python 3" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open", + "\"Kaggle\"" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "cdO_RxQZLahB" + }, + "source": [ + "# Demo for paper \"First Order Motion Model for Image Animation\"\n", + "To try the demo, press the 2 play buttons in order and scroll to the bottom. Note that it may take several minutes to load." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "UCMFMJV7K-ag" + }, + "source": [ + "%%capture\n", + "%pip install ffmpeg-python imageio-ffmpeg\n", + "!git init .\n", + "!git remote add origin https://github.com/AliaksandrSiarohin/first-order-model\n", + "!git pull origin master\n", + "!git clone https://github.com/graphemecluster/first-order-model-demo demo" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "Oxi6-riLOgnm" + }, + "source": [ + "import IPython.display\n", + "import PIL.Image\n", + "import cv2\n", + "import ffmpeg\n", + "import imageio\n", + "import io\n", + "import ipywidgets\n", + "import numpy\n", + "import os.path\n", + "import requests\n", + "import skimage.transform\n", + "import warnings\n", + "from base64 import b64encode\n", + "from demo import load_checkpoints, make_animation # type: ignore (local file)\n", + "from google.colab import files, output\n", + "from IPython.display import HTML, Javascript\n", + "from shutil import copyfileobj\n", + "from skimage import img_as_ubyte\n", + "from tempfile import NamedTemporaryFile\n", + "from tqdm.auto import tqdm\n", + "warnings.filterwarnings(\"ignore\")\n", + "os.makedirs(\"user\", exist_ok=True)\n", + "\n", + "display(HTML(\"\"\"\n", + "\n", + "\"\"\"))\n", + "\n", + "def thumbnail(file):\n", + "\treturn imageio.get_reader(file, mode='I', format='FFMPEG').get_next_data()\n", + "\n", + "def create_image(i, j):\n", + "\timage_widget = 
ipywidgets.Image.from_file('demo/images/%d%d.png' % (i, j))\n", + "\timage_widget.add_class('resource')\n", + "\timage_widget.add_class('resource-image')\n", + "\timage_widget.add_class('resource-image%d%d' % (i, j))\n", + "\treturn image_widget\n", + "\n", + "def create_video(i):\n", + "\tvideo_widget = ipywidgets.Image(\n", + "\t\tvalue=cv2.imencode('.png', cv2.cvtColor(thumbnail('demo/videos/%d.mp4' % i), cv2.COLOR_RGB2BGR))[1].tostring(),\n", + "\t\tformat='png'\n", + "\t)\n", + "\tvideo_widget.add_class('resource')\n", + "\tvideo_widget.add_class('resource-video')\n", + "\tvideo_widget.add_class('resource-video%d' % i)\n", + "\treturn video_widget\n", + "\n", + "def create_title(title):\n", + "\ttitle_widget = ipywidgets.Label(title)\n", + "\ttitle_widget.add_class('title')\n", + "\treturn title_widget\n", + "\n", + "def download_output(button):\n", + "\tcomplete.layout.display = 'none'\n", + "\tloading.layout.display = ''\n", + "\tfiles.download('output.mp4')\n", + "\tloading.layout.display = 'none'\n", + "\tcomplete.layout.display = ''\n", + "\n", + "def convert_output(button):\n", + "\tcomplete.layout.display = 'none'\n", + "\tloading.layout.display = ''\n", + "\tffmpeg.input('output.mp4').output('scaled.mp4', vf='scale=1080x1080:flags=lanczos,pad=1920:1080:420:0').overwrite_output().run()\n", + "\tfiles.download('scaled.mp4')\n", + "\tloading.layout.display = 'none'\n", + "\tcomplete.layout.display = ''\n", + "\n", + "def back_to_main(button):\n", + "\tcomplete.layout.display = 'none'\n", + "\tmain.layout.display = ''\n", + "\n", + "label_or = ipywidgets.Label('or')\n", + "label_or.add_class('label-or')\n", + "\n", + "image_titles = ['Peoples', 'Cartoons', 'Dolls', 'Game of Thrones', 'Statues']\n", + "image_lengths = [8, 4, 8, 9, 4]\n", + "\n", + "image_tab = ipywidgets.Tab()\n", + "image_tab.children = [ipywidgets.HBox([create_image(i, j) for j in range(length)]) for i, length in enumerate(image_lengths)]\n", + "for i, title in 
enumerate(image_titles):\n", + "\timage_tab.set_title(i, title)\n", + "\n", + "input_image_widget = ipywidgets.Output()\n", + "input_image_widget.add_class('input-widget')\n", + "upload_input_image_button = ipywidgets.FileUpload(accept='image/*', button_style='primary')\n", + "upload_input_image_button.add_class('input-button')\n", + "image_part = ipywidgets.HBox([\n", + "\tipywidgets.VBox([input_image_widget, upload_input_image_button]),\n", + "\tlabel_or,\n", + "\timage_tab\n", + "])\n", + "\n", + "video_tab = ipywidgets.Tab()\n", + "video_tab.children = [ipywidgets.HBox([create_video(i) for i in range(5)])]\n", + "video_tab.set_title(0, 'All Videos')\n", + "\n", + "input_video_widget = ipywidgets.Output()\n", + "input_video_widget.add_class('input-widget')\n", + "upload_input_video_button = ipywidgets.FileUpload(accept='video/*', button_style='primary')\n", + "upload_input_video_button.add_class('input-button')\n", + "video_part = ipywidgets.HBox([\n", + "\tipywidgets.VBox([input_video_widget, upload_input_video_button]),\n", + "\tlabel_or,\n", + "\tvideo_tab\n", + "])\n", + "\n", + "model = ipywidgets.Dropdown(\n", + "\tdescription=\"Model:\",\n", + "\toptions=[\n", + "\t\t'vox',\n", + "\t\t'vox-adv',\n", + "\t\t'taichi',\n", + "\t\t'taichi-adv',\n", + "\t\t'nemo',\n", + "\t\t'mgif',\n", + "\t\t'fashion',\n", + "\t\t'bair'\n", + "\t]\n", + ")\n", + "warning = ipywidgets.HTML('Warning: Upload your own images and videos (see README)')\n", + "warning.add_class('warning')\n", + "model_part = ipywidgets.HBox([model, warning])\n", + "\n", + "relative = ipywidgets.Checkbox(description=\"Relative keypoint displacement (Inherit object proporions from the video)\", value=True)\n", + "adapt_movement_scale = ipywidgets.Checkbox(description=\"Adapt movement scale (Don’t touch unless you know want you are doing)\", value=True)\n", + "generate_button = ipywidgets.Button(description=\"Generate\", button_style='primary')\n", + "main = ipywidgets.VBox([\n", + 
"\tcreate_title('Choose Image'),\n", + "\timage_part,\n", + "\tcreate_title('Choose Video'),\n", + "\tvideo_part,\n", + "\tcreate_title('Settings'),\n", + "\tmodel_part,\n", + "\trelative,\n", + "\tadapt_movement_scale,\n", + "\tgenerate_button\n", + "])\n", + "\n", + "loader = ipywidgets.Label()\n", + "loader.add_class(\"loader\")\n", + "loading_label = ipywidgets.Label(\"This may take several minutes to process…\")\n", + "loading_label.add_class(\"loading-label\")\n", + "progress_bar = ipywidgets.Output()\n", + "loading = ipywidgets.VBox([loader, loading_label, progress_bar])\n", + "loading.add_class('loading')\n", + "\n", + "output_widget = ipywidgets.Output()\n", + "output_widget.add_class('output-widget')\n", + "download = ipywidgets.Button(description='Download', button_style='primary')\n", + "download.add_class('output-button')\n", + "download.on_click(download_output)\n", + "convert = ipywidgets.Button(description='Convert to 1920×1080', button_style='primary')\n", + "convert.add_class('output-button')\n", + "convert.on_click(convert_output)\n", + "back = ipywidgets.Button(description='Back', button_style='primary')\n", + "back.add_class('output-button')\n", + "back.on_click(back_to_main)\n", + "\n", + "comparison_widget = ipywidgets.Output()\n", + "comparison_widget.add_class('comparison-widget')\n", + "comparison_label = ipywidgets.Label('Comparison')\n", + "comparison_label.add_class('comparison-label')\n", + "complete = ipywidgets.HBox([\n", + "\tipywidgets.VBox([output_widget, download, convert, back]),\n", + "\tipywidgets.VBox([comparison_widget, comparison_label])\n", + "])\n", + "\n", + "display(ipywidgets.VBox([main, loading, complete]))\n", + "display(Javascript(\"\"\"\n", + "var images, videos;\n", + "function deselectImages() {\n", + "\timages.forEach(function(item) {\n", + "\t\titem.classList.remove(\"selected\");\n", + "\t});\n", + "}\n", + "function deselectVideos() {\n", + "\tvideos.forEach(function(item) {\n", + 
"\t\titem.classList.remove(\"selected\");\n", + "\t});\n", + "}\n", + "function invokePython(func) {\n", + "\tgoogle.colab.kernel.invokeFunction(\"notebook.\" + func, [].slice.call(arguments, 1), {});\n", + "}\n", + "setTimeout(function() {\n", + "\t(images = [].slice.call(document.getElementsByClassName(\"resource-image\"))).forEach(function(item) {\n", + "\t\titem.addEventListener(\"click\", function() {\n", + "\t\t\tdeselectImages();\n", + "\t\t\titem.classList.add(\"selected\");\n", + "\t\t\tinvokePython(\"select_image\", item.className.match(/resource-image(\\d\\d)/)[1]);\n", + "\t\t});\n", + "\t});\n", + "\timages[0].classList.add(\"selected\");\n", + "\t(videos = [].slice.call(document.getElementsByClassName(\"resource-video\"))).forEach(function(item) {\n", + "\t\titem.addEventListener(\"click\", function() {\n", + "\t\t\tdeselectVideos();\n", + "\t\t\titem.classList.add(\"selected\");\n", + "\t\t\tinvokePython(\"select_video\", item.className.match(/resource-video(\\d)/)[1]);\n", + "\t\t});\n", + "\t});\n", + "\tvideos[0].classList.add(\"selected\");\n", + "}, 1000);\n", + "\"\"\"))\n", + "\n", + "selected_image = None\n", + "def select_image(filename):\n", + "\tglobal selected_image\n", + "\tselected_image = resize(PIL.Image.open('demo/images/%s.png' % filename).convert(\"RGB\"))\n", + "\tinput_image_widget.clear_output(wait=True)\n", + "\twith input_image_widget:\n", + "\t\tdisplay(HTML('Image'))\n", + "\tinput_image_widget.remove_class('uploaded')\n", + "output.register_callback(\"notebook.select_image\", select_image)\n", + "\n", + "selected_video = None\n", + "def select_video(filename):\n", + "\tglobal selected_video\n", + "\tselected_video = 'demo/videos/%s.mp4' % filename\n", + "\tinput_video_widget.clear_output(wait=True)\n", + "\twith input_video_widget:\n", + "\t\tdisplay(HTML('Video'))\n", + "\tinput_video_widget.remove_class('uploaded')\n", + "output.register_callback(\"notebook.select_video\", select_video)\n", + "\n", + "def resize(image, 
size=(256, 256)):\n", + "\tw, h = image.size\n", + "\td = min(w, h)\n", + "\tr = ((w - d) // 2, (h - d) // 2, (w + d) // 2, (h + d) // 2)\n", + "\treturn image.resize(size, resample=PIL.Image.LANCZOS, box=r)\n", + "\n", + "def upload_image(change):\n", + "\tglobal selected_image\n", + "\tfor name, file_info in upload_input_image_button.value.items():\n", + "\t\tcontent = file_info['content']\n", + "\tif content is not None:\n", + "\t\tselected_image = resize(PIL.Image.open(io.BytesIO(content)).convert(\"RGB\"))\n", + "\t\tinput_image_widget.clear_output(wait=True)\n", + "\t\twith input_image_widget:\n", + "\t\t\tdisplay(selected_image)\n", + "\t\tinput_image_widget.add_class('uploaded')\n", + "\t\tdisplay(Javascript('deselectImages()'))\n", + "upload_input_image_button.observe(upload_image, names='value')\n", + "\n", + "def upload_video(change):\n", + "\tglobal selected_video\n", + "\tfor name, file_info in upload_input_video_button.value.items():\n", + "\t\tcontent = file_info['content']\n", + "\tif content is not None:\n", + "\t\tselected_video = 'user/' + name\n", + "\t\twith open(selected_video, 'wb') as video:\n", + "\t\t\tvideo.write(content)\n", + "\t\tpreview = resize(PIL.Image.fromarray(thumbnail(selected_video)).convert(\"RGB\"))\n", + "\t\tinput_video_widget.clear_output(wait=True)\n", + "\t\twith input_video_widget:\n", + "\t\t\tdisplay(preview)\n", + "\t\tinput_video_widget.add_class('uploaded')\n", + "\t\tdisplay(Javascript('deselectVideos()'))\n", + "upload_input_video_button.observe(upload_video, names='value')\n", + "\n", + "def change_model(change):\n", + "\tif model.value.startswith('vox'):\n", + "\t\twarning.remove_class('warn')\n", + "\telse:\n", + "\t\twarning.add_class('warn')\n", + "model.observe(change_model, names='value')\n", + "\n", + "def generate(button):\n", + "\tmain.layout.display = 'none'\n", + "\tloading.layout.display = ''\n", + "\tfilename = model.value + ('' if model.value == 'fashion' else '-cpk') + '.pth.tar'\n", + "\tif not 
os.path.isfile(filename):\n", + "\t\tresponse = requests.get('https://github.com/graphemecluster/first-order-model-demo/releases/download/checkpoints/' + filename, stream=True)\n", + "\t\twith progress_bar:\n", + "\t\t\twith tqdm.wrapattr(response.raw, 'read', total=int(response.headers.get('Content-Length', 0)), unit='B', unit_scale=True, unit_divisor=1024) as raw:\n", + "\t\t\t\twith open(filename, 'wb') as file:\n", + "\t\t\t\t\tcopyfileobj(raw, file)\n", + "\t\tprogress_bar.clear_output()\n", + "\treader = imageio.get_reader(selected_video, mode='I', format='FFMPEG')\n", + "\tfps = reader.get_meta_data()['fps']\n", + "\tdriving_video = []\n", + "\tfor frame in reader:\n", + "\t\tdriving_video.append(frame)\n", + "\tgenerator, kp_detector = load_checkpoints(config_path='config/%s-256.yaml' % model.value, checkpoint_path=filename)\n", + "\twith progress_bar:\n", + "\t\tpredictions = make_animation(\n", + "\t\t\tskimage.transform.resize(numpy.asarray(selected_image), (256, 256)),\n", + "\t\t\t[skimage.transform.resize(frame, (256, 256)) for frame in driving_video],\n", + "\t\t\tgenerator,\n", + "\t\t\tkp_detector,\n", + "\t\t\trelative=relative.value,\n", + "\t\t\tadapt_movement_scale=adapt_movement_scale.value\n", + "\t\t)\n", + "\tprogress_bar.clear_output()\n", + "\timageio.mimsave('output.mp4', [img_as_ubyte(frame) for frame in predictions], fps=fps)\n", + "\ttry:\n", + "\t\twith NamedTemporaryFile(suffix='.mp4') as output:\n", + "\t\t\tffmpeg.output(ffmpeg.input('output.mp4').video, ffmpeg.input(selected_video).audio, output.name, c='copy').run()\n", + "\t\t\twith open('output.mp4', 'wb') as result:\n", + "\t\t\t\tcopyfileobj(output, result)\n", + "\texcept ffmpeg.Error:\n", + "\t\tpass\n", + "\toutput_widget.clear_output(True)\n", + "\twith output_widget:\n", + "\t\tvideo_widget = ipywidgets.Video.from_file('output.mp4', autoplay=False, loop=False)\n", + "\t\tvideo_widget.add_class('video')\n", + "\t\tvideo_widget.add_class('video-left')\n", + 
"\t\tdisplay(video_widget)\n", + "\tcomparison_widget.clear_output(True)\n", + "\twith comparison_widget:\n", + "\t\tvideo_widget = ipywidgets.Video.from_file(selected_video, autoplay=False, loop=False, controls=False)\n", + "\t\tvideo_widget.add_class('video')\n", + "\t\tvideo_widget.add_class('video-right')\n", + "\t\tdisplay(video_widget)\n", + "\tdisplay(Javascript(\"\"\"\n", + "\tsetTimeout(function() {\n", + "\t\t(function(left, right) {\n", + "\t\t\tleft.addEventListener(\"play\", function() {\n", + "\t\t\t\tright.play();\n", + "\t\t\t});\n", + "\t\t\tleft.addEventListener(\"pause\", function() {\n", + "\t\t\t\tright.pause();\n", + "\t\t\t});\n", + "\t\t\tleft.addEventListener(\"seeking\", function() {\n", + "\t\t\t\tright.currentTime = left.currentTime;\n", + "\t\t\t});\n", + "\t\t\tright.muted = true;\n", + "\t\t})(document.getElementsByClassName(\"video-left\")[0], document.getElementsByClassName(\"video-right\")[0]);\n", + "\t}, 1000);\n", + "\t\"\"\"))\n", + "\tloading.layout.display = 'none'\n", + "\tcomplete.layout.display = ''\n", + "\n", + "generate_button.on_click(generate)\n", + "\n", + "loading.layout.display = 'none'\n", + "complete.layout.display = 'none'\n", + "select_image('00')\n", + "select_video('0')" + ], + "execution_count": null, + "outputs": [] + } + ] +} diff --git a/firstordermodel/demo.py b/firstordermodel/demo.py new file mode 100644 index 0000000..81a8c8d --- /dev/null +++ b/firstordermodel/demo.py @@ -0,0 +1,169 @@ +#!/user/bin/env python +# coding=utf-8 + +import uuid +from typing import Optional + +import gradio as gr +import matplotlib + +matplotlib.use('Agg') +import os +import sys +import yaml +from argparse import ArgumentParser +from tqdm import tqdm + +import imageio +import numpy as np +from skimage.transform import resize +from skimage import img_as_ubyte +import torch +from sync_batchnorm import DataParallelWithCallback + +from modules.generator import OcclusionAwareGenerator +from modules.keypoint_detector import 
KPDetector +from animate import normalize_kp +from scipy.spatial import ConvexHull + +if sys.version_info[0] < 3: + raise Exception("You must use Python 3 or higher. Recommended version is Python 3.7") + + +def load_checkpoints(config_path, checkpoint_path, cpu=True): + with open(config_path) as f: + config = yaml.load(f, Loader=yaml.FullLoader) + + generator = OcclusionAwareGenerator(**config['model_params']['generator_params'], + **config['model_params']['common_params']) + if not cpu: + generator.cuda() + + kp_detector = KPDetector(**config['model_params']['kp_detector_params'], + **config['model_params']['common_params']) + if not cpu: + kp_detector.cuda() + + if cpu: + checkpoint = torch.load(checkpoint_path, map_location=torch.device('cpu')) + else: + checkpoint = torch.load(checkpoint_path) + + generator.load_state_dict(checkpoint['generator']) + kp_detector.load_state_dict(checkpoint['kp_detector']) + + if not cpu: + generator = DataParallelWithCallback(generator) + kp_detector = DataParallelWithCallback(kp_detector) + + generator.eval() + kp_detector.eval() + + return generator, kp_detector + + +def make_animation(source_image, driving_video, generator, kp_detector, relative=True, adapt_movement_scale=True, + cpu=True): + with torch.no_grad(): + predictions = [] + source = torch.tensor(source_image[np.newaxis].astype(np.float32)).permute(0, 3, 1, 2) + if not cpu: + source = source.cuda() + driving = torch.tensor(np.array(driving_video)[np.newaxis].astype(np.float32)).permute(0, 4, 1, 2, 3) + kp_source = kp_detector(source) + kp_driving_initial = kp_detector(driving[:, :, 0]) + + for frame_idx in tqdm(range(driving.shape[2])): + driving_frame = driving[:, :, frame_idx] + if not cpu: + driving_frame = driving_frame.cuda() + kp_driving = kp_detector(driving_frame) + kp_norm = normalize_kp(kp_source=kp_source, kp_driving=kp_driving, + kp_driving_initial=kp_driving_initial, use_relative_movement=relative, + use_relative_jacobian=relative, 
adapt_movement_scale=adapt_movement_scale) + out = generator(source, kp_source=kp_source, kp_driving=kp_norm) + + predictions.append(np.transpose(out['prediction'].data.cpu().numpy(), [0, 2, 3, 1])[0]) + return predictions + + +def find_best_frame(source, driving, cpu=False): + import face_alignment + + def normalize_kp(kp): + kp = kp - kp.mean(axis=0, keepdims=True) + area = ConvexHull(kp[:, :2]).volume + area = np.sqrt(area) + kp[:, :2] = kp[:, :2] / area + return kp + + fa = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D, flip_input=True, + device='cpu' if cpu else 'cuda') + kp_source = fa.get_landmarks(255 * source)[0] + kp_source = normalize_kp(kp_source) + norm = float('inf') + frame_num = 0 + for i, image in tqdm(enumerate(driving)): + kp_driving = fa.get_landmarks(255 * image)[0] + kp_driving = normalize_kp(kp_driving) + new_norm = (np.abs(kp_source - kp_driving) ** 2).sum() + if new_norm < norm: + norm = new_norm + frame_num = i + return frame_num + + +def h_interface(input_image: np.ndarray): + parser = ArgumentParser() + opt = parser.parse_args() + opt.config = "./config/vox-adv-256.yaml" + opt.checkpoint = "./checkpoints/vox-adv-cpk.pth.tar" + opt.source_image = input_image + opt.driving_video = "./data/chuck.mp4" + opt.result_video = "./data/result.mp4".format(uuid.uuid1().hex) + opt.relative = True + opt.adapt_scale = True + opt.cpu = True + opt.find_best_frame = False + opt.best_frame = False + + source_image = opt.source_image + reader = imageio.get_reader(opt.driving_video) + fps = reader.get_meta_data()['fps'] + driving_video = [] + try: + for im in reader: + driving_video.append(im) + except RuntimeError: + pass + reader.close() + + source_image = resize(source_image, (256, 256))[..., :3] + driving_video = [resize(frame, (256, 256))[..., :3] for frame in driving_video] + generator, kp_detector = load_checkpoints(config_path=opt.config, checkpoint_path=opt.checkpoint, cpu=opt.cpu) + + if opt.find_best_frame or opt.best_frame is not 
None: + i = opt.best_frame if opt.best_frame is not None else find_best_frame(source_image, driving_video, cpu=opt.cpu) + print("Best frame: " + str(i)) + driving_forward = driving_video[i:] + driving_backward = driving_video[:(i + 1)][::-1] + predictions_forward = make_animation(source_image, driving_forward, generator, kp_detector, + relative=opt.relative, adapt_movement_scale=opt.adapt_scale, cpu=opt.cpu) + predictions_backward = make_animation(source_image, driving_backward, generator, kp_detector, + relative=opt.relative, adapt_movement_scale=opt.adapt_scale, cpu=opt.cpu) + predictions = predictions_backward[::-1] + predictions_forward[1:] + else: + predictions = make_animation(source_image, driving_video, generator, kp_detector, relative=opt.relative, + adapt_movement_scale=opt.adapt_scale, cpu=opt.cpu) + imageio.mimsave(opt.result_video, [img_as_ubyte(frame) for frame in predictions], fps=fps) + return opt.result_video + + +if __name__ == "__main__": + demo = gr.Interface( + fn=h_interface, + inputs=gr.Image(type="numpy", label="Input Image"), + outputs=gr.Video(label="Output Video") + ) + + demo.launch() diff --git a/firstordermodel/demo.txt b/firstordermodel/demo.txt new file mode 100644 index 0000000..9e0570a --- /dev/null +++ b/firstordermodel/demo.txt @@ -0,0 +1,168 @@ +import sys +import yaml +from argparse import ArgumentParser +from tqdm.auto import tqdm + +import imageio +import numpy as np +from skimage.transform import resize +from skimage import img_as_ubyte +import torch +from sync_batchnorm import DataParallelWithCallback + +from modules.generator import OcclusionAwareGenerator +from modules.keypoint_detector import KPDetector +from animate import normalize_kp + +import ffmpeg +from os.path import splitext +from shutil import copyfileobj +from tempfile import NamedTemporaryFile + +if sys.version_info[0] < 3: + raise Exception("You must use Python 3 or higher. 
Recommended version is Python 3.7") + +def load_checkpoints(config_path, checkpoint_path, cpu=True): + + with open(config_path) as f: + config = yaml.full_load(f) + + generator = OcclusionAwareGenerator(**config['model_params']['generator_params'], + **config['model_params']['common_params']) + if not cpu: + generator.cuda() + + kp_detector = KPDetector(**config['model_params']['kp_detector_params'], + **config['model_params']['common_params']) + if not cpu: + kp_detector.cuda() + + if cpu: + checkpoint = torch.load(checkpoint_path, map_location=torch.device('cpu')) + else: + checkpoint = torch.load(checkpoint_path) + + generator.load_state_dict(checkpoint['generator']) + kp_detector.load_state_dict(checkpoint['kp_detector']) + + if not cpu: + generator = DataParallelWithCallback(generator) + kp_detector = DataParallelWithCallback(kp_detector) + + generator.eval() + kp_detector.eval() + + return generator, kp_detector + + +def make_animation(source_image, driving_video, generator, kp_detector, relative=True, adapt_movement_scale=True, cpu=True): + with torch.no_grad(): + predictions = [] + source = torch.tensor(source_image[np.newaxis].astype(np.float32)).permute(0, 3, 1, 2) + if not cpu: + source = source.cuda() + driving = torch.tensor(np.array(driving_video)[np.newaxis].astype(np.float32)).permute(0, 4, 1, 2, 3) + kp_source = kp_detector(source) + kp_driving_initial = kp_detector(driving[:, :, 0]) + + for frame_idx in tqdm(range(driving.shape[2])): + driving_frame = driving[:, :, frame_idx] + if not cpu: + driving_frame = driving_frame.cuda() + kp_driving = kp_detector(driving_frame) + kp_norm = normalize_kp(kp_source=kp_source, kp_driving=kp_driving, + kp_driving_initial=kp_driving_initial, use_relative_movement=relative, + use_relative_jacobian=relative, adapt_movement_scale=adapt_movement_scale) + out = generator(source, kp_source=kp_source, kp_driving=kp_norm) + + predictions.append(np.transpose(out['prediction'].data.cpu().numpy(), [0, 2, 3, 1])[0]) + 
return predictions + +def find_best_frame(source, driving, cpu=False): + import face_alignment # type: ignore (local file) + from scipy.spatial import ConvexHull + + def normalize_kp(kp): + kp = kp - kp.mean(axis=0, keepdims=True) + area = ConvexHull(kp[:, :2]).volume + area = np.sqrt(area) + kp[:, :2] = kp[:, :2] / area + return kp + + fa = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D, flip_input=True, + device='cpu' if cpu else 'cuda') + kp_source = fa.get_landmarks(255 * source)[0] + kp_source = normalize_kp(kp_source) + norm = float('inf') + frame_num = 0 + for i, image in tqdm(enumerate(driving)): + kp_driving = fa.get_landmarks(255 * image)[0] + kp_driving = normalize_kp(kp_driving) + new_norm = (np.abs(kp_source - kp_driving) ** 2).sum() + if new_norm < norm: + norm = new_norm + frame_num = i + return frame_num + +if __name__ == "__main__": + parser = ArgumentParser() + parser.add_argument("--config", required=True, help="path to config") + parser.add_argument("--checkpoint", default='vox-cpk.pth.tar', help="path to checkpoint to restore") + + parser.add_argument("--source_image", default='sup-mat/source.png', help="path to source image") + parser.add_argument("--driving_video", default='driving.mp4', help="path to driving video") + parser.add_argument("--result_video", default='result.mp4', help="path to output") + + parser.add_argument("--relative", dest="relative", action="store_true", help="use relative or absolute keypoint coordinates") + parser.add_argument("--adapt_scale", dest="adapt_scale", action="store_true", help="adapt movement scale based on convex hull of keypoints") + + parser.add_argument("--find_best_frame", dest="find_best_frame", action="store_true", + help="Generate from the frame that is the most alligned with source. 
(Only for faces, requires face_aligment lib)") + + parser.add_argument("--best_frame", dest="best_frame", type=int, default=None, help="Set frame to start from.") + + parser.add_argument("--cpu", dest="cpu", action="store_true", help="cpu mode.") + + parser.add_argument("--audio", dest="audio", action="store_true", help="copy audio to output from the driving video" ) + + parser.set_defaults(relative=False) + parser.set_defaults(adapt_scale=False) + parser.set_defaults(audio_on=False) + + opt = parser.parse_args() + + source_image = imageio.imread(opt.source_image) + reader = imageio.get_reader(opt.driving_video) + fps = reader.get_meta_data()['fps'] + driving_video = [] + try: + for im in reader: + driving_video.append(im) + except RuntimeError: + pass + reader.close() + + source_image = resize(source_image, (256, 256))[..., :3] + driving_video = [resize(frame, (256, 256))[..., :3] for frame in driving_video] + generator, kp_detector = load_checkpoints(config_path=opt.config, checkpoint_path=opt.checkpoint, cpu=opt.cpu) + + if opt.find_best_frame or opt.best_frame is not None: + i = opt.best_frame if opt.best_frame is not None else find_best_frame(source_image, driving_video, cpu=opt.cpu) + print("Best frame: " + str(i)) + driving_forward = driving_video[i:] + driving_backward = driving_video[:(i+1)][::-1] + predictions_forward = make_animation(source_image, driving_forward, generator, kp_detector, relative=opt.relative, adapt_movement_scale=opt.adapt_scale, cpu=opt.cpu) + predictions_backward = make_animation(source_image, driving_backward, generator, kp_detector, relative=opt.relative, adapt_movement_scale=opt.adapt_scale, cpu=opt.cpu) + predictions = predictions_backward[::-1] + predictions_forward[1:] + else: + predictions = make_animation(source_image, driving_video, generator, kp_detector, relative=opt.relative, adapt_movement_scale=opt.adapt_scale, cpu=opt.cpu) + imageio.mimsave(opt.result_video, [img_as_ubyte(frame) for frame in predictions], fps=fps) + + 
if opt.audio: + try: + with NamedTemporaryFile(suffix=splitext(opt.result_video)[1]) as output: + ffmpeg.output(ffmpeg.input(opt.result_video).video, ffmpeg.input(opt.driving_video).audio, output.name, c='copy').run() + with open(opt.result_video, 'wb') as result: + copyfileobj(output, result) + except ffmpeg.Error: + print("Failed to copy audio: the driving video may have no audio track or the audio format is invalid.") \ No newline at end of file diff --git a/firstordermodel/demo_jupyter.ipynb b/firstordermodel/demo_jupyter.ipynb new file mode 100644 index 0000000..81dda5b --- /dev/null +++ b/firstordermodel/demo_jupyter.ipynb @@ -0,0 +1,804 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "view-in-github" + }, + "source": [ + "\"Open\"Kaggle\"" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "cdO_RxQZLahB" + }, + "source": [ + "# Demo for paper \"First Order Motion Model for Image Animation\"\n", + "To try the demo, press the 2 play buttons in order and scroll to the bottom. Note that it may take several minutes to load." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "id": "UCMFMJV7K-ag" + }, + "outputs": [], + "source": [ + "%%capture\n", + "%pip install ffmpeg-python imageio-ffmpeg\n", + "!git init .\n", + "!git remote add origin https://github.com/AliaksandrSiarohin/first-order-model\n", + "!git pull origin master\n", + "!git clone https://github.com/graphemecluster/first-order-model-demo demo" + ] + }, + { + "cell_type": "code", + "execution_count": 104, + "metadata": { + "id": "Oxi6-riLOgnm" + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "f53c7ccd3ec34f7ea8491237d5bf03ff", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "VBox(children=(VBox(children=(Label(value='Choose Image', _dom_classes=('title',)), HBox(children=(VBox(childr…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/javascript": [ + "\n", + "var images, videos;\n", + "function deselectImages() {\n", + "\timages.forEach(function(item) {\n", + "\t\titem.classList.remove(\"selected\");\n", + "\t});\n", + "}\n", + "function deselectVideos() {\n", + "\tvideos.forEach(function(item) {\n", + "\t\titem.classList.remove(\"selected\");\n", + "\t});\n", + "}\n", + "function invokePython(func) {\n", + "\tgoogle.colab.kernel.invokeFunction(\"notebook.\" + func, [].slice.call(arguments, 1), {});\n", + "}\n", + "setTimeout(function() {\n", + "\t(images = [].slice.call(document.getElementsByClassName(\"resource-image\"))).forEach(function(item) {\n", + "\t\titem.addEventListener(\"click\", function() {\n", + "\t\t\tdeselectImages();\n", + "\t\t\titem.classList.add(\"selected\");\n", + "\t\t\tinvokePython(\"select_image\", item.className.match(/resource-image(\\d\\d)/)[1]);\n", + "\t\t});\n", + "\t});\n", + 
"\timages[0].classList.add(\"selected\");\n", + "\t(videos = [].slice.call(document.getElementsByClassName(\"resource-video\"))).forEach(function(item) {\n", + "\t\titem.addEventListener(\"click\", function() {\n", + "\t\t\tdeselectVideos();\n", + "\t\t\titem.classList.add(\"selected\");\n", + "\t\t\tinvokePython(\"select_video\", item.className.match(/resource-video(\\d)/)[1]);\n", + "\t\t});\n", + "\t});\n", + "\tvideos[0].classList.add(\"selected\");\n", + "}, 1000);\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import IPython.display\n", + "import PIL.Image\n", + "import cv2\n", + "import ffmpeg\n", + "import imageio\n", + "import io\n", + "import ipywidgets\n", + "import numpy\n", + "import os.path\n", + "import requests\n", + "import skimage.transform\n", + "import warnings\n", + "from base64 import b64encode\n", + "from demo import load_checkpoints, make_animation # type: ignore (local file)\n", + "from IPython.display import HTML, Javascript\n", + "from shutil import copyfileobj\n", + "from skimage import img_as_ubyte\n", + "from tempfile import NamedTemporaryFile\n", + "import os\n", + "import ipywidgets as ipyw\n", + "from IPython.display import display, FileLink\n", + "warnings.filterwarnings(\"ignore\")\n", + "os.makedirs(\"user\", exist_ok=True)\n", + "\n", + "display(HTML(\"\"\"\n", + "\n", + "\"\"\"))\n", + "\n", + "\n", + "def uploaded_file(change):\n", + " save_dir = 'uploads'\n", + " if not os.path.exists(save_dir): os.mkdir(save_dir)\n", + " \n", + " uploads = change['new']\n", + " for upload in uploads:\n", + " filename = upload['name']\n", + " content = upload['content']\n", + " with open(os.path.join(save_dir,filename), 'wb') as f:\n", + " f.write(content)\n", + " with out:\n", + " print(change)\n", + " \n", + "def create_uploader():\n", + " uploader = ipyw.FileUpload(multiple=True)\n", + " display(uploader)\n", + " uploader.description = '📂 Upload'\n", + " 
uploader.observe(uploaded_file, names='value')\n", + "\n", + "def download_file(filename='./face.mp4') -> HTML:\n", + " fl=FileLink(filename)\n", + " fl.html_link_str =\"%s\"\n", + " \n", + " display(fl)\n", + " display(HTML(f\"\"\"\n", + "\n", + "\"\"\"))\n", + " \n", + "def thumbnail(file):\n", + "\treturn imageio.get_reader(file, mode='I', format='FFMPEG').get_next_data()\n", + "\n", + "def create_image(i, j):\n", + "\timage_widget = ipywidgets.Image.from_file('demo/images/%d%d.png' % (i, j))\n", + "\timage_widget.add_class('resource')\n", + "\timage_widget.add_class('resource-image')\n", + "\timage_widget.add_class('resource-image%d%d' % (i, j))\n", + "\treturn image_widget\n", + "\n", + "def create_video(i):\n", + "\tvideo_widget = ipywidgets.Image(\n", + "\t\tvalue=cv2.imencode('.png', cv2.cvtColor(thumbnail('demo/videos/%d.mp4' % i), cv2.COLOR_RGB2BGR))[1].tostring(),\n", + "\t\tformat='png'\n", + "\t)\n", + "\tvideo_widget.add_class('resource')\n", + "\tvideo_widget.add_class('resource-video')\n", + "\tvideo_widget.add_class('resource-video%d' % i)\n", + "\treturn video_widget\n", + "\n", + "def create_title(title):\n", + "\ttitle_widget = ipywidgets.Label(title)\n", + "\ttitle_widget.add_class('title')\n", + "\treturn title_widget\n", + "\n", + "def download_output(button):\n", + "\tcomplete.layout.display = 'none'\n", + "\tloading.layout.display = ''\n", + "\tdownload_file('./output.mp4')\n", + "\t# files.download('output.mp4')\n", + "\tloading.layout.display = 'none'\n", + "\tcomplete.layout.display = ''\n", + "\n", + "def convert_output(button):\n", + "\tcomplete.layout.display = 'none'\n", + "\tloading.layout.display = ''\n", + "\tffmpeg.input('output.mp4').output('scaled.mp4', vf='scale=1080x1080:flags=lanczos,pad=1920:1080:420:0').overwrite_output().run()\n", + "\tfiles.download('scaled.mp4')\n", + "\tloading.layout.display = 'none'\n", + "\tcomplete.layout.display = ''\n", + "\n", + "def back_to_main(button):\n", + "\tcomplete.layout.display = 
'none'\n", + "\tmain.layout.display = ''\n", + "\n", + "label_or = ipywidgets.Label('or')\n", + "label_or.add_class('label-or')\n", + "\n", + "image_titles = ['Peoples', 'Cartoons', 'Dolls', 'Game of Thrones', 'Statues']\n", + "image_lengths = [8, 4, 8, 9, 4]\n", + "\n", + "image_tab = ipywidgets.Tab()\n", + "image_tab.children = [ipywidgets.HBox([create_image(i, j) for j in range(length)]) for i, length in enumerate(image_lengths)]\n", + "for i, title in enumerate(image_titles):\n", + "\timage_tab.set_title(i, title)\n", + "\n", + "input_image_widget = ipywidgets.Output()\n", + "input_image_widget.add_class('input-widget')\n", + "upload_input_image_button = ipywidgets.FileUpload(accept='image/*', button_style='primary')\n", + "upload_input_image_button.add_class('input-button')\n", + "image_part = ipywidgets.HBox([\n", + "\tipywidgets.VBox([input_image_widget, upload_input_image_button]),\n", + "\tlabel_or,\n", + "\timage_tab\n", + "])\n", + "\n", + "video_tab = ipywidgets.Tab()\n", + "video_tab.children = [ipywidgets.HBox([create_video(i) for i in range(5)])]\n", + "video_tab.set_title(0, 'All Videos')\n", + "\n", + "input_video_widget = ipywidgets.Output()\n", + "input_video_widget.add_class('input-widget')\n", + "upload_input_video_button = ipywidgets.FileUpload(accept='video/*', button_style='primary')\n", + "upload_input_video_button.add_class('input-button')\n", + "video_part = ipywidgets.HBox([\n", + "\tipywidgets.VBox([input_video_widget, upload_input_video_button]),\n", + "\tlabel_or,\n", + "\tvideo_tab\n", + "])\n", + "\n", + "model = ipywidgets.Dropdown(\n", + "\tdescription=\"Model:\",\n", + "\toptions=[\n", + "\t\t'vox',\n", + "\t\t'vox-adv',\n", + "\t\t'taichi',\n", + "\t\t'taichi-adv',\n", + "\t\t'nemo',\n", + "\t\t'mgif',\n", + "\t\t'fashion',\n", + "\t\t'bair'\n", + "\t]\n", + ")\n", + "warning = ipywidgets.HTML('Warning: Upload your own images and videos (see README)')\n", + "warning.add_class('warning')\n", + "model_part = 
ipywidgets.HBox([model, warning])\n", + "\n", + "relative = ipywidgets.Checkbox(description=\"Relative keypoint displacement (Inherit object proporions from the video)\", value=True)\n", + "adapt_movement_scale = ipywidgets.Checkbox(description=\"Adapt movement scale (Don’t touch unless you know want you are doing)\", value=True)\n", + "generate_button = ipywidgets.Button(description=\"Generate\", button_style='primary')\n", + "main = ipywidgets.VBox([\n", + "\tcreate_title('Choose Image'),\n", + "\timage_part,\n", + "\tcreate_title('Choose Video'),\n", + "\tvideo_part,\n", + "\tcreate_title('Settings'),\n", + "\tmodel_part,\n", + "\trelative,\n", + "\tadapt_movement_scale,\n", + "\tgenerate_button\n", + "])\n", + "\n", + "loader = ipywidgets.Label()\n", + "loader.add_class(\"loader\")\n", + "loading_label = ipywidgets.Label(\"This may take several minutes to process…\")\n", + "loading_label.add_class(\"loading-label\")\n", + "progress_bar = ipywidgets.Output()\n", + "loading = ipywidgets.VBox([loader, loading_label, progress_bar])\n", + "loading.add_class('loading')\n", + "\n", + "output_widget = ipywidgets.Output()\n", + "output_widget.add_class('output-widget')\n", + "download = ipywidgets.Button(description='Download', button_style='primary')\n", + "download.add_class('output-button')\n", + "download.on_click(download_output)\n", + "convert = ipywidgets.Button(description='Convert to 1920×1080', button_style='primary')\n", + "convert.add_class('output-button')\n", + "convert.on_click(convert_output)\n", + "back = ipywidgets.Button(description='Back', button_style='primary')\n", + "back.add_class('output-button')\n", + "back.on_click(back_to_main)\n", + "\n", + "comparison_widget = ipywidgets.Output()\n", + "comparison_widget.add_class('comparison-widget')\n", + "comparison_label = ipywidgets.Label('Comparison')\n", + "comparison_label.add_class('comparison-label')\n", + "complete = ipywidgets.HBox([\n", + "\tipywidgets.VBox([output_widget, download, convert, 
back]),\n", + "\tipywidgets.VBox([comparison_widget, comparison_label])\n", + "])\n", + "\n", + "display(ipywidgets.VBox([main, loading, complete]))\n", + "display(Javascript(\"\"\"\n", + "var images, videos;\n", + "function deselectImages() {\n", + "\timages.forEach(function(item) {\n", + "\t\titem.classList.remove(\"selected\");\n", + "\t});\n", + "}\n", + "function deselectVideos() {\n", + "\tvideos.forEach(function(item) {\n", + "\t\titem.classList.remove(\"selected\");\n", + "\t});\n", + "}\n", + "function invokePython(func) {\n", + "\tgoogle.colab.kernel.invokeFunction(\"notebook.\" + func, [].slice.call(arguments, 1), {});\n", + "}\n", + "setTimeout(function() {\n", + "\t(images = [].slice.call(document.getElementsByClassName(\"resource-image\"))).forEach(function(item) {\n", + "\t\titem.addEventListener(\"click\", function() {\n", + "\t\t\tdeselectImages();\n", + "\t\t\titem.classList.add(\"selected\");\n", + "\t\t\tinvokePython(\"select_image\", item.className.match(/resource-image(\\d\\d)/)[1]);\n", + "\t\t});\n", + "\t});\n", + "\timages[0].classList.add(\"selected\");\n", + "\t(videos = [].slice.call(document.getElementsByClassName(\"resource-video\"))).forEach(function(item) {\n", + "\t\titem.addEventListener(\"click\", function() {\n", + "\t\t\tdeselectVideos();\n", + "\t\t\titem.classList.add(\"selected\");\n", + "\t\t\tinvokePython(\"select_video\", item.className.match(/resource-video(\\d)/)[1]);\n", + "\t\t});\n", + "\t});\n", + "\tvideos[0].classList.add(\"selected\");\n", + "}, 1000);\n", + "\"\"\"))\n", + "\n", + "selected_image = None\n", + "def select_image(filename):\n", + "\tglobal selected_image\n", + "\tselected_image = resize(PIL.Image.open('demo/images/%s.png' % filename).convert(\"RGB\"))\n", + "\tinput_image_widget.clear_output(wait=True)\n", + "\twith input_image_widget:\n", + "\t\tdisplay(HTML('Image'))\n", + "\tinput_image_widget.remove_class('uploaded')\n", + "# output.register_callback(\"notebook.select_image\", select_image)\n", 
+ "\n", + "selected_video = None\n", + "def select_video(filename):\n", + "\tglobal selected_video\n", + "\tselected_video = 'demo/videos/%s.mp4' % filename\n", + "\tinput_video_widget.clear_output(wait=True)\n", + "\twith input_video_widget:\n", + "\t\tdisplay(HTML('Video'))\n", + "\tinput_video_widget.remove_class('uploaded')\n", + "# output.register_callback(\"notebook.select_video\", select_video)\n", + "\n", + "def resize(image, size=(256, 256)):\n", + "\tw, h = image.size\n", + "\td = min(w, h)\n", + "\tr = ((w - d) // 2, (h - d) // 2, (w + d) // 2, (h + d) // 2)\n", + "\treturn image.resize(size, resample=PIL.Image.LANCZOS, box=r)\n", + "\n", + "def upload_image(change):\n", + "\tglobal selected_image\n", + "\tcontent = upload_input_image_button.value[0]['content']\n", + "\tname = upload_input_image_button.value[0]['name']\n", + " \n", + "\t# for name, file_info in upload_input_image_button.value.items():\n", + "\t\t# content = file_info['content']\n", + "\tif content is not None:\n", + "\t\tselected_image = resize(PIL.Image.open(io.BytesIO(content)).convert(\"RGB\"))\n", + "\t\tinput_image_widget.clear_output(wait=True)\n", + "\t\twith input_image_widget:\n", + "\t\t\tdisplay(selected_image)\n", + "\t\tinput_image_widget.add_class('uploaded')\n", + "\t\tdisplay(Javascript('deselectImages()'))\n", + "upload_input_image_button.observe(upload_image, names='value')\n", + "\n", + "def upload_video(change):\n", + "\tglobal selected_video\n", + "\t# for name, file_info in upload_input_video_button.value.items():\n", + "\t\t# content = file_info['content']\n", + "\tcontent = upload_input_video_button.value[0]['content']\n", + "\tname = upload_input_video_button.value[0]['name']\n", + "\tif content is not None:\n", + "\t\tselected_video = 'user/' + name\n", + "\t\tpreview = resize(PIL.Image.fromarray(thumbnail(content)).convert(\"RGB\"))\n", + "\t\tinput_video_widget.clear_output(wait=True)\n", + "\t\twith input_video_widget:\n", + "\t\t\tdisplay(preview)\n", + 
"\t\tinput_video_widget.add_class('uploaded')\n", + "\t\tdisplay(Javascript('deselectVideos()'))\n", + "\t\twith open(selected_video, 'wb') as video:\n", + "\t\t\tvideo.write(content)\n", + "upload_input_video_button.observe(upload_video, names='value')\n", + "\n", + "def change_model(change):\n", + "\tif model.value.startswith('vox'):\n", + "\t\twarning.remove_class('warn')\n", + "\telse:\n", + "\t\twarning.add_class('warn')\n", + "model.observe(change_model, names='value')\n", + "\n", + "def generate(button):\n", + "\tmain.layout.display = 'none'\n", + "\tloading.layout.display = ''\n", + "\tfilename = model.value + ('' if model.value == 'fashion' else '-cpk') + '.pth.tar'\n", + "\tif not os.path.isfile(filename):\n", + "\t\tdownload = requests.get(requests.get('https://cloud-api.yandex.net/v1/disk/public/resources/download?public_key=https://yadi.sk/d/lEw8uRm140L_eQ&path=/' + filename).json().get('href'))\n", + "\t\twith open(filename, 'wb') as checkpoint:\n", + "\t\t\tcheckpoint.write(download.content)\n", + "\treader = imageio.get_reader(selected_video, mode='I', format='FFMPEG')\n", + "\tfps = reader.get_meta_data()['fps']\n", + "\tdriving_video = []\n", + "\tfor frame in reader:\n", + "\t\tdriving_video.append(frame)\n", + "\tgenerator, kp_detector = load_checkpoints(config_path='config/%s-256.yaml' % model.value, checkpoint_path=filename)\n", + "\twith progress_bar:\n", + "\t\tpredictions = make_animation(\n", + "\t\t\tskimage.transform.resize(numpy.asarray(selected_image), (256, 256)),\n", + "\t\t\t[skimage.transform.resize(frame, (256, 256)) for frame in driving_video],\n", + "\t\t\tgenerator,\n", + "\t\t\tkp_detector,\n", + "\t\t\trelative=relative.value,\n", + "\t\t\tadapt_movement_scale=adapt_movement_scale.value\n", + "\t\t)\n", + "\tprogress_bar.clear_output()\n", + "\timageio.mimsave('output.mp4', [img_as_ubyte(frame) for frame in predictions], fps=fps)\n", + "\tif selected_video.startswith('user/') or selected_video == 'demo/videos/0.mp4':\n", + 
"\t\twith NamedTemporaryFile(suffix='.mp4') as output:\n", + "\t\t\tffmpeg.output(ffmpeg.input('output.mp4').video, ffmpeg.input(selected_video).audio, output.name, c='copy').overwrite_output().run()\n", + "\t\t\twith open('output.mp4', 'wb') as result:\n", + "\t\t\t\tcopyfileobj(output, result)\n", + "\twith output_widget:\n", + "\t\tvideo_widget = ipywidgets.Video.from_file('output.mp4', autoplay=False, loop=False)\n", + "\t\tvideo_widget.add_class('video')\n", + "\t\tvideo_widget.add_class('video-left')\n", + "\t\tdisplay(video_widget)\n", + "\twith comparison_widget:\n", + "\t\tvideo_widget = ipywidgets.Video.from_file(selected_video, autoplay=False, loop=False, controls=False)\n", + "\t\tvideo_widget.add_class('video')\n", + "\t\tvideo_widget.add_class('video-right')\n", + "\t\tdisplay(video_widget)\n", + "\tdisplay(Javascript(\"\"\"\n", + "\tsetTimeout(function() {\n", + "\t\t(function(left, right) {\n", + "\t\t\tleft.addEventListener(\"play\", function() {\n", + "\t\t\t\tright.play();\n", + "\t\t\t});\n", + "\t\t\tleft.addEventListener(\"pause\", function() {\n", + "\t\t\t\tright.pause();\n", + "\t\t\t});\n", + "\t\t\tleft.addEventListener(\"seeking\", function() {\n", + "\t\t\t\tright.currentTime = left.currentTime;\n", + "\t\t\t});\n", + "\t\t\tright.muted = true;\n", + "\t\t})(document.getElementsByClassName(\"video-left\")[0], document.getElementsByClassName(\"video-right\")[0]);\n", + "\t}, 1000);\n", + "\t\"\"\"))\n", + "\tloading.layout.display = 'none'\n", + "\tcomplete.layout.display = ''\n", + "\n", + "generate_button.on_click(generate)\n", + "\n", + "loading.layout.display = 'none'\n", + "complete.layout.display = 'none'\n", + "select_image('00')\n", + "select_video('0')" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "name": "first-order-model-demo", + "provenance": [] + }, + "kernelspec": { + "display_name": "ldm", + "language": "python", + "name": "ldm" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + 
"version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.5" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/firstordermodel/flagged/Input Image/04312ccaaa34d31868cf/source_image.png b/firstordermodel/flagged/Input Image/04312ccaaa34d31868cf/source_image.png new file mode 100644 index 0000000..6eda782 Binary files /dev/null and b/firstordermodel/flagged/Input Image/04312ccaaa34d31868cf/source_image.png differ diff --git a/firstordermodel/flagged/Input Image/69f77a91286d9746662e/屏幕截图 2024-03-10 184951.png b/firstordermodel/flagged/Input Image/69f77a91286d9746662e/屏幕截图 2024-03-10 184951.png new file mode 100644 index 0000000..4ba0542 Binary files /dev/null and b/firstordermodel/flagged/Input Image/69f77a91286d9746662e/屏幕截图 2024-03-10 184951.png differ diff --git a/firstordermodel/flagged/Input Image/d7caf8b9c6c17d272f25/h09456_tm.txt.67257e.jpg b/firstordermodel/flagged/Input Image/d7caf8b9c6c17d272f25/h09456_tm.txt.67257e.jpg new file mode 100644 index 0000000..e86157d Binary files /dev/null and b/firstordermodel/flagged/Input Image/d7caf8b9c6c17d272f25/h09456_tm.txt.67257e.jpg differ diff --git a/firstordermodel/flagged/Output Video/4df32a504b02bc01fdbb/result.mp4 b/firstordermodel/flagged/Output Video/4df32a504b02bc01fdbb/result.mp4 new file mode 100644 index 0000000..223ef2a Binary files /dev/null and b/firstordermodel/flagged/Output Video/4df32a504b02bc01fdbb/result.mp4 differ diff --git a/firstordermodel/flagged/Output Video/c8bb6b7c069b5e9c6003/result.mp4 b/firstordermodel/flagged/Output Video/c8bb6b7c069b5e9c6003/result.mp4 new file mode 100644 index 0000000..366a79a Binary files /dev/null and b/firstordermodel/flagged/Output Video/c8bb6b7c069b5e9c6003/result.mp4 differ diff --git a/firstordermodel/flagged/log.csv b/firstordermodel/flagged/log.csv new file mode 100644 index 0000000..5bc0a63 --- /dev/null +++ 
def read_video(name, frame_shape):
    """
    Load a video from *name* into a float32 frame array.

    Accepted inputs (decided by path type / extension):
      - a directory containing one image file per frame,
      - a single '.png' / '.jpg' image of concatenated frames,
      - a '.gif' / '.mp4' / '.mov' movie file.

    Raises Exception for any other extension.
    """
    lowered = name.lower()

    if os.path.isdir(name):
        # One image file per frame; sort for a deterministic frame order.
        frame_files = sorted(os.listdir(name))
        video_array = np.array([img_as_float32(io.imread(os.path.join(name, f)))
                                for f in frame_files])
    elif lowered.endswith('.png') or lowered.endswith('.jpg'):
        image = io.imread(name)

        # Promote grayscale to RGB and drop any alpha channel.
        if image.ndim == 2 or image.shape[2] == 1:
            image = gray2rgb(image)
        if image.shape[2] == 4:
            image = image[..., :3]

        image = img_as_float32(image)

        # Slice the concatenated strip into individual frames of frame_shape.
        video_array = np.moveaxis(image, 1, 0)
        video_array = video_array.reshape((-1,) + frame_shape)
        video_array = np.moveaxis(video_array, 1, 2)
    elif lowered.endswith('.gif') or lowered.endswith('.mp4') or lowered.endswith('.mov'):
        video = np.array(mimread(name))
        # A 3-D result means grayscale frames: promote each to RGB.
        if video.ndim == 3:
            video = np.array([gray2rgb(frame) for frame in video])
        if video.shape[-1] == 4:
            video = video[..., :3]
        video_array = img_as_float32(video)
    else:
        raise Exception("Unknown file extensions %s" % name)

    return video_array
class DatasetRepeater(Dataset):
    """Wrap a dataset so a single epoch iterates it several times.

    Repeating the indices amortizes epoch bookkeeping and improves I/O
    throughput; item *idx* maps to ``dataset[idx % len(dataset)]``.
    """

    def __init__(self, dataset, num_repeats=100):
        self.dataset = dataset
        self.num_repeats = num_repeats

    def __len__(self):
        # Reported length is the wrapped length times the repeat count.
        return len(self.dataset) * self.num_repeats

    def __getitem__(self, idx):
        return self.dataset[idx % len(self.dataset)]
class Logger:
    """Training logger.

    Buffers per-iteration losses, writes epoch summaries to a text log file,
    saves model checkpoints and reconstruction visualizations. Usable as a
    context manager: on exit a final checkpoint is saved (if any models were
    registered via log_epoch) and the log file is closed.
    """

    def __init__(self, log_dir, checkpoint_freq=100, visualizer_params=None, zfill_num=8, log_file_name='log.txt'):
        self.loss_list = []
        self.cpk_dir = log_dir
        self.visualizations_dir = os.path.join(log_dir, 'train-vis')
        if not os.path.exists(self.visualizations_dir):
            os.makedirs(self.visualizations_dir)
        # Append mode so a restarted run keeps the previous history.
        self.log_file = open(os.path.join(log_dir, log_file_name), 'a')
        self.zfill_num = zfill_num
        self.visualizer = Visualizer(**visualizer_params)
        self.checkpoint_freq = checkpoint_freq
        self.epoch = 0
        # NOTE(review): best_loss is never updated in this class — confirm it is used elsewhere.
        self.best_loss = float('inf')
        self.names = None

    def log_scores(self, loss_names):
        """Write the mean of the buffered losses to the log file and reset the buffer."""
        loss_mean = np.array(self.loss_list).mean(axis=0)

        loss_string = "; ".join(["%s - %.5f" % (name, value) for name, value in zip(loss_names, loss_mean)])
        loss_string = str(self.epoch).zfill(self.zfill_num) + ") " + loss_string

        print(loss_string, file=self.log_file)
        self.loss_list = []
        self.log_file.flush()

    def visualize_rec(self, inp, out):
        """Save a reconstruction visualization grid for the current epoch."""
        image = self.visualizer.visualize(inp['driving'], inp['source'], out)
        imageio.imsave(os.path.join(self.visualizations_dir, "%s-rec.png" % str(self.epoch).zfill(self.zfill_num)), image)

    def save_cpk(self, emergent=False):
        """Save state dicts of all tracked models; emergency saves never overwrite an existing file."""
        cpk = {k: v.state_dict() for k, v in self.models.items()}
        cpk['epoch'] = self.epoch
        cpk_path = os.path.join(self.cpk_dir, '%s-checkpoint.pth.tar' % str(self.epoch).zfill(self.zfill_num))
        if not (os.path.exists(cpk_path) and emergent):
            torch.save(cpk, cpk_path)

    @staticmethod
    def load_cpk(checkpoint_path, generator=None, discriminator=None, kp_detector=None,
                 optimizer_generator=None, optimizer_discriminator=None, optimizer_kp_detector=None):
        """Restore the given modules/optimizers from a checkpoint file.

        Arguments left as None are skipped. Missing discriminator (or its
        optimizer) entries are tolerated with a warning. Returns the epoch
        number stored in the checkpoint.
        """
        if torch.cuda.is_available():
            map_location = None
        else:
            # No GPU available: remap CUDA tensors onto the CPU.
            map_location = 'cpu'
        checkpoint = torch.load(checkpoint_path, map_location)
        if generator is not None:
            generator.load_state_dict(checkpoint['generator'])
        if kp_detector is not None:
            kp_detector.load_state_dict(checkpoint['kp_detector'])
        if discriminator is not None:
            try:
                discriminator.load_state_dict(checkpoint['discriminator'])
            # Was a bare `except:`; narrowed to the failures a missing/mismatched
            # entry can actually raise (KeyError lookup, RuntimeError load).
            except (KeyError, RuntimeError):
                print('No discriminator in the state-dict. Discriminator will be randomly initialized')
        if optimizer_generator is not None:
            optimizer_generator.load_state_dict(checkpoint['optimizer_generator'])
        if optimizer_discriminator is not None:
            try:
                optimizer_discriminator.load_state_dict(checkpoint['optimizer_discriminator'])
            # Bug fix: a missing 'optimizer_discriminator' key raises KeyError,
            # which the previous RuntimeError-only handler let escape.
            except (KeyError, RuntimeError):
                print('No discriminator optimizer in the state-dict. Optimizer will be not initialized')
        if optimizer_kp_detector is not None:
            optimizer_kp_detector.load_state_dict(checkpoint['optimizer_kp_detector'])

        return checkpoint['epoch']

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Save a final checkpoint only if log_epoch ever registered models.
        if 'models' in self.__dict__:
            self.save_cpk()
        self.log_file.close()

    def log_iter(self, losses):
        """Buffer one iteration's losses; remember the loss names on first call."""
        losses = collections.OrderedDict(losses.items())
        if self.names is None:
            self.names = list(losses.keys())
        self.loss_list.append(list(losses.values()))

    def log_epoch(self, epoch, models, inp, out):
        """Record end-of-epoch state: periodic checkpoint, score line, visualization."""
        self.epoch = epoch
        self.models = models
        if (self.epoch + 1) % self.checkpoint_freq == 0:
            self.save_cpk()
        self.log_scores(self.names)
        self.visualize_rec(inp, out)
np.copy(images) + images[:, :, [0, -1]] = (1, 1, 1) + return np.concatenate(list(images), axis=0) + + def create_image_grid(self, *args): + out = [] + for arg in args: + if type(arg) == tuple: + out.append(self.create_image_column_with_kp(arg[0], arg[1])) + else: + out.append(self.create_image_column(arg)) + return np.concatenate(out, axis=1) + + def visualize(self, driving, source, out): + images = [] + + # Source image with keypoints + source = source.data.cpu() + kp_source = out['kp_source']['value'].data.cpu().numpy() + source = np.transpose(source, [0, 2, 3, 1]) + images.append((source, kp_source)) + + # Equivariance visualization + if 'transformed_frame' in out: + transformed = out['transformed_frame'].data.cpu().numpy() + transformed = np.transpose(transformed, [0, 2, 3, 1]) + transformed_kp = out['transformed_kp']['value'].data.cpu().numpy() + images.append((transformed, transformed_kp)) + + # Driving image with keypoints + kp_driving = out['kp_driving']['value'].data.cpu().numpy() + driving = driving.data.cpu().numpy() + driving = np.transpose(driving, [0, 2, 3, 1]) + images.append((driving, kp_driving)) + + # Deformed image + if 'deformed' in out: + deformed = out['deformed'].data.cpu().numpy() + deformed = np.transpose(deformed, [0, 2, 3, 1]) + images.append(deformed) + + # Result with and without keypoints + prediction = out['prediction'].data.cpu().numpy() + prediction = np.transpose(prediction, [0, 2, 3, 1]) + if 'kp_norm' in out: + kp_norm = out['kp_norm']['value'].data.cpu().numpy() + images.append((prediction, kp_norm)) + images.append(prediction) + + + ## Occlusion map + if 'occlusion_map' in out: + occlusion_map = out['occlusion_map'].data.cpu().repeat(1, 3, 1, 1) + occlusion_map = F.interpolate(occlusion_map, size=source.shape[1:3]).numpy() + occlusion_map = np.transpose(occlusion_map, [0, 2, 3, 1]) + images.append(occlusion_map) + + # Deformed images according to each individual transform + if 'sparse_deformed' in out: + full_mask = [] + for i 
class DenseMotionNetwork(nn.Module):
    """
    Predicts a dense motion field from the sparse motion representation
    given by kp_source and kp_driving.

    Per-keypoint Gaussian heatmaps and sparsely deformed copies of the
    (optionally downscaled) source image are fed to an hourglass network,
    which outputs a soft mask over keypoint motions; the dense deformation
    is the mask-weighted sum of the sparse motions. Optionally also
    predicts an occlusion map (Sec. 3.2 of the paper).
    """

    def __init__(self, block_expansion, num_blocks, max_features, num_kp, num_channels, estimate_occlusion_map=False,
                 scale_factor=1, kp_variance=0.01):
        super(DenseMotionNetwork, self).__init__()
        # One input group per keypoint plus one for the background; each group
        # carries num_channels deformed-image channels + 1 heatmap channel.
        self.hourglass = Hourglass(block_expansion=block_expansion, in_features=(num_kp + 1) * (num_channels + 1),
                                   max_features=max_features, num_blocks=num_blocks)

        # (num_kp + 1)-way soft assignment of each pixel to a keypoint motion or background.
        self.mask = nn.Conv2d(self.hourglass.out_filters, num_kp + 1, kernel_size=(7, 7), padding=(3, 3))

        if estimate_occlusion_map:
            self.occlusion = nn.Conv2d(self.hourglass.out_filters, 1, kernel_size=(7, 7), padding=(3, 3))
        else:
            self.occlusion = None

        self.num_kp = num_kp
        self.scale_factor = scale_factor
        self.kp_variance = kp_variance

        # Anti-aliased downscaling when the network operates at reduced resolution.
        if self.scale_factor != 1:
            self.down = AntiAliasInterpolation2d(num_channels, self.scale_factor)

    def create_heatmap_representations(self, source_image, kp_driving, kp_source):
        """
        Eq 6. in the paper: H_k(z) — difference of driving and source
        keypoint Gaussians, with a zero background channel prepended.
        """
        spatial_size = source_image.shape[2:]
        gaussian_driving = kp2gaussian(kp_driving, spatial_size=spatial_size, kp_variance=self.kp_variance)
        gaussian_source = kp2gaussian(kp_source, spatial_size=spatial_size, kp_variance=self.kp_variance)
        heatmap = gaussian_driving - gaussian_source

        # adding background feature (an all-zero heatmap channel)
        zeros = torch.zeros(heatmap.shape[0], 1, spatial_size[0], spatial_size[1]).type(heatmap.type())
        heatmap = torch.cat([zeros, heatmap], dim=1)
        heatmap = heatmap.unsqueeze(2)
        return heatmap

    def create_sparse_motions(self, source_image, kp_driving, kp_source):
        """
        Eq 4. in the paper: T_{s<-d}(z) — one coordinate grid per keypoint
        mapping driving-frame coordinates back into the source frame.
        """
        bs, _, h, w = source_image.shape
        identity_grid = make_coordinate_grid((h, w), type=kp_source['value'].type())
        identity_grid = identity_grid.view(1, 1, h, w, 2)
        coordinate_grid = identity_grid - kp_driving['value'].view(bs, self.num_kp, 1, 1, 2)
        if 'jacobian' in kp_driving:
            # First-order (affine) correction around each keypoint.
            jacobian = torch.matmul(kp_source['jacobian'], torch.inverse(kp_driving['jacobian']))
            jacobian = jacobian.unsqueeze(-3).unsqueeze(-3)
            jacobian = jacobian.repeat(1, 1, h, w, 1, 1)
            coordinate_grid = torch.matmul(jacobian, coordinate_grid.unsqueeze(-1))
            coordinate_grid = coordinate_grid.squeeze(-1)

        driving_to_source = coordinate_grid + kp_source['value'].view(bs, self.num_kp, 1, 1, 2)

        # adding background feature (the identity motion)
        identity_grid = identity_grid.repeat(bs, 1, 1, 1, 1)
        sparse_motions = torch.cat([identity_grid, driving_to_source], dim=1)
        return sparse_motions

    def create_deformed_source_image(self, source_image, sparse_motions):
        r"""
        Eq 7. in the paper: \hat{T}_{s<-d}(z) — the source image warped by
        each sparse motion separately via grid_sample.
        """
        bs, _, h, w = source_image.shape
        source_repeat = source_image.unsqueeze(1).unsqueeze(1).repeat(1, self.num_kp + 1, 1, 1, 1, 1)
        source_repeat = source_repeat.view(bs * (self.num_kp + 1), -1, h, w)
        sparse_motions = sparse_motions.view((bs * (self.num_kp + 1), h, w, -1))
        sparse_deformed = F.grid_sample(source_repeat, sparse_motions)
        sparse_deformed = sparse_deformed.view((bs, self.num_kp + 1, -1, h, w))
        return sparse_deformed

    def forward(self, source_image, kp_driving, kp_source):
        """Return a dict with 'sparse_deformed', 'mask', 'deformation' and,
        when occlusion estimation is enabled, 'occlusion_map'."""
        if self.scale_factor != 1:
            source_image = self.down(source_image)

        bs, _, h, w = source_image.shape

        out_dict = dict()
        heatmap_representation = self.create_heatmap_representations(source_image, kp_driving, kp_source)
        sparse_motion = self.create_sparse_motions(source_image, kp_driving, kp_source)
        deformed_source = self.create_deformed_source_image(source_image, sparse_motion)
        out_dict['sparse_deformed'] = deformed_source

        # Stack heatmaps with deformed images per keypoint group, then flatten groups into channels.
        input = torch.cat([heatmap_representation, deformed_source], dim=2)
        input = input.view(bs, -1, h, w)

        prediction = self.hourglass(input)

        mask = self.mask(prediction)
        mask = F.softmax(mask, dim=1)
        out_dict['mask'] = mask
        mask = mask.unsqueeze(2)
        sparse_motion = sparse_motion.permute(0, 1, 4, 2, 3)
        # Dense deformation = mask-weighted sum of the sparse motions.
        deformation = (sparse_motion * mask).sum(dim=1)
        deformation = deformation.permute(0, 2, 3, 1)

        out_dict['deformation'] = deformation

        # Sec. 3.2 in the paper
        if self.occlusion:
            occlusion_map = torch.sigmoid(self.occlusion(prediction))
            out_dict['occlusion_map'] = occlusion_map

        return out_dict
+ """ + + def __init__(self, in_features, out_features, norm=False, kernel_size=4, pool=False, sn=False): + super(DownBlock2d, self).__init__() + self.conv = nn.Conv2d(in_channels=in_features, out_channels=out_features, kernel_size=kernel_size) + + if sn: + self.conv = nn.utils.spectral_norm(self.conv) + + if norm: + self.norm = nn.InstanceNorm2d(out_features, affine=True) + else: + self.norm = None + self.pool = pool + + def forward(self, x): + out = x + out = self.conv(out) + if self.norm: + out = self.norm(out) + out = F.leaky_relu(out, 0.2) + if self.pool: + out = F.avg_pool2d(out, (2, 2)) + return out + + +class Discriminator(nn.Module): + """ + Discriminator similar to Pix2Pix + """ + + def __init__(self, num_channels=3, block_expansion=64, num_blocks=4, max_features=512, + sn=False, use_kp=False, num_kp=10, kp_variance=0.01, **kwargs): + super(Discriminator, self).__init__() + + down_blocks = [] + for i in range(num_blocks): + down_blocks.append( + DownBlock2d(num_channels + num_kp * use_kp if i == 0 else min(max_features, block_expansion * (2 ** i)), + min(max_features, block_expansion * (2 ** (i + 1))), + norm=(i != 0), kernel_size=4, pool=(i != num_blocks - 1), sn=sn)) + + self.down_blocks = nn.ModuleList(down_blocks) + self.conv = nn.Conv2d(self.down_blocks[-1].conv.out_channels, out_channels=1, kernel_size=1) + if sn: + self.conv = nn.utils.spectral_norm(self.conv) + self.use_kp = use_kp + self.kp_variance = kp_variance + + def forward(self, x, kp=None): + feature_maps = [] + out = x + if self.use_kp: + heatmap = kp2gaussian(kp, x.shape[2:], self.kp_variance) + out = torch.cat([out, heatmap], dim=1) + + for down_block in self.down_blocks: + feature_maps.append(down_block(out)) + out = feature_maps[-1] + prediction_map = self.conv(out) + + return feature_maps, prediction_map + + +class MultiScaleDiscriminator(nn.Module): + """ + Multi-scale (scale) discriminator + """ + + def __init__(self, scales=(), **kwargs): + super(MultiScaleDiscriminator, 
self).__init__() + self.scales = scales + discs = {} + for scale in scales: + discs[str(scale).replace('.', '-')] = Discriminator(**kwargs) + self.discs = nn.ModuleDict(discs) + + def forward(self, x, kp=None): + out_dict = {} + for scale, disc in self.discs.items(): + scale = str(scale).replace('-', '.') + key = 'prediction_' + scale + feature_maps, prediction_map = disc(x[key], kp) + out_dict['feature_maps_' + scale] = feature_maps + out_dict['prediction_map_' + scale] = prediction_map + return out_dict diff --git a/firstordermodel/modules/generator.py b/firstordermodel/modules/generator.py new file mode 100644 index 0000000..ec66570 --- /dev/null +++ b/firstordermodel/modules/generator.py @@ -0,0 +1,97 @@ +import torch +from torch import nn +import torch.nn.functional as F +from modules.util import ResBlock2d, SameBlock2d, UpBlock2d, DownBlock2d +from modules.dense_motion import DenseMotionNetwork + + +class OcclusionAwareGenerator(nn.Module): + """ + Generator that given source image and and keypoints try to transform image according to movement trajectories + induced by keypoints. Generator follows Johnson architecture. 
+ """ + + def __init__(self, num_channels, num_kp, block_expansion, max_features, num_down_blocks, + num_bottleneck_blocks, estimate_occlusion_map=False, dense_motion_params=None, estimate_jacobian=False): + super(OcclusionAwareGenerator, self).__init__() + + if dense_motion_params is not None: + self.dense_motion_network = DenseMotionNetwork(num_kp=num_kp, num_channels=num_channels, + estimate_occlusion_map=estimate_occlusion_map, + **dense_motion_params) + else: + self.dense_motion_network = None + + self.first = SameBlock2d(num_channels, block_expansion, kernel_size=(7, 7), padding=(3, 3)) + + down_blocks = [] + for i in range(num_down_blocks): + in_features = min(max_features, block_expansion * (2 ** i)) + out_features = min(max_features, block_expansion * (2 ** (i + 1))) + down_blocks.append(DownBlock2d(in_features, out_features, kernel_size=(3, 3), padding=(1, 1))) + self.down_blocks = nn.ModuleList(down_blocks) + + up_blocks = [] + for i in range(num_down_blocks): + in_features = min(max_features, block_expansion * (2 ** (num_down_blocks - i))) + out_features = min(max_features, block_expansion * (2 ** (num_down_blocks - i - 1))) + up_blocks.append(UpBlock2d(in_features, out_features, kernel_size=(3, 3), padding=(1, 1))) + self.up_blocks = nn.ModuleList(up_blocks) + + self.bottleneck = torch.nn.Sequential() + in_features = min(max_features, block_expansion * (2 ** num_down_blocks)) + for i in range(num_bottleneck_blocks): + self.bottleneck.add_module('r' + str(i), ResBlock2d(in_features, kernel_size=(3, 3), padding=(1, 1))) + + self.final = nn.Conv2d(block_expansion, num_channels, kernel_size=(7, 7), padding=(3, 3)) + self.estimate_occlusion_map = estimate_occlusion_map + self.num_channels = num_channels + + def deform_input(self, inp, deformation): + _, h_old, w_old, _ = deformation.shape + _, _, h, w = inp.shape + if h_old != h or w_old != w: + deformation = deformation.permute(0, 3, 1, 2) + deformation = F.interpolate(deformation, size=(h, w), 
mode='bilinear') + deformation = deformation.permute(0, 2, 3, 1) + return F.grid_sample(inp, deformation) + + def forward(self, source_image, kp_driving, kp_source): + # Encoding (downsampling) part + out = self.first(source_image) + for i in range(len(self.down_blocks)): + out = self.down_blocks[i](out) + + # Transforming feature representation according to deformation and occlusion + output_dict = {} + if self.dense_motion_network is not None: + dense_motion = self.dense_motion_network(source_image=source_image, kp_driving=kp_driving, + kp_source=kp_source) + output_dict['mask'] = dense_motion['mask'] + output_dict['sparse_deformed'] = dense_motion['sparse_deformed'] + + if 'occlusion_map' in dense_motion: + occlusion_map = dense_motion['occlusion_map'] + output_dict['occlusion_map'] = occlusion_map + else: + occlusion_map = None + deformation = dense_motion['deformation'] + out = self.deform_input(out, deformation) + + if occlusion_map is not None: + if out.shape[2] != occlusion_map.shape[2] or out.shape[3] != occlusion_map.shape[3]: + occlusion_map = F.interpolate(occlusion_map, size=out.shape[2:], mode='bilinear') + out = out * occlusion_map + + output_dict["deformed"] = self.deform_input(source_image, deformation) + + # Decoding part + out = self.bottleneck(out) + for i in range(len(self.up_blocks)): + out = self.up_blocks[i](out) + out = self.final(out) + out = F.sigmoid(out) + + output_dict["prediction"] = out + + return output_dict diff --git a/firstordermodel/modules/keypoint_detector.py b/firstordermodel/modules/keypoint_detector.py new file mode 100644 index 0000000..33f9f1d --- /dev/null +++ b/firstordermodel/modules/keypoint_detector.py @@ -0,0 +1,75 @@ +from torch import nn +import torch +import torch.nn.functional as F +from modules.util import Hourglass, make_coordinate_grid, AntiAliasInterpolation2d + + +class KPDetector(nn.Module): + """ + Detecting a keypoints. Return keypoint position and jacobian near each keypoint. 
+ """ + + def __init__(self, block_expansion, num_kp, num_channels, max_features, + num_blocks, temperature, estimate_jacobian=False, scale_factor=1, + single_jacobian_map=False, pad=0): + super(KPDetector, self).__init__() + + self.predictor = Hourglass(block_expansion, in_features=num_channels, + max_features=max_features, num_blocks=num_blocks) + + self.kp = nn.Conv2d(in_channels=self.predictor.out_filters, out_channels=num_kp, kernel_size=(7, 7), + padding=pad) + + if estimate_jacobian: + self.num_jacobian_maps = 1 if single_jacobian_map else num_kp + self.jacobian = nn.Conv2d(in_channels=self.predictor.out_filters, + out_channels=4 * self.num_jacobian_maps, kernel_size=(7, 7), padding=pad) + self.jacobian.weight.data.zero_() + self.jacobian.bias.data.copy_(torch.tensor([1, 0, 0, 1] * self.num_jacobian_maps, dtype=torch.float)) + else: + self.jacobian = None + + self.temperature = temperature + self.scale_factor = scale_factor + if self.scale_factor != 1: + self.down = AntiAliasInterpolation2d(num_channels, self.scale_factor) + + def gaussian2kp(self, heatmap): + """ + Extract the mean and from a heatmap + """ + shape = heatmap.shape + heatmap = heatmap.unsqueeze(-1) + grid = make_coordinate_grid(shape[2:], heatmap.type()).unsqueeze_(0).unsqueeze_(0) + value = (heatmap * grid).sum(dim=(2, 3)) + kp = {'value': value} + + return kp + + def forward(self, x): + if self.scale_factor != 1: + x = self.down(x) + + feature_map = self.predictor(x) + prediction = self.kp(feature_map) + + final_shape = prediction.shape + heatmap = prediction.view(final_shape[0], final_shape[1], -1) + heatmap = F.softmax(heatmap / self.temperature, dim=2) + heatmap = heatmap.view(*final_shape) + + out = self.gaussian2kp(heatmap) + + if self.jacobian is not None: + jacobian_map = self.jacobian(feature_map) + jacobian_map = jacobian_map.reshape(final_shape[0], self.num_jacobian_maps, 4, final_shape[2], + final_shape[3]) + heatmap = heatmap.unsqueeze(2) + + jacobian = heatmap * jacobian_map + 
jacobian = jacobian.view(final_shape[0], final_shape[1], 4, -1) + jacobian = jacobian.sum(dim=-1) + jacobian = jacobian.view(jacobian.shape[0], jacobian.shape[1], 2, 2) + out['jacobian'] = jacobian + + return out diff --git a/firstordermodel/modules/model.py b/firstordermodel/modules/model.py new file mode 100644 index 0000000..4f3c671 --- /dev/null +++ b/firstordermodel/modules/model.py @@ -0,0 +1,259 @@ +from torch import nn +import torch +import torch.nn.functional as F +from modules.util import AntiAliasInterpolation2d, make_coordinate_grid +from torchvision import models +import numpy as np +from torch.autograd import grad + + +class Vgg19(torch.nn.Module): + """ + Vgg19 network for perceptual loss. See Sec 3.3. + """ + def __init__(self, requires_grad=False): + super(Vgg19, self).__init__() + vgg_pretrained_features = models.vgg19(pretrained=True).features + self.slice1 = torch.nn.Sequential() + self.slice2 = torch.nn.Sequential() + self.slice3 = torch.nn.Sequential() + self.slice4 = torch.nn.Sequential() + self.slice5 = torch.nn.Sequential() + for x in range(2): + self.slice1.add_module(str(x), vgg_pretrained_features[x]) + for x in range(2, 7): + self.slice2.add_module(str(x), vgg_pretrained_features[x]) + for x in range(7, 12): + self.slice3.add_module(str(x), vgg_pretrained_features[x]) + for x in range(12, 21): + self.slice4.add_module(str(x), vgg_pretrained_features[x]) + for x in range(21, 30): + self.slice5.add_module(str(x), vgg_pretrained_features[x]) + + self.mean = torch.nn.Parameter(data=torch.Tensor(np.array([0.485, 0.456, 0.406]).reshape((1, 3, 1, 1))), + requires_grad=False) + self.std = torch.nn.Parameter(data=torch.Tensor(np.array([0.229, 0.224, 0.225]).reshape((1, 3, 1, 1))), + requires_grad=False) + + if not requires_grad: + for param in self.parameters(): + param.requires_grad = False + + def forward(self, X): + X = (X - self.mean) / self.std + h_relu1 = self.slice1(X) + h_relu2 = self.slice2(h_relu1) + h_relu3 = self.slice3(h_relu2) + 
h_relu4 = self.slice4(h_relu3) + h_relu5 = self.slice5(h_relu4) + out = [h_relu1, h_relu2, h_relu3, h_relu4, h_relu5] + return out + + +class ImagePyramide(torch.nn.Module): + """ + Create image pyramide for computing pyramide perceptual loss. See Sec 3.3 + """ + def __init__(self, scales, num_channels): + super(ImagePyramide, self).__init__() + downs = {} + for scale in scales: + downs[str(scale).replace('.', '-')] = AntiAliasInterpolation2d(num_channels, scale) + self.downs = nn.ModuleDict(downs) + + def forward(self, x): + out_dict = {} + for scale, down_module in self.downs.items(): + out_dict['prediction_' + str(scale).replace('-', '.')] = down_module(x) + return out_dict + + +class Transform: + """ + Random tps transformation for equivariance constraints. See Sec 3.3 + """ + def __init__(self, bs, **kwargs): + noise = torch.normal(mean=0, std=kwargs['sigma_affine'] * torch.ones([bs, 2, 3])) + self.theta = noise + torch.eye(2, 3).view(1, 2, 3) + self.bs = bs + + if ('sigma_tps' in kwargs) and ('points_tps' in kwargs): + self.tps = True + self.control_points = make_coordinate_grid((kwargs['points_tps'], kwargs['points_tps']), type=noise.type()) + self.control_points = self.control_points.unsqueeze(0) + self.control_params = torch.normal(mean=0, + std=kwargs['sigma_tps'] * torch.ones([bs, 1, kwargs['points_tps'] ** 2])) + else: + self.tps = False + + def transform_frame(self, frame): + grid = make_coordinate_grid(frame.shape[2:], type=frame.type()).unsqueeze(0) + grid = grid.view(1, frame.shape[2] * frame.shape[3], 2) + grid = self.warp_coordinates(grid).view(self.bs, frame.shape[2], frame.shape[3], 2) + return F.grid_sample(frame, grid, padding_mode="reflection") + + def warp_coordinates(self, coordinates): + theta = self.theta.type(coordinates.type()) + theta = theta.unsqueeze(1) + transformed = torch.matmul(theta[:, :, :, :2], coordinates.unsqueeze(-1)) + theta[:, :, :, 2:] + transformed = transformed.squeeze(-1) + + if self.tps: + control_points = 
self.control_points.type(coordinates.type()) + control_params = self.control_params.type(coordinates.type()) + distances = coordinates.view(coordinates.shape[0], -1, 1, 2) - control_points.view(1, 1, -1, 2) + distances = torch.abs(distances).sum(-1) + + result = distances ** 2 + result = result * torch.log(distances + 1e-6) + result = result * control_params + result = result.sum(dim=2).view(self.bs, coordinates.shape[1], 1) + transformed = transformed + result + + return transformed + + def jacobian(self, coordinates): + new_coordinates = self.warp_coordinates(coordinates) + grad_x = grad(new_coordinates[..., 0].sum(), coordinates, create_graph=True) + grad_y = grad(new_coordinates[..., 1].sum(), coordinates, create_graph=True) + jacobian = torch.cat([grad_x[0].unsqueeze(-2), grad_y[0].unsqueeze(-2)], dim=-2) + return jacobian + + +def detach_kp(kp): + return {key: value.detach() for key, value in kp.items()} + + +class GeneratorFullModel(torch.nn.Module): + """ + Merge all generator related updates into single model for better multi-gpu usage + """ + + def __init__(self, kp_extractor, generator, discriminator, train_params): + super(GeneratorFullModel, self).__init__() + self.kp_extractor = kp_extractor + self.generator = generator + self.discriminator = discriminator + self.train_params = train_params + self.scales = train_params['scales'] + self.disc_scales = self.discriminator.scales + self.pyramid = ImagePyramide(self.scales, generator.num_channels) + if torch.cuda.is_available(): + self.pyramid = self.pyramid.cuda() + + self.loss_weights = train_params['loss_weights'] + + if sum(self.loss_weights['perceptual']) != 0: + self.vgg = Vgg19() + if torch.cuda.is_available(): + self.vgg = self.vgg.cuda() + + def forward(self, x): + kp_source = self.kp_extractor(x['source']) + kp_driving = self.kp_extractor(x['driving']) + + generated = self.generator(x['source'], kp_source=kp_source, kp_driving=kp_driving) + generated.update({'kp_source': kp_source, 'kp_driving': 
kp_driving}) + + loss_values = {} + + pyramide_real = self.pyramid(x['driving']) + pyramide_generated = self.pyramid(generated['prediction']) + + if sum(self.loss_weights['perceptual']) != 0: + value_total = 0 + for scale in self.scales: + x_vgg = self.vgg(pyramide_generated['prediction_' + str(scale)]) + y_vgg = self.vgg(pyramide_real['prediction_' + str(scale)]) + + for i, weight in enumerate(self.loss_weights['perceptual']): + value = torch.abs(x_vgg[i] - y_vgg[i].detach()).mean() + value_total += self.loss_weights['perceptual'][i] * value + loss_values['perceptual'] = value_total + + if self.loss_weights['generator_gan'] != 0: + discriminator_maps_generated = self.discriminator(pyramide_generated, kp=detach_kp(kp_driving)) + discriminator_maps_real = self.discriminator(pyramide_real, kp=detach_kp(kp_driving)) + value_total = 0 + for scale in self.disc_scales: + key = 'prediction_map_%s' % scale + value = ((1 - discriminator_maps_generated[key]) ** 2).mean() + value_total += self.loss_weights['generator_gan'] * value + loss_values['gen_gan'] = value_total + + if sum(self.loss_weights['feature_matching']) != 0: + value_total = 0 + for scale in self.disc_scales: + key = 'feature_maps_%s' % scale + for i, (a, b) in enumerate(zip(discriminator_maps_real[key], discriminator_maps_generated[key])): + if self.loss_weights['feature_matching'][i] == 0: + continue + value = torch.abs(a - b).mean() + value_total += self.loss_weights['feature_matching'][i] * value + loss_values['feature_matching'] = value_total + + if (self.loss_weights['equivariance_value'] + self.loss_weights['equivariance_jacobian']) != 0: + transform = Transform(x['driving'].shape[0], **self.train_params['transform_params']) + transformed_frame = transform.transform_frame(x['driving']) + transformed_kp = self.kp_extractor(transformed_frame) + + generated['transformed_frame'] = transformed_frame + generated['transformed_kp'] = transformed_kp + + ## Value loss part + if 
self.loss_weights['equivariance_value'] != 0: + value = torch.abs(kp_driving['value'] - transform.warp_coordinates(transformed_kp['value'])).mean() + loss_values['equivariance_value'] = self.loss_weights['equivariance_value'] * value + + ## jacobian loss part + if self.loss_weights['equivariance_jacobian'] != 0: + jacobian_transformed = torch.matmul(transform.jacobian(transformed_kp['value']), + transformed_kp['jacobian']) + + normed_driving = torch.inverse(kp_driving['jacobian']) + normed_transformed = jacobian_transformed + value = torch.matmul(normed_driving, normed_transformed) + + eye = torch.eye(2).view(1, 1, 2, 2).type(value.type()) + + value = torch.abs(eye - value).mean() + loss_values['equivariance_jacobian'] = self.loss_weights['equivariance_jacobian'] * value + + return loss_values, generated + + +class DiscriminatorFullModel(torch.nn.Module): + """ + Merge all discriminator related updates into single model for better multi-gpu usage + """ + + def __init__(self, kp_extractor, generator, discriminator, train_params): + super(DiscriminatorFullModel, self).__init__() + self.kp_extractor = kp_extractor + self.generator = generator + self.discriminator = discriminator + self.train_params = train_params + self.scales = self.discriminator.scales + self.pyramid = ImagePyramide(self.scales, generator.num_channels) + if torch.cuda.is_available(): + self.pyramid = self.pyramid.cuda() + + self.loss_weights = train_params['loss_weights'] + + def forward(self, x, generated): + pyramide_real = self.pyramid(x['driving']) + pyramide_generated = self.pyramid(generated['prediction'].detach()) + + kp_driving = generated['kp_driving'] + discriminator_maps_generated = self.discriminator(pyramide_generated, kp=detach_kp(kp_driving)) + discriminator_maps_real = self.discriminator(pyramide_real, kp=detach_kp(kp_driving)) + + loss_values = {} + value_total = 0 + for scale in self.scales: + key = 'prediction_map_%s' % scale + value = (1 - discriminator_maps_real[key]) ** 2 + 
discriminator_maps_generated[key] ** 2 + value_total += self.loss_weights['discriminator_gan'] * value.mean() + loss_values['disc_gan'] = value_total + + return loss_values \ No newline at end of file diff --git a/firstordermodel/modules/util.py b/firstordermodel/modules/util.py new file mode 100644 index 0000000..831fea4 --- /dev/null +++ b/firstordermodel/modules/util.py @@ -0,0 +1,245 @@ +from torch import nn + +import torch.nn.functional as F +import torch + +from sync_batchnorm import SynchronizedBatchNorm2d as BatchNorm2d + + +def kp2gaussian(kp, spatial_size, kp_variance): + """ + Transform a keypoint into gaussian like representation + """ + mean = kp['value'] + + coordinate_grid = make_coordinate_grid(spatial_size, mean.type()) + number_of_leading_dimensions = len(mean.shape) - 1 + shape = (1,) * number_of_leading_dimensions + coordinate_grid.shape + coordinate_grid = coordinate_grid.view(*shape) + repeats = mean.shape[:number_of_leading_dimensions] + (1, 1, 1) + coordinate_grid = coordinate_grid.repeat(*repeats) + + # Preprocess kp shape + shape = mean.shape[:number_of_leading_dimensions] + (1, 1, 2) + mean = mean.view(*shape) + + mean_sub = (coordinate_grid - mean) + + out = torch.exp(-0.5 * (mean_sub ** 2).sum(-1) / kp_variance) + + return out + + +def make_coordinate_grid(spatial_size, type): + """ + Create a meshgrid [-1,1] x [-1,1] of given spatial_size. + """ + h, w = spatial_size + x = torch.arange(w).type(type) + y = torch.arange(h).type(type) + + x = (2 * (x / (w - 1)) - 1) + y = (2 * (y / (h - 1)) - 1) + + yy = y.view(-1, 1).repeat(1, w) + xx = x.view(1, -1).repeat(h, 1) + + meshed = torch.cat([xx.unsqueeze_(2), yy.unsqueeze_(2)], 2) + + return meshed + + +class ResBlock2d(nn.Module): + """ + Res block, preserve spatial resolution. 
+ """ + + def __init__(self, in_features, kernel_size, padding): + super(ResBlock2d, self).__init__() + self.conv1 = nn.Conv2d(in_channels=in_features, out_channels=in_features, kernel_size=kernel_size, + padding=padding) + self.conv2 = nn.Conv2d(in_channels=in_features, out_channels=in_features, kernel_size=kernel_size, + padding=padding) + self.norm1 = BatchNorm2d(in_features, affine=True) + self.norm2 = BatchNorm2d(in_features, affine=True) + + def forward(self, x): + out = self.norm1(x) + out = F.relu(out) + out = self.conv1(out) + out = self.norm2(out) + out = F.relu(out) + out = self.conv2(out) + out += x + return out + + +class UpBlock2d(nn.Module): + """ + Upsampling block for use in decoder. + """ + + def __init__(self, in_features, out_features, kernel_size=3, padding=1, groups=1): + super(UpBlock2d, self).__init__() + + self.conv = nn.Conv2d(in_channels=in_features, out_channels=out_features, kernel_size=kernel_size, + padding=padding, groups=groups) + self.norm = BatchNorm2d(out_features, affine=True) + + def forward(self, x): + out = F.interpolate(x, scale_factor=2) + out = self.conv(out) + out = self.norm(out) + out = F.relu(out) + return out + + +class DownBlock2d(nn.Module): + """ + Downsampling block for use in encoder. + """ + + def __init__(self, in_features, out_features, kernel_size=3, padding=1, groups=1): + super(DownBlock2d, self).__init__() + self.conv = nn.Conv2d(in_channels=in_features, out_channels=out_features, kernel_size=kernel_size, + padding=padding, groups=groups) + self.norm = BatchNorm2d(out_features, affine=True) + self.pool = nn.AvgPool2d(kernel_size=(2, 2)) + + def forward(self, x): + out = self.conv(x) + out = self.norm(out) + out = F.relu(out) + out = self.pool(out) + return out + + +class SameBlock2d(nn.Module): + """ + Simple block, preserve spatial resolution. 
+ """ + + def __init__(self, in_features, out_features, groups=1, kernel_size=3, padding=1): + super(SameBlock2d, self).__init__() + self.conv = nn.Conv2d(in_channels=in_features, out_channels=out_features, + kernel_size=kernel_size, padding=padding, groups=groups) + self.norm = BatchNorm2d(out_features, affine=True) + + def forward(self, x): + out = self.conv(x) + out = self.norm(out) + out = F.relu(out) + return out + + +class Encoder(nn.Module): + """ + Hourglass Encoder + """ + + def __init__(self, block_expansion, in_features, num_blocks=3, max_features=256): + super(Encoder, self).__init__() + + down_blocks = [] + for i in range(num_blocks): + down_blocks.append(DownBlock2d(in_features if i == 0 else min(max_features, block_expansion * (2 ** i)), + min(max_features, block_expansion * (2 ** (i + 1))), + kernel_size=3, padding=1)) + self.down_blocks = nn.ModuleList(down_blocks) + + def forward(self, x): + outs = [x] + for down_block in self.down_blocks: + outs.append(down_block(outs[-1])) + return outs + + +class Decoder(nn.Module): + """ + Hourglass Decoder + """ + + def __init__(self, block_expansion, in_features, num_blocks=3, max_features=256): + super(Decoder, self).__init__() + + up_blocks = [] + + for i in range(num_blocks)[::-1]: + in_filters = (1 if i == num_blocks - 1 else 2) * min(max_features, block_expansion * (2 ** (i + 1))) + out_filters = min(max_features, block_expansion * (2 ** i)) + up_blocks.append(UpBlock2d(in_filters, out_filters, kernel_size=3, padding=1)) + + self.up_blocks = nn.ModuleList(up_blocks) + self.out_filters = block_expansion + in_features + + def forward(self, x): + out = x.pop() + for up_block in self.up_blocks: + out = up_block(out) + skip = x.pop() + out = torch.cat([out, skip], dim=1) + return out + + +class Hourglass(nn.Module): + """ + Hourglass architecture. 
+ """ + + def __init__(self, block_expansion, in_features, num_blocks=3, max_features=256): + super(Hourglass, self).__init__() + self.encoder = Encoder(block_expansion, in_features, num_blocks, max_features) + self.decoder = Decoder(block_expansion, in_features, num_blocks, max_features) + self.out_filters = self.decoder.out_filters + + def forward(self, x): + return self.decoder(self.encoder(x)) + + +class AntiAliasInterpolation2d(nn.Module): + """ + Band-limited downsampling, for better preservation of the input signal. + """ + def __init__(self, channels, scale): + super(AntiAliasInterpolation2d, self).__init__() + sigma = (1 / scale - 1) / 2 + kernel_size = 2 * round(sigma * 4) + 1 + self.ka = kernel_size // 2 + self.kb = self.ka - 1 if kernel_size % 2 == 0 else self.ka + + kernel_size = [kernel_size, kernel_size] + sigma = [sigma, sigma] + # The gaussian kernel is the product of the + # gaussian function of each dimension. + kernel = 1 + meshgrids = torch.meshgrid( + [ + torch.arange(size, dtype=torch.float32) + for size in kernel_size + ] + ) + for size, std, mgrid in zip(kernel_size, sigma, meshgrids): + mean = (size - 1) / 2 + kernel *= torch.exp(-(mgrid - mean) ** 2 / (2 * std ** 2)) + + # Make sure sum of values in gaussian kernel equals 1. 
+ kernel = kernel / torch.sum(kernel) + # Reshape to depthwise convolutional weight + kernel = kernel.view(1, 1, *kernel.size()) + kernel = kernel.repeat(channels, *[1] * (kernel.dim() - 1)) + + self.register_buffer('weight', kernel) + self.groups = channels + self.scale = scale + inv_scale = 1 / scale + self.int_inv_scale = int(inv_scale) + + def forward(self, input): + if self.scale == 1.0: + return input + + out = F.pad(input, (self.ka, self.kb, self.ka, self.kb)) + out = F.conv2d(out, weight=self.weight, groups=self.groups) + out = out[:, :, ::self.int_inv_scale, ::self.int_inv_scale] + + return out diff --git a/firstordermodel/old_demo.ipynb b/firstordermodel/old_demo.ipynb new file mode 100644 index 0000000..13c0b6e --- /dev/null +++ b/firstordermodel/old_demo.ipynb @@ -0,0 +1,12226 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "first-order-model-demo.ipynb", + "provenance": [], + "toc_visible": true, + "include_colab_link": true + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "cdO_RxQZLahB" + }, + "source": [ + "# Demo for paper \"First Order Motion Model for Image Animation\"" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "GCDNKsEGLtR6" + }, + "source": [ + "**Clone repository**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "UCMFMJV7K-ag", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 104 + }, + "outputId": "836efe50-65d7-4c95-a17e-13fc3922b491" + }, + "source": [ + "!git clone https://github.com/AliaksandrSiarohin/first-order-model" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Cloning into 'first-order-model'...\n", + "remote: Enumerating objects: 246, 
done.\u001b[K\n", + "remote: Total 246 (delta 0), reused 0 (delta 0), pack-reused 246\u001b[K\n", + "Receiving objects: 100% (246/246), 71.46 MiB | 22.54 MiB/s, done.\n", + "Resolving deltas: 100% (121/121), done.\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "PBp6l_4bBYUL", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "outputId": "702bdf2c-d6d2-41ec-cf3c-9196452544d8" + }, + "source": [ + "cd first-order-model" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/content/first-order-model\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "IcMX7ueZO0Oa" + }, + "source": [ + "**Mount your Google drive folder on Colab**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "tDbMA8R9OuUo", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "outputId": "f92ce039-d693-4e03-879b-d0d49a121da2" + }, + "source": [ + "from google.colab import drive\n", + "drive.mount('/content/gdrive')" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Mounted at /content/gdrive\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "VsgVK1EURXkd" + }, + "source": [ + "**Add folder https://drive.google.com/drive/folders/1kZ1gCnpfU0BnpdU47pLM_TQ6RypDDqgw?usp=sharing to your google drive.\n", + "Alternativelly you can use this mirror link https://drive.google.com/drive/folders/16inDpBRPT1UC0YMGMX3dKvRnOUsf5Dhn?usp=sharing**" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "rW-ipQXPOWUo" + }, + "source": [ + "**Load driving video and source image**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Oxi6-riLOgnm", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 453 + }, + "outputId": "d38a8850-9eb1-4de4-9bf2-24cbd847ca1f" + }, + "source": [ + "import imageio\n", + "import numpy 
as np\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib.animation as animation\n", + "from skimage.transform import resize\n", + "from IPython.display import HTML\n", + "import warnings\n", + "warnings.filterwarnings(\"ignore\")\n", + "\n", + "source_image = imageio.imread('/content/gdrive/My Drive/first-order-motion-model/02.png')\n", + "reader = imageio.get_reader('/content/gdrive/My Drive/first-order-motion-model/04.mp4')\n", + "\n", + "\n", + "#Resize image and video to 256x256\n", + "\n", + "source_image = resize(source_image, (256, 256))[..., :3]\n", + "\n", + "fps = reader.get_meta_data()['fps']\n", + "driving_video = []\n", + "try:\n", + " for im in reader:\n", + " driving_video.append(im)\n", + "except RuntimeError:\n", + " pass\n", + "reader.close()\n", + "\n", + "driving_video = [resize(frame, (256, 256))[..., :3] for frame in driving_video]\n", + "\n", + "def display(source, driving, generated=None):\n", + " fig = plt.figure(figsize=(8 + 4 * (generated is not None), 6))\n", + "\n", + " ims = []\n", + " for i in range(len(driving)):\n", + " cols = [source]\n", + " cols.append(driving[i])\n", + " if generated is not None:\n", + " cols.append(generated[i])\n", + " im = plt.imshow(np.concatenate(cols, axis=1), animated=True)\n", + " plt.axis('off')\n", + " ims.append([im])\n", + "\n", + " ani = animation.ArtistAnimation(fig, ims, interval=50, repeat_delay=1000)\n", + " plt.close()\n", + " return ani\n", + " \n", + "\n", + "HTML(display(source_image, driving_video).to_html5_video())" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 5 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "xjM7ubVfWrwT" + }, + "source": [ + "**Create a model and load checkpoints**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "3FQiXqQPWt5B" + }, + "source": [ + "from demo 
import load_checkpoints\n", + "generator, kp_detector = load_checkpoints(config_path='config/vox-256.yaml', \n", + " checkpoint_path='/content/gdrive/My Drive/first-order-motion-model/vox-cpk.pth.tar')" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "fdFdasHEj3t7" + }, + "source": [ + "**Perform image animation**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "SB12II11kF4c", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 471 + }, + "outputId": "9e2274aa-fd55-4eed-cb50-bec72fcfb8b9" + }, + "source": [ + "from demo import make_animation\n", + "from skimage import img_as_ubyte\n", + "\n", + "predictions = make_animation(source_image, driving_video, generator, kp_detector, relative=True)\n", + "\n", + "#save resulting video\n", + "imageio.mimsave('../generated.mp4', [img_as_ubyte(frame) for frame in predictions], fps=fps)\n", + "#video can be downloaded from /content folder\n", + "\n", + "HTML(display(source_image, driving_video, predictions).to_html5_video())" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ + "100%|██████████| 211/211 [00:26<00:00, 7.92it/s]\n" + ], + "name": "stderr" + }, + { + "output_type": "execute_result", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 7 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "-tJN01xQCpqH" + }, + "source": [ + "**In the cell above we use relative keypoint displacement to animate the objects. We can use absolute coordinates instead, but in this way all the object proporions will be inherited from the driving video. 
For example Putin haircut will be extended to match Trump haircut.**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "aOE_W_kfC9aX", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 471 + }, + "outputId": "f472a888-0200-4b21-b6d2-b6f6737bc9e5" + }, + "source": [ + "predictions = make_animation(source_image, driving_video, generator, kp_detector, relative=False, adapt_movement_scale=True)\n", + "HTML(display(source_image, driving_video, predictions).to_html5_video())" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ + "100%|██████████| 211/211 [00:26<00:00, 7.90it/s]\n" + ], + "name": "stderr" + }, + { + "output_type": "execute_result", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 8 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "QnXrecuX6_Kw" + }, + "source": [ + "## Running on your data\n", + "\n", + "**First we need to crop a face from both source image and video, while simple graphic editor like paint can be used for cropping from image. Cropping from video is more complicated. 
You can use ffpmeg for this.**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "brJlA_5o72Xc", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "c2becb7e-e2ee-4651-ee8a-f906b5663417" + }, + "source": [ + "!ffmpeg -i /content/gdrive/My\\ Drive/first-order-motion-model/07.mkv -ss 00:08:57.50 -t 00:00:08 -filter:v \"crop=600:600:760:50\" -async 1 hinton.mp4" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ + "ffmpeg version 3.4.8-0ubuntu0.2 Copyright (c) 2000-2020 the FFmpeg developers\n", + " built with gcc 7 (Ubuntu 7.5.0-3ubuntu1~18.04)\n", + " configuration: --prefix=/usr --extra-version=0ubuntu0.2 --toolchain=hardened --libdir=/usr/lib/x86_64-linux-gnu --incdir=/usr/include/x86_64-linux-gnu --enable-gpl --disable-stripping --enable-avresample --enable-avisynth --enable-gnutls --enable-ladspa --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --enable-libcdio --enable-libflite --enable-libfontconfig --enable-libfreetype --enable-libfribidi --enable-libgme --enable-libgsm --enable-libmp3lame --enable-libmysofa --enable-libopenjpeg --enable-libopenmpt --enable-libopus --enable-libpulse --enable-librubberband --enable-librsvg --enable-libshine --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libssh --enable-libtheora --enable-libtwolame --enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx265 --enable-libxml2 --enable-libxvid --enable-libzmq --enable-libzvbi --enable-omx --enable-openal --enable-opengl --enable-sdl2 --enable-libdc1394 --enable-libdrm --enable-libiec61883 --enable-chromaprint --enable-frei0r --enable-libopencv --enable-libx264 --enable-shared\n", + " libavutil 55. 78.100 / 55. 78.100\n", + " libavcodec 57.107.100 / 57.107.100\n", + " libavformat 57. 83.100 / 57. 83.100\n", + " libavdevice 57. 10.100 / 57. 10.100\n", + " libavfilter 6.107.100 / 6.107.100\n", + " libavresample 3. 7. 0 / 3. 7. 
0\n", + " libswscale 4. 8.100 / 4. 8.100\n", + " libswresample 2. 9.100 / 2. 9.100\n", + " libpostproc 54. 7.100 / 54. 7.100\n", + "Input #0, matroska,webm, from '/content/gdrive/My Drive/first-order-motion-model/07.mkv':\n", + " Metadata:\n", + " ENCODER : Lavf57.83.100\n", + " Duration: 00:14:59.73, start: 0.000000, bitrate: 2343 kb/s\n", + " Stream #0:0(eng): Video: vp9 (Profile 0), yuv420p(tv, bt709), 1920x1080, SAR 1:1 DAR 16:9, 29.97 fps, 29.97 tbr, 1k tbn, 1k tbc (default)\n", + " Metadata:\n", + " DURATION : 00:14:59.665000000\n", + " Stream #0:1(eng): Audio: aac (LC), 44100 Hz, stereo, fltp (default)\n", + " Metadata:\n", + " HANDLER_NAME : SoundHandler\n", + " DURATION : 00:14:59.727000000\n", + "Stream mapping:\n", + " Stream #0:0 -> #0:0 (vp9 (native) -> h264 (libx264))\n", + " Stream #0:1 -> #0:1 (aac (native) -> aac (native))\n", + "Press [q] to stop, [?] for help\n", + "-async is forwarded to lavfi similarly to -af aresample=async=1:min_hard_comp=0.100000:first_pts=0.\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0musing SAR=1/1\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0musing cpu capabilities: MMX2 SSE2Fast SSSE3 SSE4.2 AVX FMA3 BMI2 AVX2\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mprofile High, level 3.1\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0m264 - core 152 r2854 e9a5903 - H.264/MPEG-4 AVC codec - Copyleft 2003-2017 - http://www.videolan.org/x264.html - options: cabac=1 ref=3 deblock=1:0:0 analyse=0x3:0x113 me=hex subme=7 psy=1 psy_rd=1.00:0.00 mixed_ref=1 me_range=16 chroma_me=1 trellis=1 8x8dct=1 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=-2 threads=3 lookahead_threads=1 sliced_threads=0 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=3 b_pyramid=2 b_adapt=1 b_bias=0 direct=1 weightb=1 open_gop=0 weightp=2 keyint=250 keyint_min=25 scenecut=40 intra_refresh=0 rc_lookahead=40 rc=crf mbtree=1 crf=23.0 qcomp=0.60 qpmin=0 qpmax=69 qpstep=4 ip_ratio=1.40 aq=1:1.00\n", + 
"Output #0, mp4, to 'hinton.mp4':\n", + " Metadata:\n", + " encoder : Lavf57.83.100\n", + " Stream #0:0(eng): Video: h264 (libx264) (avc1 / 0x31637661), yuv420p, 600x600 [SAR 1:1 DAR 1:1], q=-1--1, 29.97 fps, 30k tbn, 29.97 tbc (default)\n", + " Metadata:\n", + " DURATION : 00:14:59.665000000\n", + " encoder : Lavc57.107.100 libx264\n", + " Side data:\n", + " cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: -1\n", + " Stream #0:1(eng): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, stereo, fltp, 128 kb/s (default)\n", + " Metadata:\n", + " HANDLER_NAME : SoundHandler\n", + " DURATION : 00:14:59.727000000\n", + " encoder : Lavc57.107.100 aac\n", + "frame= 240 fps=2.5 q=-1.0 Lsize= 1301kB time=00:00:08.01 bitrate=1330.6kbits/s speed=0.0844x \n", + "video:1166kB audio:125kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.761764%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mframe I:1 Avg QP:22.44 size: 28019\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mframe P:62 Avg QP:23.31 size: 12894\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mframe B:177 Avg QP:28.63 size: 2068\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mconsecutive B-frames: 0.8% 1.7% 2.5% 95.0%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mmb I I16..4: 12.7% 76.2% 11.1%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mmb P I16..4: 1.9% 8.9% 1.1% P16..4: 35.3% 21.3% 10.8% 0.0% 0.0% skip:20.7%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mmb B I16..4: 0.0% 0.1% 0.0% B16..8: 39.1% 5.4% 1.0% direct: 1.4% skip:52.9% L0:35.4% L1:48.5% BI:16.2%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0m8x8 transform intra:75.2% inter:77.3%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mcoded y,uvDC,uvAC intra: 61.9% 52.1% 5.8% inter: 15.2% 6.9% 0.0%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mi16 v,h,dc,p: 69% 8% 8% 15%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mi8 v,h,dc,ddl,ddr,vr,hd,vl,hu: 25% 
10% 19% 5% 8% 11% 8% 9% 6%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mi4 v,h,dc,ddl,ddr,vr,hd,vl,hu: 23% 8% 11% 5% 12% 21% 7% 9% 4%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mi8c dc,h,v,p: 53% 20% 19% 8%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mWeighted P-Frames: Y:21.0% UV:1.6%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mref P L0: 57.9% 21.2% 14.0% 5.9% 1.1%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mref B L0: 93.5% 5.3% 1.2%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mref B L1: 97.4% 2.6%\n", + "\u001b[1;36m[libx264 @ 0x55709b1c4800] \u001b[0mkb/s:1192.28\n", + "\u001b[1;36m[aac @ 0x55709b1c5700] \u001b[0mQavg: 534.430\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "NSHSxV8iGybI" + }, + "source": [ + "**Another posibility is to use some screen recording tool, or if you need to crop many images at ones use face detector(https://github.com/1adrianb/face-alignment) , see https://github.com/AliaksandrSiarohin/video-preprocessing for preprcessing of VoxCeleb.** " + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "d8kQ3U7MHqh-", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 471 + }, + "outputId": "8890e858-ac38-4d59-b014-cc4ed974f276" + }, + "source": [ + "source_image = imageio.imread('/content/gdrive/My Drive/first-order-motion-model/09.png')\n", + "driving_video = imageio.mimread('hinton.mp4', memtest=False)\n", + "\n", + "\n", + "#Resize image and video to 256x256\n", + "\n", + "source_image = resize(source_image, (256, 256))[..., :3]\n", + "driving_video = [resize(frame, (256, 256))[..., :3] for frame in driving_video]\n", + "\n", + "predictions = make_animation(source_image, driving_video, generator, kp_detector, relative=True,\n", + " adapt_movement_scale=True)\n", + "\n", + "HTML(display(source_image, driving_video, predictions).to_html5_video())" + ], + "execution_count": null, + "outputs": [ + { + "output_type": 
"stream", + "text": [ + "100%|██████████| 240/240 [00:30<00:00, 7.94it/s]\n" + ], + "name": "stderr" + }, + { + "output_type": "execute_result", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 10 + } + ] + } + ] +} \ No newline at end of file diff --git a/firstordermodel/reconstruction.py b/firstordermodel/reconstruction.py new file mode 100644 index 0000000..cb211df --- /dev/null +++ b/firstordermodel/reconstruction.py @@ -0,0 +1,67 @@ +import os +from tqdm import tqdm +import torch +from torch.utils.data import DataLoader +from logger import Logger, Visualizer +import numpy as np +import imageio +from sync_batchnorm import DataParallelWithCallback + + +def reconstruction(config, generator, kp_detector, checkpoint, log_dir, dataset): + png_dir = os.path.join(log_dir, 'reconstruction/png') + log_dir = os.path.join(log_dir, 'reconstruction') + + if checkpoint is not None: + Logger.load_cpk(checkpoint, generator=generator, kp_detector=kp_detector) + else: + raise AttributeError("Checkpoint should be specified for mode='reconstruction'.") + dataloader = DataLoader(dataset, batch_size=1, shuffle=False, num_workers=1) + + if not os.path.exists(log_dir): + os.makedirs(log_dir) + + if not os.path.exists(png_dir): + os.makedirs(png_dir) + + loss_list = [] + if torch.cuda.is_available(): + generator = DataParallelWithCallback(generator) + kp_detector = DataParallelWithCallback(kp_detector) + + generator.eval() + kp_detector.eval() + + for it, x in tqdm(enumerate(dataloader)): + if config['reconstruction_params']['num_videos'] is not None: + if it > config['reconstruction_params']['num_videos']: + break + with torch.no_grad(): + predictions = [] + visualizations = [] + if torch.cuda.is_available(): + x['video'] = x['video'].cuda() + kp_source = kp_detector(x['video'][:, :, 0]) + for frame_idx in range(x['video'].shape[2]): + source = x['video'][:, :, 0] + driving = x['video'][:, :, frame_idx] + 
kp_driving = kp_detector(driving) + out = generator(source, kp_source=kp_source, kp_driving=kp_driving) + out['kp_source'] = kp_source + out['kp_driving'] = kp_driving + del out['sparse_deformed'] + predictions.append(np.transpose(out['prediction'].data.cpu().numpy(), [0, 2, 3, 1])[0]) + + visualization = Visualizer(**config['visualizer_params']).visualize(source=source, + driving=driving, out=out) + visualizations.append(visualization) + + loss_list.append(torch.abs(out['prediction'] - driving).mean().cpu().numpy()) + + predictions = np.concatenate(predictions, axis=1) + imageio.imsave(os.path.join(png_dir, x['name'][0] + '.png'), (255 * predictions).astype(np.uint8)) + + image_name = x['name'][0] + config['reconstruction_params']['format'] + imageio.mimsave(os.path.join(log_dir, image_name), visualizations) + + print("Reconstruction loss: %s" % np.mean(loss_list)) diff --git a/firstordermodel/requirements.txt b/firstordermodel/requirements.txt new file mode 100644 index 0000000..10755c6 --- /dev/null +++ b/firstordermodel/requirements.txt @@ -0,0 +1,15 @@ +ffmpeg-python==0.2.0 +imageio==2.22.0 +imageio-ffmpeg==0.4.7 +matplotlib==3.6.0 +numpy==1.23.3 +pandas==1.5.0 +python-dateutil==2.8.2 +pytz==2022.2.1 +PyYAML==6.0 +scikit-image==0.19.3 +scikit-learn==1.1.2 +scipy==1.9.1 +torch==1.12.1 +torchvision==0.13.1 +tqdm==4.64.1 diff --git a/firstordermodel/run.py b/firstordermodel/run.py new file mode 100644 index 0000000..67cb828 --- /dev/null +++ b/firstordermodel/run.py @@ -0,0 +1,87 @@ +import matplotlib + +matplotlib.use('Agg') + +import os, sys +import yaml +from argparse import ArgumentParser +from time import gmtime, strftime +from shutil import copy + +from frames_dataset import FramesDataset + +from modules.generator import OcclusionAwareGenerator +from modules.discriminator import MultiScaleDiscriminator +from modules.keypoint_detector import KPDetector + +import torch + +from train import train +from reconstruction import reconstruction +from animate import 
animate + +if __name__ == "__main__": + + if sys.version_info[0] < 3: + raise Exception("You must use Python 3 or higher. Recommended version is Python 3.7") + + parser = ArgumentParser() + parser.add_argument("--config", required=True, help="path to config") + parser.add_argument("--mode", default="train", choices=["train", "reconstruction", "animate"]) + parser.add_argument("--log_dir", default='log', help="path to log into") + parser.add_argument("--checkpoint", default=None, help="path to checkpoint to restore") + parser.add_argument("--device_ids", default="0", type=lambda x: list(map(int, x.split(','))), + help="Names of the devices comma separated.") + parser.add_argument("--verbose", dest="verbose", action="store_true", help="Print model architecture") + parser.set_defaults(verbose=False) + + opt = parser.parse_args() + with open(opt.config) as f: + config = yaml.load(f) + + if opt.checkpoint is not None: + log_dir = os.path.join(*os.path.split(opt.checkpoint)[:-1]) + else: + log_dir = os.path.join(opt.log_dir, os.path.basename(opt.config).split('.')[0]) + log_dir += ' ' + strftime("%d_%m_%y_%H.%M.%S", gmtime()) + + generator = OcclusionAwareGenerator(**config['model_params']['generator_params'], + **config['model_params']['common_params']) + + if torch.cuda.is_available(): + generator.to(opt.device_ids[0]) + if opt.verbose: + print(generator) + + discriminator = MultiScaleDiscriminator(**config['model_params']['discriminator_params'], + **config['model_params']['common_params']) + if torch.cuda.is_available(): + discriminator.to(opt.device_ids[0]) + if opt.verbose: + print(discriminator) + + kp_detector = KPDetector(**config['model_params']['kp_detector_params'], + **config['model_params']['common_params']) + + if torch.cuda.is_available(): + kp_detector.to(opt.device_ids[0]) + + if opt.verbose: + print(kp_detector) + + dataset = FramesDataset(is_train=(opt.mode == 'train'), **config['dataset_params']) + + if not os.path.exists(log_dir): + 
os.makedirs(log_dir) + if not os.path.exists(os.path.join(log_dir, os.path.basename(opt.config))): + copy(opt.config, log_dir) + + if opt.mode == 'train': + print("Training...") + train(config, generator, discriminator, kp_detector, opt.checkpoint, log_dir, dataset, opt.device_ids) + elif opt.mode == 'reconstruction': + print("Reconstruction...") + reconstruction(config, generator, kp_detector, opt.checkpoint, log_dir, dataset) + elif opt.mode == 'animate': + print("Animate...") + animate(config, generator, kp_detector, opt.checkpoint, log_dir, dataset) diff --git a/firstordermodel/sup-mat/absolute-demo.gif b/firstordermodel/sup-mat/absolute-demo.gif new file mode 100644 index 0000000..113bd00 Binary files /dev/null and b/firstordermodel/sup-mat/absolute-demo.gif differ diff --git a/firstordermodel/sup-mat/face-swap.gif b/firstordermodel/sup-mat/face-swap.gif new file mode 100644 index 0000000..575047f Binary files /dev/null and b/firstordermodel/sup-mat/face-swap.gif differ diff --git a/firstordermodel/sup-mat/fashion-teaser.gif b/firstordermodel/sup-mat/fashion-teaser.gif new file mode 100644 index 0000000..16f545e Binary files /dev/null and b/firstordermodel/sup-mat/fashion-teaser.gif differ diff --git a/firstordermodel/sup-mat/mgif-teaser.gif b/firstordermodel/sup-mat/mgif-teaser.gif new file mode 100644 index 0000000..625da84 Binary files /dev/null and b/firstordermodel/sup-mat/mgif-teaser.gif differ diff --git a/firstordermodel/sup-mat/relative-demo.gif b/firstordermodel/sup-mat/relative-demo.gif new file mode 100644 index 0000000..9a635cf Binary files /dev/null and b/firstordermodel/sup-mat/relative-demo.gif differ diff --git a/firstordermodel/sup-mat/vox-teaser.gif b/firstordermodel/sup-mat/vox-teaser.gif new file mode 100644 index 0000000..fc0ce3d Binary files /dev/null and b/firstordermodel/sup-mat/vox-teaser.gif differ diff --git a/firstordermodel/sync_batchnorm/__init__.py b/firstordermodel/sync_batchnorm/__init__.py new file mode 100644 index 
0000000..bc8709d --- /dev/null +++ b/firstordermodel/sync_batchnorm/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +# File : __init__.py +# Author : Jiayuan Mao +# Email : maojiayuan@gmail.com +# Date : 27/01/2018 +# +# This file is part of Synchronized-BatchNorm-PyTorch. +# https://github.com/vacancy/Synchronized-BatchNorm-PyTorch +# Distributed under MIT License. + +from .batchnorm import SynchronizedBatchNorm1d, SynchronizedBatchNorm2d, SynchronizedBatchNorm3d +from .replicate import DataParallelWithCallback, patch_replication_callback diff --git a/firstordermodel/sync_batchnorm/batchnorm.py b/firstordermodel/sync_batchnorm/batchnorm.py new file mode 100644 index 0000000..5f4e763 --- /dev/null +++ b/firstordermodel/sync_batchnorm/batchnorm.py @@ -0,0 +1,315 @@ +# -*- coding: utf-8 -*- +# File : batchnorm.py +# Author : Jiayuan Mao +# Email : maojiayuan@gmail.com +# Date : 27/01/2018 +# +# This file is part of Synchronized-BatchNorm-PyTorch. +# https://github.com/vacancy/Synchronized-BatchNorm-PyTorch +# Distributed under MIT License. 
+ +import collections + +import torch +import torch.nn.functional as F + +from torch.nn.modules.batchnorm import _BatchNorm +from torch.nn.parallel._functions import ReduceAddCoalesced, Broadcast + +from .comm import SyncMaster + +__all__ = ['SynchronizedBatchNorm1d', 'SynchronizedBatchNorm2d', 'SynchronizedBatchNorm3d'] + + +def _sum_ft(tensor): + """sum over the first and last dimention""" + return tensor.sum(dim=0).sum(dim=-1) + + +def _unsqueeze_ft(tensor): + """add new dementions at the front and the tail""" + return tensor.unsqueeze(0).unsqueeze(-1) + + +_ChildMessage = collections.namedtuple('_ChildMessage', ['sum', 'ssum', 'sum_size']) +_MasterMessage = collections.namedtuple('_MasterMessage', ['sum', 'inv_std']) + + +class _SynchronizedBatchNorm(_BatchNorm): + def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True): + super(_SynchronizedBatchNorm, self).__init__(num_features, eps=eps, momentum=momentum, affine=affine) + + self._sync_master = SyncMaster(self._data_parallel_master) + + self._is_parallel = False + self._parallel_id = None + self._slave_pipe = None + + def forward(self, input): + # If it is not parallel computation or is in evaluation mode, use PyTorch's implementation. + if not (self._is_parallel and self.training): + return F.batch_norm( + input, self.running_mean, self.running_var, self.weight, self.bias, + self.training, self.momentum, self.eps) + + # Resize the input to (B, C, -1). + input_shape = input.size() + input = input.view(input.size(0), self.num_features, -1) + + # Compute the sum and square-sum. + sum_size = input.size(0) * input.size(2) + input_sum = _sum_ft(input) + input_ssum = _sum_ft(input ** 2) + + # Reduce-and-broadcast the statistics. + if self._parallel_id == 0: + mean, inv_std = self._sync_master.run_master(_ChildMessage(input_sum, input_ssum, sum_size)) + else: + mean, inv_std = self._slave_pipe.run_slave(_ChildMessage(input_sum, input_ssum, sum_size)) + + # Compute the output. 
+ if self.affine: + # MJY:: Fuse the multiplication for speed. + output = (input - _unsqueeze_ft(mean)) * _unsqueeze_ft(inv_std * self.weight) + _unsqueeze_ft(self.bias) + else: + output = (input - _unsqueeze_ft(mean)) * _unsqueeze_ft(inv_std) + + # Reshape it. + return output.view(input_shape) + + def __data_parallel_replicate__(self, ctx, copy_id): + self._is_parallel = True + self._parallel_id = copy_id + + # parallel_id == 0 means master device. + if self._parallel_id == 0: + ctx.sync_master = self._sync_master + else: + self._slave_pipe = ctx.sync_master.register_slave(copy_id) + + def _data_parallel_master(self, intermediates): + """Reduce the sum and square-sum, compute the statistics, and broadcast it.""" + + # Always using same "device order" makes the ReduceAdd operation faster. + # Thanks to:: Tete Xiao (http://tetexiao.com/) + intermediates = sorted(intermediates, key=lambda i: i[1].sum.get_device()) + + to_reduce = [i[1][:2] for i in intermediates] + to_reduce = [j for i in to_reduce for j in i] # flatten + target_gpus = [i[1].sum.get_device() for i in intermediates] + + sum_size = sum([i[1].sum_size for i in intermediates]) + sum_, ssum = ReduceAddCoalesced.apply(target_gpus[0], 2, *to_reduce) + mean, inv_std = self._compute_mean_std(sum_, ssum, sum_size) + + broadcasted = Broadcast.apply(target_gpus, mean, inv_std) + + outputs = [] + for i, rec in enumerate(intermediates): + outputs.append((rec[0], _MasterMessage(*broadcasted[i*2:i*2+2]))) + + return outputs + + def _compute_mean_std(self, sum_, ssum, size): + """Compute the mean and standard-deviation with sum and square-sum. This method + also maintains the moving average on the master device.""" + assert size > 1, 'BatchNorm computes unbiased standard-deviation, which requires size > 1.' 
+ mean = sum_ / size + sumvar = ssum - sum_ * mean + unbias_var = sumvar / (size - 1) + bias_var = sumvar / size + + self.running_mean = (1 - self.momentum) * self.running_mean + self.momentum * mean.data + self.running_var = (1 - self.momentum) * self.running_var + self.momentum * unbias_var.data + + return mean, bias_var.clamp(self.eps) ** -0.5 + + +class SynchronizedBatchNorm1d(_SynchronizedBatchNorm): + r"""Applies Synchronized Batch Normalization over a 2d or 3d input that is seen as a + mini-batch. + + .. math:: + + y = \frac{x - mean[x]}{ \sqrt{Var[x] + \epsilon}} * gamma + beta + + This module differs from the built-in PyTorch BatchNorm1d as the mean and + standard-deviation are reduced across all devices during training. + + For example, when one uses `nn.DataParallel` to wrap the network during + training, PyTorch's implementation normalize the tensor on each device using + the statistics only on that device, which accelerated the computation and + is also easy to implement, but the statistics might be inaccurate. + Instead, in this synchronized version, the statistics will be computed + over all training samples distributed on multiple devices. + + Note that, for one-GPU or CPU-only case, this module behaves exactly same + as the built-in PyTorch implementation. + + The mean and standard-deviation are calculated per-dimension over + the mini-batches and gamma and beta are learnable parameter vectors + of size C (where C is the input size). + + During training, this layer keeps a running estimate of its computed mean + and variance. The running sum is kept with a default momentum of 0.1. + + During evaluation, this running mean/variance is used for normalization. 
+ + Because the BatchNorm is done over the `C` dimension, computing statistics + on `(N, L)` slices, it's common terminology to call this Temporal BatchNorm + + Args: + num_features: num_features from an expected input of size + `batch_size x num_features [x width]` + eps: a value added to the denominator for numerical stability. + Default: 1e-5 + momentum: the value used for the running_mean and running_var + computation. Default: 0.1 + affine: a boolean value that when set to ``True``, gives the layer learnable + affine parameters. Default: ``True`` + + Shape: + - Input: :math:`(N, C)` or :math:`(N, C, L)` + - Output: :math:`(N, C)` or :math:`(N, C, L)` (same shape as input) + + Examples: + >>> # With Learnable Parameters + >>> m = SynchronizedBatchNorm1d(100) + >>> # Without Learnable Parameters + >>> m = SynchronizedBatchNorm1d(100, affine=False) + >>> input = torch.autograd.Variable(torch.randn(20, 100)) + >>> output = m(input) + """ + + def _check_input_dim(self, input): + if input.dim() != 2 and input.dim() != 3: + raise ValueError('expected 2D or 3D input (got {}D input)' + .format(input.dim())) + super(SynchronizedBatchNorm1d, self)._check_input_dim(input) + + +class SynchronizedBatchNorm2d(_SynchronizedBatchNorm): + r"""Applies Batch Normalization over a 4d input that is seen as a mini-batch + of 3d inputs + + .. math:: + + y = \frac{x - mean[x]}{ \sqrt{Var[x] + \epsilon}} * gamma + beta + + This module differs from the built-in PyTorch BatchNorm2d as the mean and + standard-deviation are reduced across all devices during training. + + For example, when one uses `nn.DataParallel` to wrap the network during + training, PyTorch's implementation normalize the tensor on each device using + the statistics only on that device, which accelerated the computation and + is also easy to implement, but the statistics might be inaccurate. 
+ Instead, in this synchronized version, the statistics will be computed + over all training samples distributed on multiple devices. + + Note that, for one-GPU or CPU-only case, this module behaves exactly same + as the built-in PyTorch implementation. + + The mean and standard-deviation are calculated per-dimension over + the mini-batches and gamma and beta are learnable parameter vectors + of size C (where C is the input size). + + During training, this layer keeps a running estimate of its computed mean + and variance. The running sum is kept with a default momentum of 0.1. + + During evaluation, this running mean/variance is used for normalization. + + Because the BatchNorm is done over the `C` dimension, computing statistics + on `(N, H, W)` slices, it's common terminology to call this Spatial BatchNorm + + Args: + num_features: num_features from an expected input of + size batch_size x num_features x height x width + eps: a value added to the denominator for numerical stability. + Default: 1e-5 + momentum: the value used for the running_mean and running_var + computation. Default: 0.1 + affine: a boolean value that when set to ``True``, gives the layer learnable + affine parameters. Default: ``True`` + + Shape: + - Input: :math:`(N, C, H, W)` + - Output: :math:`(N, C, H, W)` (same shape as input) + + Examples: + >>> # With Learnable Parameters + >>> m = SynchronizedBatchNorm2d(100) + >>> # Without Learnable Parameters + >>> m = SynchronizedBatchNorm2d(100, affine=False) + >>> input = torch.autograd.Variable(torch.randn(20, 100, 35, 45)) + >>> output = m(input) + """ + + def _check_input_dim(self, input): + if input.dim() != 4: + raise ValueError('expected 4D input (got {}D input)' + .format(input.dim())) + super(SynchronizedBatchNorm2d, self)._check_input_dim(input) + + +class SynchronizedBatchNorm3d(_SynchronizedBatchNorm): + r"""Applies Batch Normalization over a 5d input that is seen as a mini-batch + of 4d inputs + + .. 
math:: + + y = \frac{x - mean[x]}{ \sqrt{Var[x] + \epsilon}} * gamma + beta + + This module differs from the built-in PyTorch BatchNorm3d as the mean and + standard-deviation are reduced across all devices during training. + + For example, when one uses `nn.DataParallel` to wrap the network during + training, PyTorch's implementation normalize the tensor on each device using + the statistics only on that device, which accelerated the computation and + is also easy to implement, but the statistics might be inaccurate. + Instead, in this synchronized version, the statistics will be computed + over all training samples distributed on multiple devices. + + Note that, for one-GPU or CPU-only case, this module behaves exactly same + as the built-in PyTorch implementation. + + The mean and standard-deviation are calculated per-dimension over + the mini-batches and gamma and beta are learnable parameter vectors + of size C (where C is the input size). + + During training, this layer keeps a running estimate of its computed mean + and variance. The running sum is kept with a default momentum of 0.1. + + During evaluation, this running mean/variance is used for normalization. + + Because the BatchNorm is done over the `C` dimension, computing statistics + on `(N, D, H, W)` slices, it's common terminology to call this Volumetric BatchNorm + or Spatio-temporal BatchNorm + + Args: + num_features: num_features from an expected input of + size batch_size x num_features x depth x height x width + eps: a value added to the denominator for numerical stability. + Default: 1e-5 + momentum: the value used for the running_mean and running_var + computation. Default: 0.1 + affine: a boolean value that when set to ``True``, gives the layer learnable + affine parameters. 
Default: ``True`` + + Shape: + - Input: :math:`(N, C, D, H, W)` + - Output: :math:`(N, C, D, H, W)` (same shape as input) + + Examples: + >>> # With Learnable Parameters + >>> m = SynchronizedBatchNorm3d(100) + >>> # Without Learnable Parameters + >>> m = SynchronizedBatchNorm3d(100, affine=False) + >>> input = torch.autograd.Variable(torch.randn(20, 100, 35, 45, 10)) + >>> output = m(input) + """ + + def _check_input_dim(self, input): + if input.dim() != 5: + raise ValueError('expected 5D input (got {}D input)' + .format(input.dim())) + super(SynchronizedBatchNorm3d, self)._check_input_dim(input) diff --git a/firstordermodel/sync_batchnorm/comm.py b/firstordermodel/sync_batchnorm/comm.py new file mode 100644 index 0000000..922f8c4 --- /dev/null +++ b/firstordermodel/sync_batchnorm/comm.py @@ -0,0 +1,137 @@ +# -*- coding: utf-8 -*- +# File : comm.py +# Author : Jiayuan Mao +# Email : maojiayuan@gmail.com +# Date : 27/01/2018 +# +# This file is part of Synchronized-BatchNorm-PyTorch. +# https://github.com/vacancy/Synchronized-BatchNorm-PyTorch +# Distributed under MIT License. + +import queue +import collections +import threading + +__all__ = ['FutureResult', 'SlavePipe', 'SyncMaster'] + + +class FutureResult(object): + """A thread-safe future implementation. Used only as one-to-one pipe.""" + + def __init__(self): + self._result = None + self._lock = threading.Lock() + self._cond = threading.Condition(self._lock) + + def put(self, result): + with self._lock: + assert self._result is None, 'Previous result has\'t been fetched.' 
class SyncMaster(object):
    """Coordinates one master device with its registered slave devices.

    Lifecycle per forward pass:
      1. During replication, each slave device calls :meth:`register_slave`
         and keeps the returned `SlavePipe` for communication.
      2. The master calls :meth:`run_master`, which gathers one message per
         slave (plus the master's own), hands the whole batch to the
         registered callback, and routes each result back to the slave that
         produced it.
    """

    def __init__(self, master_callback):
        """
        Args:
            master_callback: callable invoked on the master once messages from
                all slave devices have been collected.
        """
        self._master_callback = master_callback
        self._queue = queue.Queue()
        self._registry = collections.OrderedDict()
        self._activated = False

    def __getstate__(self):
        # Only the callback survives pickling; queues and futures are rebuilt.
        return {'master_callback': self._master_callback}

    def __setstate__(self, state):
        self.__init__(state['master_callback'])

    def register_slave(self, identifier):
        """Register a slave device (usually keyed by device id).

        Returns:
            A `SlavePipe` the slave uses to exchange messages with the master.
        """
        if self._activated:
            # A registration after activation starts a new round: the previous
            # registry is discarded and the queue must already be drained.
            assert self._queue.empty(), 'Queue is not clean before next initialization.'
            self._activated = False
            self._registry.clear()
        future = FutureResult()
        self._registry[identifier] = _MasterRegistry(future)
        return SlavePipe(identifier, self._queue, future)

    def run_master(self, master_msg):
        """Collect all device messages, run the callback, scatter the results.

        Args:
            master_msg: the master's own message; placed first in the list
                handed to ``master_callback``.

        Returns:
            The callback result addressed to the master device (identifier 0).
        """
        self._activated = True

        # Master's message first, then one queued message per registered slave.
        collected = [(0, master_msg)]
        collected.extend(self._queue.get() for _ in range(self.nr_slaves))

        outputs = self._master_callback(collected)
        assert outputs[0][0] == 0, 'The first result should belongs to the master.'

        # Deliver each non-master result through that slave's future.
        for identifier, payload in outputs:
            if identifier != 0:
                self._registry[identifier].result.put(payload)

        # Each slave acknowledges receipt by pushing True back on the queue.
        for _ in range(self.nr_slaves):
            assert self._queue.get() is True

        return outputs[0][1]

    @property
    def nr_slaves(self):
        # Number of currently registered slave devices.
        return len(self._registry)
class CallbackContext(object):
    """Empty attribute bag shared between replicas of the same sub-module."""
    pass


def execute_replication_callbacks(modules):
    """Invoke `__data_parallel_replicate__(ctx, copy_id)` on every sub-module.

    ``modules[0]`` is the master copy; all other entries are device replicas
    produced by the original `replicate` call. Because all copies are
    isomorphic, sub-module ``j`` of every copy receives the SAME context
    object, letting the copies share state. The master copy's callbacks run
    first (it is iterated first).

    Args:
        modules: list of replicated modules, master copy at index 0.
    """
    # One shared context per sub-module, sized from the master copy.
    contexts = [CallbackContext() for _ in range(len(list(modules[0].modules())))]

    for copy_id, replica in enumerate(modules):
        for idx, submodule in enumerate(replica.modules()):
            if hasattr(submodule, '__data_parallel_replicate__'):
                submodule.__data_parallel_replicate__(contexts[idx], copy_id)
+ """ + + def replicate(self, module, device_ids): + modules = super(DataParallelWithCallback, self).replicate(module, device_ids) + execute_replication_callbacks(modules) + return modules + + +def patch_replication_callback(data_parallel): + """ + Monkey-patch an existing `DataParallel` object. Add the replication callback. + Useful when you have customized `DataParallel` implementation. + + Examples: + > sync_bn = SynchronizedBatchNorm1d(10, eps=1e-5, affine=False) + > sync_bn = DataParallel(sync_bn, device_ids=[0, 1]) + > patch_replication_callback(sync_bn) + # this is equivalent to + > sync_bn = SynchronizedBatchNorm1d(10, eps=1e-5, affine=False) + > sync_bn = DataParallelWithCallback(sync_bn, device_ids=[0, 1]) + """ + + assert isinstance(data_parallel, DataParallel) + + old_replicate = data_parallel.replicate + + @functools.wraps(old_replicate) + def new_replicate(module, device_ids): + modules = old_replicate(module, device_ids) + execute_replication_callbacks(modules) + return modules + + data_parallel.replicate = new_replicate diff --git a/firstordermodel/sync_batchnorm/unittest.py b/firstordermodel/sync_batchnorm/unittest.py new file mode 100644 index 0000000..0675c02 --- /dev/null +++ b/firstordermodel/sync_batchnorm/unittest.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# File : unittest.py +# Author : Jiayuan Mao +# Email : maojiayuan@gmail.com +# Date : 27/01/2018 +# +# This file is part of Synchronized-BatchNorm-PyTorch. +# https://github.com/vacancy/Synchronized-BatchNorm-PyTorch +# Distributed under MIT License. 
def as_numpy(v):
    """Convert *v* to a numpy array, unwrapping a torch ``Variable`` first."""
    tensor = v.data if isinstance(v, Variable) else v
    return tensor.cpu().numpy()


class TorchTestCase(unittest.TestCase):
    """`unittest.TestCase` with a tolerance-based tensor comparison helper."""

    def assertTensorClose(self, a, b, atol=1e-3, rtol=1e-3):
        """Assert *a* and *b* are element-wise close within ``atol``.

        NOTE(review): ``rtol`` is accepted but not forwarded to ``np.allclose``
        here — kept as-is to preserve the existing comparison behavior.
        """
        npa, npb = as_numpy(a), as_numpy(b)
        delta = np.abs(npa - npb)
        # fmax clamps the denominator to at least 1e-5, so it is positive and
        # the relative difference equals |a-b| / max(a, 1e-5).
        rel = delta / np.fmax(npa, 1e-5)
        self.assertTrue(
            np.allclose(npa, npb, atol=atol),
            'Tensor close check failed\n{}\n{}\nadiff={}, rdiff={}'.format(a, b, delta.max(), rel.max())
        )
def train(config, generator, discriminator, kp_detector, checkpoint, log_dir, dataset, device_ids):
    """Run the full first-order-model training loop.

    Args:
        config: parsed config dict; reads 'train_params' and 'visualizer_params'.
        generator: occlusion-aware generator network.
        discriminator: discriminator network (optimized only when the
            'generator_gan' loss weight is non-zero).
        kp_detector: keypoint detector network.
        checkpoint: checkpoint path to resume from, or None for a fresh run.
        log_dir: directory where `Logger` writes logs and checkpoints.
        dataset: training dataset; wrapped in `DatasetRepeater` when
            train_params requests repetition.
        device_ids: GPU device ids for data-parallel training.
    """
    train_params = config['train_params']

    # One Adam optimizer per network; betas follow the original setup.
    optimizer_generator = torch.optim.Adam(generator.parameters(), lr=train_params['lr_generator'], betas=(0.5, 0.999))
    optimizer_discriminator = torch.optim.Adam(discriminator.parameters(), lr=train_params['lr_discriminator'], betas=(0.5, 0.999))
    optimizer_kp_detector = torch.optim.Adam(kp_detector.parameters(), lr=train_params['lr_kp_detector'], betas=(0.5, 0.999))

    if checkpoint is not None:
        # Resume networks and optimizers; the kp optimizer is skipped when its
        # learning rate is 0 (i.e. the detector is frozen).
        start_epoch = Logger.load_cpk(checkpoint, generator, discriminator, kp_detector,
                                      optimizer_generator, optimizer_discriminator,
                                      None if train_params['lr_kp_detector'] == 0 else optimizer_kp_detector)
    else:
        start_epoch = 0

    scheduler_generator = MultiStepLR(optimizer_generator, train_params['epoch_milestones'], gamma=0.1,
                                      last_epoch=start_epoch - 1)
    scheduler_discriminator = MultiStepLR(optimizer_discriminator, train_params['epoch_milestones'], gamma=0.1,
                                          last_epoch=start_epoch - 1)
    scheduler_kp_detector = MultiStepLR(optimizer_kp_detector, train_params['epoch_milestones'], gamma=0.1,
                                        last_epoch=-1 + start_epoch * (train_params['lr_kp_detector'] != 0))

    # BUG FIX: the original condition was
    #   `'num_repeats' in train_params or train_params['num_repeats'] != 1`
    # which raises KeyError exactly when 'num_repeats' is absent (the `or`
    # falls through to the bare lookup). Use a defaulted lookup; wrapping is
    # also skipped for the no-op value 1.
    if train_params.get('num_repeats', 1) != 1:
        dataset = DatasetRepeater(dataset, train_params['num_repeats'])
    dataloader = DataLoader(dataset, batch_size=train_params['batch_size'], shuffle=True, num_workers=6, drop_last=True)

    generator_full = GeneratorFullModel(kp_detector, generator, discriminator, train_params)
    discriminator_full = DiscriminatorFullModel(kp_detector, generator, discriminator, train_params)

    if torch.cuda.is_available():
        generator_full = DataParallelWithCallback(generator_full, device_ids=device_ids)
        discriminator_full = DataParallelWithCallback(discriminator_full, device_ids=device_ids)

    with Logger(log_dir=log_dir, visualizer_params=config['visualizer_params'], checkpoint_freq=train_params['checkpoint_freq']) as logger:
        for epoch in trange(start_epoch, train_params['num_epochs']):
            for x in dataloader:
                losses_generator, generated = generator_full(x)

                loss_values = [val.mean() for val in losses_generator.values()]
                loss = sum(loss_values)

                # Generator + keypoint detector update.
                loss.backward()
                optimizer_generator.step()
                optimizer_generator.zero_grad()
                optimizer_kp_detector.step()
                optimizer_kp_detector.zero_grad()

                if train_params['loss_weights']['generator_gan'] != 0:
                    # Adversarial branch: update the discriminator as well.
                    optimizer_discriminator.zero_grad()
                    losses_discriminator = discriminator_full(x, generated)
                    loss_values = [val.mean() for val in losses_discriminator.values()]
                    loss = sum(loss_values)

                    loss.backward()
                    optimizer_discriminator.step()
                    optimizer_discriminator.zero_grad()
                else:
                    losses_discriminator = {}

                # Merge all loss terms and log scalars for this iteration.
                losses_generator.update(losses_discriminator)
                losses = {key: value.mean().detach().data.cpu().numpy() for key, value in losses_generator.items()}
                logger.log_iter(losses=losses)

            # Learning-rate schedules step once per epoch.
            scheduler_generator.step()
            scheduler_discriminator.step()
            scheduler_kp_detector.step()

            # NOTE(review): `x`/`generated` come from the last iteration of the
            # inner loop — assumes the dataloader is never empty; verify.
            logger.log_epoch(epoch, {'generator': generator,
                                     'discriminator': discriminator,
                                     'kp_detector': kp_detector,
                                     'optimizer_generator': optimizer_generator,
                                     'optimizer_discriminator': optimizer_discriminator,
                                     'optimizer_kp_detector': optimizer_kp_detector}, inp=x, out=generated)
/dev/null +++ b/static/js/453.8beb5808.chunk.js @@ -0,0 +1,2 @@ +"use strict";(self.webpackChunkimage_processing_app=self.webpackChunkimage_processing_app||[]).push([[453],{6453:(e,t,n)=>{n.r(t),n.d(t,{getCLS:()=>y,getFCP:()=>g,getFID:()=>C,getLCP:()=>P,getTTFB:()=>D});var i,r,a,o,u=function(e,t){return{name:e,value:void 0===t?-1:t,delta:0,entries:[],id:"v2-".concat(Date.now(),"-").concat(Math.floor(8999999999999*Math.random())+1e12)}},c=function(e,t){try{if(PerformanceObserver.supportedEntryTypes.includes(e)){if("first-input"===e&&!("PerformanceEventTiming"in self))return;var n=new PerformanceObserver((function(e){return e.getEntries().map(t)}));return n.observe({type:e,buffered:!0}),n}}catch(e){}},s=function(e,t){var n=function n(i){"pagehide"!==i.type&&"hidden"!==document.visibilityState||(e(i),t&&(removeEventListener("visibilitychange",n,!0),removeEventListener("pagehide",n,!0)))};addEventListener("visibilitychange",n,!0),addEventListener("pagehide",n,!0)},f=function(e){addEventListener("pageshow",(function(t){t.persisted&&e(t)}),!0)},m=function(e,t,n){var i;return function(r){t.value>=0&&(r||n)&&(t.delta=t.value-(i||0),(t.delta||void 0===i)&&(i=t.value,e(t)))}},p=-1,v=function(){return"hidden"===document.visibilityState?0:1/0},d=function(){s((function(e){var t=e.timeStamp;p=t}),!0)},l=function(){return p<0&&(p=v(),d(),f((function(){setTimeout((function(){p=v(),d()}),0)}))),{get firstHiddenTime(){return p}}},g=function(e,t){var n,i=l(),r=u("FCP"),a=function(e){"first-contentful-paint"===e.name&&(s&&s.disconnect(),e.startTime-1&&e(t)},r=u("CLS",0),a=0,o=[],p=function(e){if(!e.hadRecentInput){var t=o[0],i=o[o.length-1];a&&e.startTime-i.startTime<1e3&&e.startTime-t.startTime<5e3?(a+=e.value,o.push(e)):(a=e.value,o=[e]),a>r.value&&(r.value=a,r.entries=o,n())}},v=c("layout-shift",p);v&&(n=m(i,r,t),s((function(){v.takeRecords().map(p),n(!0)})),f((function(){a=0,T=-1,r=u("CLS",0),n=m(i,r,t)})))},E={passive:!0,capture:!0},w=new Date,L=function(e,t){i||(i=t,r=e,a=new 
Date,F(removeEventListener),S())},S=function(){if(r>=0&&r1e12?new Date:performance.now())-e.timeStamp;"pointerdown"==e.type?function(e,t){var n=function(){L(e,t),r()},i=function(){r()},r=function(){removeEventListener("pointerup",n,E),removeEventListener("pointercancel",i,E)};addEventListener("pointerup",n,E),addEventListener("pointercancel",i,E)}(t,e):L(t,e)}},F=function(e){["mousedown","keydown","touchstart","pointerdown"].forEach((function(t){return e(t,b,E)}))},C=function(e,t){var n,a=l(),p=u("FID"),v=function(e){e.startTimeperformance.now())return;n.entries=[t],e(n)}catch(e){}},"complete"===document.readyState?setTimeout(t,0):addEventListener("load",(function(){return setTimeout(t,0)}))}}}]); +//# sourceMappingURL=453.8beb5808.chunk.js.map \ No newline at end of file diff --git a/static/js/453.8beb5808.chunk.js.map b/static/js/453.8beb5808.chunk.js.map new file mode 100644 index 0000000..4dac5af --- /dev/null +++ b/static/js/453.8beb5808.chunk.js.map @@ -0,0 +1 @@ +{"version":3,"file":"static/js/453.8beb5808.chunk.js","mappings":"6MAAA,IAAIA,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,SAASJ,EAAEC,GAAG,MAAM,CAACI,KAAKL,EAAEM,WAAM,IAASL,GAAG,EAAEA,EAAEM,MAAM,EAAEC,QAAQ,GAAGC,GAAG,MAAMC,OAAOC,KAAKC,MAAM,KAAKF,OAAOG,KAAKC,MAAM,cAAcD,KAAKE,UAAU,MAAM,EAAEC,EAAE,SAAShB,EAAEC,GAAG,IAAI,GAAGgB,oBAAoBC,oBAAoBC,SAASnB,GAAG,CAAC,GAAG,gBAAgBA,KAAK,2BAA2BoB,MAAM,OAAO,IAAIlB,EAAE,IAAIe,qBAAqB,SAASjB,GAAG,OAAOA,EAAEqB,aAAaC,IAAIrB,EAAE,IAAI,OAAOC,EAAEqB,QAAQ,CAACC,KAAKxB,EAAEyB,UAAS,IAAKvB,CAAC,CAAC,CAAC,MAAMF,GAAG,CAAC,EAAE0B,EAAE,SAAS1B,EAAEC,GAAG,IAAIC,EAAE,SAASA,EAAEC,GAAG,aAAaA,EAAEqB,MAAM,WAAWG,SAASC,kBAAkB5B,EAAEG,GAAGF,IAAI4B,oBAAoB,mBAAmB3B,GAAE,GAAI2B,oBAAoB,WAAW3B,GAAE,IAAK,EAAE4B,iBAAiB,mBAAmB5B,GAAE,GAAI4B,iBAAiB,WAAW5B,GAAE,EAAG,EAAE6B,EAAE,SAAS/B,GAAG8B,iBAAiB,YAAY,SAAS7B,GAAGA,EAAE+B,WAAWhC,EAAEC,EAAE,IAAG,EAAG,EAAEgC,EAAE,SAASjC,EAAEC,EAAEC,GAAG,IAAIC,EAAE,OAAO,SAASC,GAAGH,EAAEK,OAAO,IAAIF,GAAGF,KAAKD,EAAEM,MAAMN,EAAEK,OAAOH,GAAG,IAAIF,EAAEM,YAAO,IAASJ,KAAKA,EAAEF,EAAEK,MAAM
N,EAAEC,IAAI,CAAC,EAAEiC,GAAG,EAAEC,EAAE,WAAW,MAAM,WAAWR,SAASC,gBAAgB,EAAE,GAAG,EAAEQ,EAAE,WAAWV,GAAG,SAAS1B,GAAG,IAAIC,EAAED,EAAEqC,UAAUH,EAAEjC,CAAC,IAAG,EAAG,EAAEqC,EAAE,WAAW,OAAOJ,EAAE,IAAIA,EAAEC,IAAIC,IAAIL,GAAG,WAAWQ,YAAY,WAAWL,EAAEC,IAAIC,GAAG,GAAG,EAAE,KAAK,CAAC,mBAAII,GAAkB,OAAON,CAAC,EAAE,EAAEO,EAAE,SAASzC,EAAEC,GAAG,IAAIC,EAAEC,EAAEmC,IAAIZ,EAAEtB,EAAE,OAAO8B,EAAE,SAASlC,GAAG,2BAA2BA,EAAEK,OAAO+B,GAAGA,EAAEM,aAAa1C,EAAE2C,UAAUxC,EAAEqC,kBAAkBd,EAAEpB,MAAMN,EAAE2C,UAAUjB,EAAElB,QAAQoC,KAAK5C,GAAGE,GAAE,IAAK,EAAEiC,EAAEU,OAAOC,aAAaA,YAAYC,kBAAkBD,YAAYC,iBAAiB,0BAA0B,GAAGX,EAAED,EAAE,KAAKnB,EAAE,QAAQkB,IAAIC,GAAGC,KAAKlC,EAAE+B,EAAEjC,EAAE0B,EAAEzB,GAAGkC,GAAGD,EAAEC,GAAGJ,GAAG,SAAS5B,GAAGuB,EAAEtB,EAAE,OAAOF,EAAE+B,EAAEjC,EAAE0B,EAAEzB,GAAG+C,uBAAuB,WAAWA,uBAAuB,WAAWtB,EAAEpB,MAAMwC,YAAYlC,MAAMT,EAAEkC,UAAUnC,GAAE,EAAG,GAAG,GAAG,IAAI,EAAE+C,GAAE,EAAGC,GAAG,EAAEC,EAAE,SAASnD,EAAEC,GAAGgD,IAAIR,GAAG,SAASzC,GAAGkD,EAAElD,EAAEM,KAAK,IAAI2C,GAAE,GAAI,IAAI/C,EAAEC,EAAE,SAASF,GAAGiD,GAAG,GAAGlD,EAAEC,EAAE,EAAEiC,EAAE9B,EAAE,MAAM,GAAG+B,EAAE,EAAEC,EAAE,GAAGE,EAAE,SAAStC,GAAG,IAAIA,EAAEoD,eAAe,CAAC,IAAInD,EAAEmC,EAAE,GAAGjC,EAAEiC,EAAEA,EAAEiB,OAAO,GAAGlB,GAAGnC,EAAE2C,UAAUxC,EAAEwC,UAAU,KAAK3C,EAAE2C,UAAU1C,EAAE0C,UAAU,KAAKR,GAAGnC,EAAEM,MAAM8B,EAAEQ,KAAK5C,KAAKmC,EAAEnC,EAAEM,MAAM8B,EAAE,CAACpC,IAAImC,EAAED,EAAE5B,QAAQ4B,EAAE5B,MAAM6B,EAAED,EAAE1B,QAAQ4B,EAAElC,IAAI,CAAC,EAAEiD,EAAEnC,EAAE,eAAesB,GAAGa,IAAIjD,EAAE+B,EAAE9B,EAAE+B,EAAEjC,GAAGyB,GAAG,WAAWyB,EAAEG,cAAchC,IAAIgB,GAAGpC,GAAE,EAAG,IAAI6B,GAAG,WAAWI,EAAE,EAAEe,GAAG,EAAEhB,EAAE9B,EAAE,MAAM,GAAGF,EAAE+B,EAAE9B,EAAE+B,EAAEjC,EAAE,IAAI,EAAEsD,EAAE,CAACC,SAAQ,EAAGC,SAAQ,GAAIC,EAAE,IAAI/C,KAAKgD,EAAE,SAASxD,EAAEC,GAAGJ,IAAIA,EAAEI,EAAEH,EAAEE,EAAED,EAAE,IAAIS,KAAKiD,EAAE/B,qBAAqBgC,IAAI,EAAEA,EAAE,WAAW,GAAG5D,GAAG,GAAGA,EAAEC,EAAEwD,EAAE,CAAC,IAAItD,EAAE,CAAC0D,UAAU,cAAczD,KAAKL,EAAEwB,KAAKuC,OAAO/D,EAAE+D,OAAOC,WAAWhE,EAAEgE,WAAWrB,UAAU3C,EAAEqC,UAAU4B,gBAAgBjE,EAAEqC,UAAUpC,GAAGE,EAAE+D,SAAS,SAASlE,GAAGA,EA
AEI,EAAE,IAAID,EAAE,EAAE,CAAC,EAAEgE,EAAE,SAASnE,GAAG,GAAGA,EAAEgE,WAAW,CAAC,IAAI/D,GAAGD,EAAEqC,UAAU,KAAK,IAAI1B,KAAKmC,YAAYlC,OAAOZ,EAAEqC,UAAU,eAAerC,EAAEwB,KAAK,SAASxB,EAAEC,GAAG,IAAIC,EAAE,WAAWyD,EAAE3D,EAAEC,GAAGG,GAAG,EAAED,EAAE,WAAWC,GAAG,EAAEA,EAAE,WAAWyB,oBAAoB,YAAY3B,EAAEqD,GAAG1B,oBAAoB,gBAAgB1B,EAAEoD,EAAE,EAAEzB,iBAAiB,YAAY5B,EAAEqD,GAAGzB,iBAAiB,gBAAgB3B,EAAEoD,EAAE,CAAhO,CAAkOtD,EAAED,GAAG2D,EAAE1D,EAAED,EAAE,CAAC,EAAE4D,EAAE,SAAS5D,GAAG,CAAC,YAAY,UAAU,aAAa,eAAekE,SAAS,SAASjE,GAAG,OAAOD,EAAEC,EAAEkE,EAAEZ,EAAE,GAAG,EAAEa,EAAE,SAASlE,EAAEgC,GAAG,IAAIC,EAAEC,EAAEE,IAAIG,EAAErC,EAAE,OAAO6C,EAAE,SAASjD,GAAGA,EAAE2C,UAAUP,EAAEI,kBAAkBC,EAAEnC,MAAMN,EAAEiE,gBAAgBjE,EAAE2C,UAAUF,EAAEjC,QAAQoC,KAAK5C,GAAGmC,GAAE,GAAI,EAAEe,EAAElC,EAAE,cAAciC,GAAGd,EAAEF,EAAE/B,EAAEuC,EAAEP,GAAGgB,GAAGxB,GAAG,WAAWwB,EAAEI,cAAchC,IAAI2B,GAAGC,EAAER,YAAY,IAAG,GAAIQ,GAAGnB,GAAG,WAAW,IAAIf,EAAEyB,EAAErC,EAAE,OAAO+B,EAAEF,EAAE/B,EAAEuC,EAAEP,GAAG/B,EAAE,GAAGF,GAAG,EAAED,EAAE,KAAK4D,EAAE9B,kBAAkBd,EAAEiC,EAAE9C,EAAEyC,KAAK5B,GAAG6C,GAAG,GAAG,EAAEQ,EAAE,CAAC,EAAEC,EAAE,SAAStE,EAAEC,GAAG,IAAIC,EAAEC,EAAEmC,IAAIJ,EAAE9B,EAAE,OAAO+B,EAAE,SAASnC,GAAG,IAAIC,EAAED,EAAE2C,UAAU1C,EAAEE,EAAEqC,kBAAkBN,EAAE5B,MAAML,EAAEiC,EAAE1B,QAAQoC,KAAK5C,GAAGE,IAAI,EAAEkC,EAAEpB,EAAE,2BAA2BmB,GAAG,GAAGC,EAAE,CAAClC,EAAE+B,EAAEjC,EAAEkC,EAAEjC,GAAG,IAAIwC,EAAE,WAAW4B,EAAEnC,EAAEzB,MAAM2B,EAAEkB,cAAchC,IAAIa,GAAGC,EAAEM,aAAa2B,EAAEnC,EAAEzB,KAAI,EAAGP,GAAE,GAAI,EAAE,CAAC,UAAU,SAASgE,SAAS,SAASlE,GAAG8B,iBAAiB9B,EAAEyC,EAAE,CAAC8B,MAAK,EAAGd,SAAQ,GAAI,IAAI/B,EAAEe,GAAE,GAAIV,GAAG,SAAS5B,GAAG+B,EAAE9B,EAAE,OAAOF,EAAE+B,EAAEjC,EAAEkC,EAAEjC,GAAG+C,uBAAuB,WAAWA,uBAAuB,WAAWd,EAAE5B,MAAMwC,YAAYlC,MAAMT,EAAEkC,UAAUgC,EAAEnC,EAAEzB,KAAI,EAAGP,GAAE,EAAG,GAAG,GAAG,GAAG,CAAC,EAAEsE,EAAE,SAASxE,GAAG,IAAIC,EAAEC,EAAEE,EAAE,QAAQH,EAAE,WAAW,IAAI,IAAIA,EAAE6C,YAAY2B,iBAAiB,cAAc,IAAI,WAAW,IAAIzE,EAAE8C,YAAY4B,OAAOzE,EAAE,CAAC6D,UAAU,aAAanB,UAAU,GAAG,IAAI,IAAIzC,KAAKF,EAAE,oBAAoBE,GAAG,WAAWA,IAAID,EAAEC,GAAGW,KAAK8D,IAAI3E,E
AAEE,GAAGF,EAAE4E,gBAAgB,IAAI,OAAO3E,CAAC,CAAjL,GAAqL,GAAGC,EAAEI,MAAMJ,EAAEK,MAAMN,EAAE4E,cAAc3E,EAAEI,MAAM,GAAGJ,EAAEI,MAAMwC,YAAYlC,MAAM,OAAOV,EAAEM,QAAQ,CAACP,GAAGD,EAAEE,EAAE,CAAC,MAAMF,GAAG,CAAC,EAAE,aAAa2B,SAASmD,WAAWvC,WAAWtC,EAAE,GAAG6B,iBAAiB,QAAQ,WAAW,OAAOS,WAAWtC,EAAE,EAAE,GAAG,C","sources":["../node_modules/web-vitals/dist/web-vitals.js"],"sourcesContent":["var e,t,n,i,r=function(e,t){return{name:e,value:void 0===t?-1:t,delta:0,entries:[],id:\"v2-\".concat(Date.now(),\"-\").concat(Math.floor(8999999999999*Math.random())+1e12)}},a=function(e,t){try{if(PerformanceObserver.supportedEntryTypes.includes(e)){if(\"first-input\"===e&&!(\"PerformanceEventTiming\"in self))return;var n=new PerformanceObserver((function(e){return e.getEntries().map(t)}));return n.observe({type:e,buffered:!0}),n}}catch(e){}},o=function(e,t){var n=function n(i){\"pagehide\"!==i.type&&\"hidden\"!==document.visibilityState||(e(i),t&&(removeEventListener(\"visibilitychange\",n,!0),removeEventListener(\"pagehide\",n,!0)))};addEventListener(\"visibilitychange\",n,!0),addEventListener(\"pagehide\",n,!0)},u=function(e){addEventListener(\"pageshow\",(function(t){t.persisted&&e(t)}),!0)},c=function(e,t,n){var i;return function(r){t.value>=0&&(r||n)&&(t.delta=t.value-(i||0),(t.delta||void 0===i)&&(i=t.value,e(t)))}},f=-1,s=function(){return\"hidden\"===document.visibilityState?0:1/0},m=function(){o((function(e){var t=e.timeStamp;f=t}),!0)},v=function(){return f<0&&(f=s(),m(),u((function(){setTimeout((function(){f=s(),m()}),0)}))),{get firstHiddenTime(){return f}}},d=function(e,t){var n,i=v(),o=r(\"FCP\"),f=function(e){\"first-contentful-paint\"===e.name&&(m&&m.disconnect(),e.startTime-1&&e(t)},f=r(\"CLS\",0),s=0,m=[],v=function(e){if(!e.hadRecentInput){var 
t=m[0],i=m[m.length-1];s&&e.startTime-i.startTime<1e3&&e.startTime-t.startTime<5e3?(s+=e.value,m.push(e)):(s=e.value,m=[e]),s>f.value&&(f.value=s,f.entries=m,n())}},h=a(\"layout-shift\",v);h&&(n=c(i,f,t),o((function(){h.takeRecords().map(v),n(!0)})),u((function(){s=0,l=-1,f=r(\"CLS\",0),n=c(i,f,t)})))},T={passive:!0,capture:!0},y=new Date,g=function(i,r){e||(e=r,t=i,n=new Date,w(removeEventListener),E())},E=function(){if(t>=0&&t1e12?new Date:performance.now())-e.timeStamp;\"pointerdown\"==e.type?function(e,t){var n=function(){g(e,t),r()},i=function(){r()},r=function(){removeEventListener(\"pointerup\",n,T),removeEventListener(\"pointercancel\",i,T)};addEventListener(\"pointerup\",n,T),addEventListener(\"pointercancel\",i,T)}(t,e):g(t,e)}},w=function(e){[\"mousedown\",\"keydown\",\"touchstart\",\"pointerdown\"].forEach((function(t){return e(t,S,T)}))},L=function(n,f){var s,m=v(),d=r(\"FID\"),p=function(e){e.startTimeperformance.now())return;n.entries=[t],e(n)}catch(e){}},\"complete\"===document.readyState?setTimeout(t,0):addEventListener(\"load\",(function(){return setTimeout(t,0)}))};export{h as getCLS,d as getFCP,L as getFID,F as getLCP,P as getTTFB};\n"],"names":["e","t","n","i","r","name","value","delta","entries","id","concat","Date","now","Math","floor","random","a","PerformanceObserver","supportedEntryTypes","includes","self","getEntries","map","observe","type","buffered","o","document","visibilityState","removeEventListener","addEventListener","u","persisted","c","f","s","m","timeStamp","v","setTimeout","firstHiddenTime","d","disconnect","startTime","push","window","performance","getEntriesByName","requestAnimationFrame","p","l","h","hadRecentInput","length","takeRecords","T","passive","capture","y","g","w","E","entryType","target","cancelable","processingStart","forEach","S","L","b","F","once","P","getEntriesByType","timing","max","navigationStart","responseStart","readyState"],"sourceRoot":""} \ No newline at end of file diff --git 
a/static/js/main.f8aa3870.js b/static/js/main.f8aa3870.js new file mode 100644 index 0000000..361315a --- /dev/null +++ b/static/js/main.f8aa3870.js @@ -0,0 +1,3 @@ +/*! For license information please see main.f8aa3870.js.LICENSE.txt */ +(()=>{var e={5513:(e,t,n)=>{"use strict";n.d(t,{A:()=>oe});var r=function(){function e(e){var t=this;this._insertTag=function(e){var n;n=0===t.tags.length?t.insertionPoint?t.insertionPoint.nextSibling:t.prepend?t.container.firstChild:t.before:t.tags[t.tags.length-1].nextSibling,t.container.insertBefore(e,n),t.tags.push(e)},this.isSpeedy=void 0===e.speedy||e.speedy,this.tags=[],this.ctr=0,this.nonce=e.nonce,this.key=e.key,this.container=e.container,this.prepend=e.prepend,this.insertionPoint=e.insertionPoint,this.before=null}var t=e.prototype;return t.hydrate=function(e){e.forEach(this._insertTag)},t.insert=function(e){this.ctr%(this.isSpeedy?65e3:1)===0&&this._insertTag(function(e){var t=document.createElement("style");return t.setAttribute("data-emotion",e.key),void 0!==e.nonce&&t.setAttribute("nonce",e.nonce),t.appendChild(document.createTextNode("")),t.setAttribute("data-s",""),t}(this));var t=this.tags[this.tags.length-1];if(this.isSpeedy){var n=function(e){if(e.sheet)return e.sheet;for(var t=0;t0?c(w,--y):0,g--,10===b&&(g=1,m--),b}function E(){return b=y2||P(b)>3?"":" "}function M(e,t){for(;--t&&E()&&!(b<48||b>102||b>57&&b<65||b>70&&b<97););return R(e,A()+(t<6&&32==C()&&32==E()))}function L(e){for(;E();)switch(b){case e:return y;case 34:case 39:34!==e&&39!==e&&L(b);break;case 40:41===e&&L(e);break;case 92:E()}return y}function z(e,t){for(;E()&&e+b!==57&&(e+b!==84||47!==C()););return"/*"+R(t,y-1)+"*"+a(47===e?e:E())}function j(e){for(;!P(C());)E();return R(e,y)}var I="-ms-",F="-moz-",B="-webkit-",D="comm",W="rule",U="decl",$="@keyframes";function V(e,t){for(var n="",r=p(e),o=0;o0&&f(F)-v&&h(b>32?X(F+";",r,n,v-1):X(s(F," ","")+";",r,n,v-2),p);break;case 
59:F+=";";default:if(h(I=G(F,t,n,m,g,o,d,T,O=[],L=[],v),i),123===P)if(0===g)K(F,t,I,I,O,i,v,d,L);else switch(99===y&&110===c(F,3)?100:y){case 100:case 108:case 109:case 115:K(e,I,I,r&&h(G(e,I,I,0,0,o,d,T,o,O=[],v),L),o,L,v,d,r?O:L);break;default:K(F,I,I,I,[""],L,0,d,L)}}m=g=b=0,x=R=1,T=F="",v=l;break;case 58:v=1+f(F),b=w;default:if(x<1)if(123==P)--x;else if(125==P&&0==x++&&125==k())continue;switch(F+=a(P),P*x){case 38:R=g>0?1:(F+="\f",-1);break;case 44:d[m++]=(f(F)-1)*R,R=1;break;case 64:45===C()&&(F+=N(E())),y=C(),g=v=f(T=F+=j(A())),P++;break;case 45:45===w&&2==f(F)&&(x=0)}}return i}function G(e,t,n,r,a,i,u,c,f,h,m){for(var g=a-1,v=0===a?i:[""],y=p(v),b=0,w=0,S=0;b0?v[k]+" "+E:s(E,/&\f/g,v[k])))&&(f[S++]=C);return x(e,t,n,0===a?W:c,f,h,m)}function Q(e,t,n){return x(e,t,n,D,a(b),d(e,2,-2),0)}function X(e,t,n,r){return x(e,t,n,U,d(e,0,r),d(e,r+1,-1),r)}var Y=function(e,t,n){for(var r=0,o=0;r=o,o=C(),38===r&&12===o&&(t[n]=1),!P(o);)E();return R(e,y)},J=function(e,t){return O(function(e,t){var n=-1,r=44;do{switch(P(r)){case 0:38===r&&12===C()&&(t[n]=1),e[n]+=Y(y-1,t,n);break;case 2:e[n]+=N(r);break;case 4:if(44===r){e[++n]=58===C()?"&\f":"",t[n]=e[n].length;break}default:e[n]+=a(r)}}while(r=E());return e}(T(e),t))},Z=new WeakMap,ee=function(e){if("rule"===e.type&&e.parent&&!(e.length<1)){for(var t=e.value,n=e.parent,r=e.column===n.column&&e.line===n.line;"rule"!==n.type;)if(!(n=n.parent))return;if((1!==e.props.length||58===t.charCodeAt(0)||Z.get(n))&&!r){Z.set(e,!0);for(var o=[],a=J(t,o),i=n.props,l=0,s=0;l6)switch(c(e,t+1)){case 109:if(45!==c(e,t+4))break;case 102:return s(e,/(.+:)(.+)-([^]+)/,"$1"+B+"$2-$3$1"+F+(108==c(e,t+3)?"$3":"$2-$3"))+e;case 115:return~u(e,"stretch")?ne(s(e,"stretch","fill-available"),t)+e:e}break;case 4949:if(115!==c(e,t+1))break;case 6444:switch(c(e,f(e)-3-(~u(e,"!important")&&10))){case 107:return s(e,":",":"+B)+e;case 101:return s(e,/(.+:)([^;!]+)(;|!.+)?/,"$1"+B+(45===c(e,14)?"inline-":"")+"box$3$1"+B+"$2$3$1"+I+"$2box$3")+e}break;case 
5936:switch(c(e,t+11)){case 114:return B+e+I+s(e,/[svh]\w+-[tblr]{2}/,"tb")+e;case 108:return B+e+I+s(e,/[svh]\w+-[tblr]{2}/,"tb-rl")+e;case 45:return B+e+I+s(e,/[svh]\w+-[tblr]{2}/,"lr")+e}return B+e+I+e+e}return e}var re=[function(e,t,n,r){if(e.length>-1&&!e.return)switch(e.type){case U:e.return=ne(e.value,e.length);break;case $:return V([S(e,{value:s(e.value,"@","@"+B)})],r);case W:if(e.length)return function(e,t){return e.map(t).join("")}(e.props,(function(t){switch(function(e,t){return(e=t.exec(e))?e[0]:e}(t,/(::plac\w+|:read-\w+)/)){case":read-only":case":read-write":return V([S(e,{props:[s(t,/:(read-\w+)/,":-moz-$1")]})],r);case"::placeholder":return V([S(e,{props:[s(t,/:(plac\w+)/,":"+B+"input-$1")]}),S(e,{props:[s(t,/:(plac\w+)/,":-moz-$1")]}),S(e,{props:[s(t,/:(plac\w+)/,I+"input-$1")]})],r)}return""}))}}],oe=function(e){var t=e.key;if("css"===t){var n=document.querySelectorAll("style[data-emotion]:not([data-s])");Array.prototype.forEach.call(n,(function(e){-1!==e.getAttribute("data-emotion").indexOf(" ")&&(document.head.appendChild(e),e.setAttribute("data-s",""))}))}var o=e.stylisPlugins||re;var a,i,l={},s=[];a=e.container||document.head,Array.prototype.forEach.call(document.querySelectorAll('style[data-emotion^="'+t+' "]'),(function(e){for(var t=e.getAttribute("data-emotion").split(" "),n=1;n{"use strict";function r(e){var t=Object.create(null);return function(n){return void 0===t[n]&&(t[n]=e(n)),t[n]}}n.d(t,{A:()=>r})},5756:(e,t,n)=>{"use strict";n.d(t,{C:()=>l,T:()=>u,i:()=>a,w:()=>s});var r=n(5043),o=n(5513),a=(n(2830),n(9436),!0),i=r.createContext("undefined"!==typeof HTMLElement?(0,o.A)({key:"css"}):null);var l=i.Provider,s=function(e){return(0,r.forwardRef)((function(t,n){var o=(0,r.useContext)(i);return e(t,o,n)}))};a||(s=function(e){return function(t){var n=(0,r.useContext)(i);return null===n?(n=(0,o.A)({key:"css"}),r.createElement(i.Provider,{value:n},e(t,n))):e(t,n)}});var u=r.createContext({})},3290:(e,t,n)=>{"use 
strict";n.d(t,{AH:()=>u,i7:()=>c,mL:()=>s});var r=n(5756),o=n(5043),a=n(1722),i=n(9436),l=n(2830),s=(n(5513),n(219),(0,r.w)((function(e,t){var n=e.styles,s=(0,l.J)([n],void 0,o.useContext(r.T));if(!r.i){for(var u,c=s.name,d=s.styles,f=s.next;void 0!==f;)c+=" "+f.name,d+=f.styles,f=f.next;var p=!0===t.compat,h=t.insert("",{name:c,styles:d},t.sheet,p);return p?null:o.createElement("style",((u={})["data-emotion"]=t.key+"-global "+c,u.dangerouslySetInnerHTML={__html:h},u.nonce=t.sheet.nonce,u))}var m=o.useRef();return(0,i.i)((function(){var e=t.key+"-global",n=new t.sheet.constructor({key:e,nonce:t.sheet.nonce,container:t.sheet.container,speedy:t.sheet.isSpeedy}),r=!1,o=document.querySelector('style[data-emotion="'+e+" "+s.name+'"]');return t.sheet.tags.length&&(n.before=t.sheet.tags[0]),null!==o&&(r=!0,o.setAttribute("data-emotion",e),n.hydrate([o])),m.current=[n,r],function(){n.flush()}}),[t]),(0,i.i)((function(){var e=m.current,n=e[0];if(e[1])e[1]=!1;else{if(void 0!==s.next&&(0,a.sk)(t,s.next,!0),n.tags.length){var r=n.tags[n.tags.length-1].nextElementSibling;n.before=r,n.flush()}t.insert("",s,n,!1)}}),[t,s.name]),null})));function u(){for(var e=arguments.length,t=new Array(e),n=0;n{"use strict";n.d(t,{J:()=>h});var r={animationIterationCount:1,aspectRatio:1,borderImageOutset:1,borderImageSlice:1,borderImageWidth:1,boxFlex:1,boxFlexGroup:1,boxOrdinalGroup:1,columnCount:1,columns:1,flex:1,flexGrow:1,flexPositive:1,flexShrink:1,flexNegative:1,flexOrder:1,gridRow:1,gridRowEnd:1,gridRowSpan:1,gridRowStart:1,gridColumn:1,gridColumnEnd:1,gridColumnSpan:1,gridColumnStart:1,msGridRow:1,msGridRowSpan:1,msGridColumn:1,msGridColumnSpan:1,fontWeight:1,lineHeight:1,opacity:1,order:1,orphans:1,tabSize:1,widows:1,zIndex:1,zoom:1,WebkitLineClamp:1,fillOpacity:1,floodOpacity:1,stopOpacity:1,strokeDasharray:1,strokeDashoffset:1,strokeMiterlimit:1,strokeOpacity:1,strokeWidth:1},o=n(918),a=/[A-Z]|^ms/g,i=/_EMO_([^_]+?)_([^]*?)_EMO_/g,l=function(e){return 
45===e.charCodeAt(1)},s=function(e){return null!=e&&"boolean"!==typeof e},u=(0,o.A)((function(e){return l(e)?e:e.replace(a,"-$&").toLowerCase()})),c=function(e,t){switch(e){case"animation":case"animationName":if("string"===typeof t)return t.replace(i,(function(e,t,n){return f={name:t,styles:n,next:f},t}))}return 1===r[e]||l(e)||"number"!==typeof t||0===t?t:t+"px"};function d(e,t,n){if(null==n)return"";if(void 0!==n.__emotion_styles)return n;switch(typeof n){case"boolean":return"";case"object":if(1===n.anim)return f={name:n.name,styles:n.styles,next:f},n.name;if(void 0!==n.styles){var r=n.next;if(void 0!==r)for(;void 0!==r;)f={name:r.name,styles:r.styles,next:f},r=r.next;return n.styles+";"}return function(e,t,n){var r="";if(Array.isArray(n))for(var o=0;o=4;++r,o-=4)t=1540483477*(65535&(t=255&e.charCodeAt(r)|(255&e.charCodeAt(++r))<<8|(255&e.charCodeAt(++r))<<16|(255&e.charCodeAt(++r))<<24))+(59797*(t>>>16)<<16),n=1540483477*(65535&(t^=t>>>24))+(59797*(t>>>16)<<16)^1540483477*(65535&n)+(59797*(n>>>16)<<16);switch(o){case 3:n^=(255&e.charCodeAt(r+2))<<16;case 2:n^=(255&e.charCodeAt(r+1))<<8;case 1:n=1540483477*(65535&(n^=255&e.charCodeAt(r)))+(59797*(n>>>16)<<16)}return(((n=1540483477*(65535&(n^=n>>>13))+(59797*(n>>>16)<<16))^n>>>15)>>>0).toString(36)}(o)+s;return{name:u,styles:o,next:f}}},9436:(e,t,n)=>{"use strict";var r;n.d(t,{i:()=>l,s:()=>i});var o=n(5043),a=!!(r||(r=n.t(o,2))).useInsertionEffect&&(r||(r=n.t(o,2))).useInsertionEffect,i=a||function(e){return e()},l=a||o.useLayoutEffect},1722:(e,t,n)=>{"use strict";n.d(t,{Rk:()=>r,SF:()=>o,sk:()=>a});function r(e,t,n){var r="";return n.split(" ").forEach((function(n){void 0!==e[n]?t.push(e[n]+";"):r+=n+" "})),r}var o=function(e,t,n){var r=e.key+"-"+t.name;!1===n&&void 0===e.registered[r]&&(e.registered[r]=t.styles)},a=function(e,t,n){o(e,t,n);var r=e.key+"-"+t.name;if(void 0===e.inserted[t.name]){var a=t;do{e.insert(t===a?"."+r:"",a,e.sheet,!0),a=a.next}while(void 0!==a)}}},5881:(e,t)=>{"use strict";var 
n,r=Symbol.for("react.element"),o=Symbol.for("react.portal"),a=Symbol.for("react.fragment"),i=Symbol.for("react.strict_mode"),l=Symbol.for("react.profiler"),s=Symbol.for("react.provider"),u=Symbol.for("react.context"),c=Symbol.for("react.server_context"),d=Symbol.for("react.forward_ref"),f=Symbol.for("react.suspense"),p=Symbol.for("react.suspense_list"),h=Symbol.for("react.memo"),m=Symbol.for("react.lazy"),g=Symbol.for("react.offscreen");function v(e){if("object"===typeof e&&null!==e){var t=e.$$typeof;switch(t){case r:switch(e=e.type){case a:case l:case i:case f:case p:return e;default:switch(e=e&&e.$$typeof){case c:case u:case d:case m:case h:case s:return e;default:return t}}case o:return t}}}n=Symbol.for("react.module.reference")},805:(e,t,n)=>{"use strict";n(5881)},869:(e,t,n)=>{"use strict";n.d(t,{A:()=>a});n(5043);var r=n(3290),o=n(579);function a(e){const{styles:t,defaultTheme:n={}}=e,a="function"===typeof t?e=>{return t(void 0===(r=e)||null===r||0===Object.keys(r).length?n:e);var r}:t;return(0,o.jsx)(r.mL,{styles:a})}},3174:(e,t,n)=>{"use strict";n.r(t),n.d(t,{GlobalStyles:()=>k.A,StyledEngineProvider:()=>S,ThemeContext:()=>s.T,css:()=>y.AH,default:()=>E,internal_processStyles:()=>C,keyframes:()=>y.i7});var 
r=n(8168),o=n(5043),a=n(918),i=/^((children|dangerouslySetInnerHTML|key|ref|autoFocus|defaultValue|defaultChecked|innerHTML|suppressContentEditableWarning|suppressHydrationWarning|valueLink|abbr|accept|acceptCharset|accessKey|action|allow|allowUserMedia|allowPaymentRequest|allowFullScreen|allowTransparency|alt|async|autoComplete|autoPlay|capture|cellPadding|cellSpacing|challenge|charSet|checked|cite|classID|className|cols|colSpan|content|contentEditable|contextMenu|controls|controlsList|coords|crossOrigin|data|dateTime|decoding|default|defer|dir|disabled|disablePictureInPicture|disableRemotePlayback|download|draggable|encType|enterKeyHint|form|formAction|formEncType|formMethod|formNoValidate|formTarget|frameBorder|headers|height|hidden|high|href|hrefLang|htmlFor|httpEquiv|id|inputMode|integrity|is|keyParams|keyType|kind|label|lang|list|loading|loop|low|marginHeight|marginWidth|max|maxLength|media|mediaGroup|method|min|minLength|multiple|muted|name|nonce|noValidate|open|optimum|pattern|placeholder|playsInline|poster|preload|profile|radioGroup|readOnly|referrerPolicy|rel|required|reversed|role|rows|rowSpan|sandbox|scope|scoped|scrolling|seamless|selected|shape|size|sizes|slot|span|spellCheck|src|srcDoc|srcLang|srcSet|start|step|style|summary|tabIndex|target|title|translate|type|useMap|value|width|wmode|wrap|about|datatype|inlist|prefix|property|resource|typeof|vocab|autoCapitalize|autoCorrect|autoSave|color|incremental|fallback|inert|itemProp|itemScope|itemType|itemID|itemRef|on|option|results|security|unselectable|accentHeight|accumulate|additive|alignmentBaseline|allowReorder|alphabetic|amplitude|arabicForm|ascent|attributeName|attributeType|autoReverse|azimuth|baseFrequency|baselineShift|baseProfile|bbox|begin|bias|by|calcMode|capHeight|clip|clipPathUnits|clipPath|clipRule|colorInterpolation|colorInterpolationFilters|colorProfile|colorRendering|contentScriptType|contentStyleType|cursor|cx|cy|d|decelerate|descent|diffuseConstant|direction|display|divisor|dominantBas
eline|dur|dx|dy|edgeMode|elevation|enableBackground|end|exponent|externalResourcesRequired|fill|fillOpacity|fillRule|filter|filterRes|filterUnits|floodColor|floodOpacity|focusable|fontFamily|fontSize|fontSizeAdjust|fontStretch|fontStyle|fontVariant|fontWeight|format|from|fr|fx|fy|g1|g2|glyphName|glyphOrientationHorizontal|glyphOrientationVertical|glyphRef|gradientTransform|gradientUnits|hanging|horizAdvX|horizOriginX|ideographic|imageRendering|in|in2|intercept|k|k1|k2|k3|k4|kernelMatrix|kernelUnitLength|kerning|keyPoints|keySplines|keyTimes|lengthAdjust|letterSpacing|lightingColor|limitingConeAngle|local|markerEnd|markerMid|markerStart|markerHeight|markerUnits|markerWidth|mask|maskContentUnits|maskUnits|mathematical|mode|numOctaves|offset|opacity|operator|order|orient|orientation|origin|overflow|overlinePosition|overlineThickness|panose1|paintOrder|pathLength|patternContentUnits|patternTransform|patternUnits|pointerEvents|points|pointsAtX|pointsAtY|pointsAtZ|preserveAlpha|preserveAspectRatio|primitiveUnits|r|radius|refX|refY|renderingIntent|repeatCount|repeatDur|requiredExtensions|requiredFeatures|restart|result|rotate|rx|ry|scale|seed|shapeRendering|slope|spacing|specularConstant|specularExponent|speed|spreadMethod|startOffset|stdDeviation|stemh|stemv|stitchTiles|stopColor|stopOpacity|strikethroughPosition|strikethroughThickness|string|stroke|strokeDasharray|strokeDashoffset|strokeLinecap|strokeLinejoin|strokeMiterlimit|strokeOpacity|strokeWidth|surfaceScale|systemLanguage|tableValues|targetX|targetY|textAnchor|textDecoration|textRendering|textLength|to|transform|u1|u2|underlinePosition|underlineThickness|unicode|unicodeBidi|unicodeRange|unitsPerEm|vAlphabetic|vHanging|vIdeographic|vMathematical|values|vectorEffect|version|vertAdvY|vertOriginX|vertOriginY|viewBox|viewTarget|visibility|widths|wordSpacing|writingMode|x|xHeight|x1|x2|xChannelSelector|xlinkActuate|xlinkArcrole|xlinkHref|xlinkRole|xlinkShow|xlinkTitle|xlinkType|xmlBase|xmlns|xmlnsXlink|xmlLang|xmlSpace|
y|y1|y2|yChannelSelector|z|zoomAndPan|for|class|autofocus)|(([Dd][Aa][Tt][Aa]|[Aa][Rr][Ii][Aa]|x)-.*))$/,l=(0,a.A)((function(e){return i.test(e)||111===e.charCodeAt(0)&&110===e.charCodeAt(1)&&e.charCodeAt(2)<91})),s=n(5756),u=n(1722),c=n(2830),d=n(9436),f=l,p=function(e){return"theme"!==e},h=function(e){return"string"===typeof e&&e.charCodeAt(0)>96?f:p},m=function(e,t,n){var r;if(t){var o=t.shouldForwardProp;r=e.__emotion_forwardProp&&o?function(t){return e.__emotion_forwardProp(t)&&o(t)}:o}return"function"!==typeof r&&n&&(r=e.__emotion_forwardProp),r},g=function(e){var t=e.cache,n=e.serialized,r=e.isStringTag;return(0,u.SF)(t,n,r),(0,d.s)((function(){return(0,u.sk)(t,n,r)})),null},v=function e(t,n){var a,i,l=t.__emotion_real===t,d=l&&t.__emotion_base||t;void 0!==n&&(a=n.label,i=n.target);var f=m(t,n,l),p=f||h(d),v=!p("as");return function(){var y=arguments,b=l&&void 0!==t.__emotion_styles?t.__emotion_styles.slice(0):[];if(void 0!==a&&b.push("label:"+a+";"),null==y[0]||void 0===y[0].raw)b.push.apply(b,y);else{0,b.push(y[0][0]);for(var w=y.length,x=1;x{Array.isArray(e.__emotion_styles)&&(e.__emotion_styles=t(e.__emotion_styles))}},7266:(e,t,n)=>{"use strict";var r=n(4994);t.X4=p,t.e$=h,t.eM=function(e,t){const n=f(e),r=f(t);return(Math.max(n,r)+.05)/(Math.min(n,r)+.05)},t.a=m;var o=r(n(7245)),a=r(n(1098));function i(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:1;return(0,a.default)(e,t,n)}function l(e){e=e.slice(1);const t=new RegExp(".{1,".concat(e.length>=6?2:1,"}"),"g");let n=e.match(t);return n&&1===n[0].length&&(n=n.map((e=>e+e))),n?"rgb".concat(4===n.length?"a":"","(").concat(n.map(((e,t)=>t<3?parseInt(e,16):Math.round(parseInt(e,16)/255*1e3)/1e3)).join(", "),")"):""}function s(e){if(e.type)return e;if("#"===e.charAt(0))return s(l(e));const t=e.indexOf("("),n=e.substring(0,t);if(-1===["rgb","rgba","hsl","hsla","color"].indexOf(n))throw new Error((0,o.default)(9,e));let 
r,a=e.substring(t+1,e.length-1);if("color"===n){if(a=a.split(" "),r=a.shift(),4===a.length&&"/"===a[3].charAt(0)&&(a[3]=a[3].slice(1)),-1===["srgb","display-p3","a98-rgb","prophoto-rgb","rec-2020"].indexOf(r))throw new Error((0,o.default)(10,r))}else a=a.split(",");return a=a.map((e=>parseFloat(e))),{type:n,values:a,colorSpace:r}}const u=e=>{const t=s(e);return t.values.slice(0,3).map(((e,n)=>-1!==t.type.indexOf("hsl")&&0!==n?"".concat(e,"%"):e)).join(" ")};function c(e){const{type:t,colorSpace:n}=e;let{values:r}=e;return-1!==t.indexOf("rgb")?r=r.map(((e,t)=>t<3?parseInt(e,10):e)):-1!==t.indexOf("hsl")&&(r[1]="".concat(r[1],"%"),r[2]="".concat(r[2],"%")),r=-1!==t.indexOf("color")?"".concat(n," ").concat(r.join(" ")):"".concat(r.join(", ")),"".concat(t,"(").concat(r,")")}function d(e){e=s(e);const{values:t}=e,n=t[0],r=t[1]/100,o=t[2]/100,a=r*Math.min(o,1-o),i=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:(e+n/30)%12;return o-a*Math.max(Math.min(t-3,9-t,1),-1)};let l="rgb";const u=[Math.round(255*i(0)),Math.round(255*i(8)),Math.round(255*i(4))];return"hsla"===e.type&&(l+="a",u.push(t[3])),c({type:l,values:u})}function f(e){let t="hsl"===(e=s(e)).type||"hsla"===e.type?s(d(e)).values:e.values;return t=t.map((t=>("color"!==e.type&&(t/=255),t<=.03928?t/12.92:((t+.055)/1.055)**2.4))),Number((.2126*t[0]+.7152*t[1]+.0722*t[2]).toFixed(3))}function p(e,t){return e=s(e),t=i(t),"rgb"!==e.type&&"hsl"!==e.type||(e.type+="a"),"color"===e.type?e.values[3]="/".concat(t):e.values[3]=t,c(e)}function h(e,t){if(e=s(e),t=i(t),-1!==e.type.indexOf("hsl"))e.values[2]*=1-t;else if(-1!==e.type.indexOf("rgb")||-1!==e.type.indexOf("color"))for(let n=0;n<3;n+=1)e.values[n]*=1-t;return c(e)}function m(e,t){if(e=s(e),t=i(t),-1!==e.type.indexOf("hsl"))e.values[2]+=(100-e.values[2])*t;else if(-1!==e.type.indexOf("rgb"))for(let n=0;n<3;n+=1)e.values[n]+=(255-e.values[n])*t;else if(-1!==e.type.indexOf("color"))for(let n=0;n<3;n+=1)e.values[n]+=(1-e.values[n])*t;return 
c(e)}function g(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:.15;return f(e)>.5?h(e,t):m(e,t)}},8052:(e,t,n)=>{"use strict";var r=n(4994);t.Ay=function(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};const{themeId:t,defaultTheme:n=m,rootShouldForwardProp:r=h,slotShouldForwardProp:s=h}=e,c=e=>(0,u.default)((0,o.default)({},e,{theme:v((0,o.default)({},e,{defaultTheme:n,themeId:t}))}));return c.__mui_systemSx=!0,function(e){let u=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};(0,i.internal_processStyles)(e,(e=>e.filter((e=>!(null!=e&&e.__mui_systemSx)))));const{name:d,slot:p,skipVariantsResolver:m,skipSx:w,overridesResolver:x=y(g(p))}=u,S=(0,a.default)(u,f),k=void 0!==m?m:p&&"Root"!==p&&"root"!==p||!1,E=w||!1;let C=h;"Root"===p||"root"===p?C=r:p?C=s:function(e){return"string"===typeof e&&e.charCodeAt(0)>96}(e)&&(C=void 0);const A=(0,i.default)(e,(0,o.default)({shouldForwardProp:C,label:undefined},S)),R=e=>"function"===typeof e&&e.__emotion_real!==e||(0,l.isPlainObject)(e)?r=>b(e,(0,o.default)({},r,{theme:v({theme:r.theme,defaultTheme:n,themeId:t})})):e,P=function(r){let a=R(r);for(var i=arguments.length,l=new Array(i>1?i-1:0),s=1;s{const r=v((0,o.default)({},e,{defaultTheme:n,themeId:t}));if(!r.components||!r.components[d]||!r.components[d].styleOverrides)return null;const a=r.components[d].styleOverrides,i={};return Object.entries(a).forEach((t=>{let[n,a]=t;i[n]=b(a,(0,o.default)({},e,{theme:r}))})),x(e,i)})),d&&!k&&u.push((e=>{var r;const a=v((0,o.default)({},e,{defaultTheme:n,themeId:t}));return b({variants:null==a||null==(r=a.components)||null==(r=r[d])?void 0:r.variants},(0,o.default)({},e,{theme:a}))})),E||u.push(c);const f=u.length-l.length;if(Array.isArray(r)&&f>0){const e=new Array(f).fill("");a=[...r,...e],a.raw=[...r.raw,...e]}const p=A(a,...u);return e.muiName&&(p.muiName=e.muiName),p};return A.withConfig&&(P.withConfig=A.withConfig),P}};var 
o=r(n(4634)),a=r(n(4893)),i=function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var n=p(t);if(n&&n.has(e))return n.get(e);var r={__proto__:null},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var a in e)if("default"!==a&&Object.prototype.hasOwnProperty.call(e,a)){var i=o?Object.getOwnPropertyDescriptor(e,a):null;i&&(i.get||i.set)?Object.defineProperty(r,a,i):r[a]=e[a]}return r.default=e,n&&n.set(e,r),r}(n(3174)),l=n(4534),s=(r(n(578)),r(n(2046)),r(n(4989))),u=r(n(3234));const c=["ownerState"],d=["variants"],f=["name","slot","skipVariantsResolver","skipSx","overridesResolver"];function p(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,n=new WeakMap;return(p=function(e){return e?n:t})(e)}function h(e){return"ownerState"!==e&&"theme"!==e&&"sx"!==e&&"as"!==e}const m=(0,s.default)(),g=e=>e?e.charAt(0).toLowerCase()+e.slice(1):e;function v(e){let{defaultTheme:t,theme:n,themeId:r}=e;return o=n,0===Object.keys(o).length?t:n[r]||n;var o}function y(e){return e?(t,n)=>n[e]:null}function b(e,t){let{ownerState:n}=t,r=(0,a.default)(t,c);const i="function"===typeof e?e((0,o.default)({ownerState:n},r)):e;if(Array.isArray(i))return i.flatMap((e=>b(e,(0,o.default)({ownerState:n},r))));if(i&&"object"===typeof i&&Array.isArray(i.variants)){const{variants:e=[]}=i;let t=(0,a.default)(i,d);return e.forEach((e=>{let a=!0;"function"===typeof e.props?a=e.props((0,o.default)({ownerState:n},r,n)):Object.keys(e.props).forEach((t=>{(null==n?void 0:n[t])!==e.props[t]&&r[t]!==e.props[t]&&(a=!1)})),a&&(Array.isArray(t)||(t=[t]),t.push("function"===typeof e.style?e.style((0,o.default)({ownerState:n},r,n)):e.style))})),t}return i}},9751:(e,t,n)=>{"use strict";n.d(t,{EU:()=>i,NI:()=>a,vf:()=>l,zu:()=>r});const r={xs:0,sm:600,md:900,lg:1200,xl:1536},o={keys:["xs","sm","md","lg","xl"],up:e=>"@media (min-width:".concat(r[e],"px)")};function a(e,t,n){const a=e.theme||{};if(Array.isArray(t)){const 
e=a.breakpoints||o;return t.reduce(((r,o,a)=>(r[e.up(e.keys[a])]=n(t[a]),r)),{})}if("object"===typeof t){const e=a.breakpoints||o;return Object.keys(t).reduce(((o,a)=>{if(-1!==Object.keys(e.values||r).indexOf(a)){o[e.up(a)]=n(t[a],a)}else{const e=a;o[e]=t[e]}return o}),{})}return n(t)}function i(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};var t;return(null==(t=e.keys)?void 0:t.reduce(((t,n)=>(t[e.up(n)]={},t)),{}))||{}}function l(e,t){return e.reduce(((e,t)=>{const n=e[t];return(!n||0===Object.keys(n).length)&&delete e[t],e}),t)}},9703:(e,t,n)=>{"use strict";function r(e,t){const n=this;if(n.vars&&"function"===typeof n.getColorSchemeSelector){const r=n.getColorSchemeSelector(e).replace(/(\[[^\]]+\])/,"*:where($1)");return{[r]:t}}return n.palette.mode===e?t:{}}n.d(t,{A:()=>r})},4853:(e,t,n)=>{"use strict";n.d(t,{A:()=>l});var r=n(8587),o=n(8168);const a=["values","unit","step"],i=e=>{const t=Object.keys(e).map((t=>({key:t,val:e[t]})))||[];return t.sort(((e,t)=>e.val-t.val)),t.reduce(((e,t)=>(0,o.A)({},e,{[t.key]:t.val})),{})};function l(e){const{values:t={xs:0,sm:600,md:900,lg:1200,xl:1536},unit:n="px",step:l=5}=e,s=(0,r.A)(e,a),u=i(t),c=Object.keys(u);function d(e){const r="number"===typeof t[e]?t[e]:e;return"@media (min-width:".concat(r).concat(n,")")}function f(e){const r="number"===typeof t[e]?t[e]:e;return"@media (max-width:".concat(r-l/100).concat(n,")")}function p(e,r){const o=c.indexOf(r);return"@media (min-width:".concat("number"===typeof t[e]?t[e]:e).concat(n,") and ")+"(max-width:".concat((-1!==o&&"number"===typeof t[c[o]]?t[c[o]]:r)-l/100).concat(n,")")}return(0,o.A)({keys:c,values:u,up:d,down:f,between:p,only:function(e){return c.indexOf(e)+1{"use strict";n.d(t,{A:()=>p});var r=n(8168),o=n(8587),a=n(3216),i=n(4853);const l={borderRadius:4};var s=n(8604);var u=n(8812),c=n(7758),d=n(9703);const f=["breakpoints","palette","spacing","shape"];const p=function(){let e=arguments.length>0&&void 
0!==arguments[0]?arguments[0]:{};const{breakpoints:t={},palette:n={},spacing:p,shape:h={}}=e,m=(0,o.A)(e,f),g=(0,i.A)(t),v=function(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:8;if(e.mui)return e;const t=(0,s.LX)({spacing:e}),n=function(){for(var e=arguments.length,n=new Array(e),r=0;r{const n=t(e);return"number"===typeof n?"".concat(n,"px"):n})).join(" ")};return n.mui=!0,n}(p);let y=(0,a.A)({breakpoints:g,direction:"ltr",components:{},palette:(0,r.A)({mode:"light"},n),spacing:v,shape:(0,r.A)({},l,h)},m);y.applyStyles=d.A;for(var b=arguments.length,w=new Array(b>1?b-1:0),x=1;x(0,a.A)(e,t)),y),y.unstable_sxConfig=(0,r.A)({},c.A,null==m?void 0:m.unstable_sxConfig),y.unstable_sx=function(e){return(0,u.A)({sx:e,theme:this})},y}},4989:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>r.A,private_createBreakpoints:()=>o.A,unstable_applyStyles:()=>a.A});var r=n(8280),o=n(4853),a=n(9703)},3815:(e,t,n)=>{"use strict";n.d(t,{A:()=>o});var r=n(3216);const o=function(e,t){return t?(0,r.A)(e,t,{clone:!1}):e}},8604:(e,t,n)=>{"use strict";n.d(t,{LX:()=>h,MA:()=>p,_W:()=>m,Lc:()=>y,Ms:()=>b});var r=n(9751),o=n(7162),a=n(3815);const i={m:"margin",p:"padding"},l={t:"Top",r:"Right",b:"Bottom",l:"Left",x:["Left","Right"],y:["Top","Bottom"]},s={marginX:"mx",marginY:"my",paddingX:"px",paddingY:"py"},u=function(e){const t={};return n=>(void 0===t[n]&&(t[n]=e(n)),t[n])}((e=>{if(e.length>2){if(!s[e])return[e];e=s[e]}const[t,n]=e.split(""),r=i[t],o=l[n]||"";return 
Array.isArray(o)?o.map((e=>r+e)):[r+o]})),c=["m","mt","mr","mb","ml","mx","my","margin","marginTop","marginRight","marginBottom","marginLeft","marginX","marginY","marginInline","marginInlineStart","marginInlineEnd","marginBlock","marginBlockStart","marginBlockEnd"],d=["p","pt","pr","pb","pl","px","py","padding","paddingTop","paddingRight","paddingBottom","paddingLeft","paddingX","paddingY","paddingInline","paddingInlineStart","paddingInlineEnd","paddingBlock","paddingBlockStart","paddingBlockEnd"],f=[...c,...d];function p(e,t,n,r){var a;const i=null!=(a=(0,o.Yn)(e,t,!1))?a:n;return"number"===typeof i?e=>"string"===typeof e?e:i*e:Array.isArray(i)?e=>"string"===typeof e?e:i[e]:"function"===typeof i?i:()=>{}}function h(e){return p(e,"spacing",8)}function m(e,t){if("string"===typeof t||null==t)return t;const n=e(Math.abs(t));return t>=0?n:"number"===typeof n?-n:"-".concat(n)}function g(e,t,n,o){if(-1===t.indexOf(n))return null;const a=function(e,t){return n=>e.reduce(((e,r)=>(e[r]=m(t,n),e)),{})}(u(n),o),i=e[n];return(0,r.NI)(e,i,a)}function v(e,t){const n=h(e.theme);return Object.keys(e).map((r=>g(e,t,r,n))).reduce(a.A,{})}function y(e){return v(e,c)}function b(e){return v(e,d)}function w(e){return v(e,f)}y.propTypes={},y.filterProps=c,b.propTypes={},b.filterProps=d,w.propTypes={},w.filterProps=f},7162:(e,t,n)=>{"use strict";n.d(t,{Ay:()=>l,BO:()=>i,Yn:()=>a});var r=n(410),o=n(9751);function a(e,t){let n=!(arguments.length>2&&void 0!==arguments[2])||arguments[2];if(!t||"string"!==typeof t)return null;if(e&&e.vars&&n){const n="vars.".concat(t).split(".").reduce(((e,t)=>e&&e[t]?e[t]:null),e);if(null!=n)return n}return t.split(".").reduce(((e,t)=>e&&null!=e[t]?e[t]:null),e)}function i(e,t,n){let r,o=arguments.length>3&&void 0!==arguments[3]?arguments[3]:n;return r="function"===typeof e?e(n):Array.isArray(e)?e[n]||o:a(e,n)||o,t&&(r=t(r,o,e)),r}const l=function(e){const{prop:t,cssProperty:n=e.prop,themeKey:l,transform:s}=e,u=e=>{if(null==e[t])return null;const 
u=e[t],c=a(e.theme,l)||{};return(0,o.NI)(e,u,(e=>{let o=i(c,s,e);return e===o&&"string"===typeof e&&(o=i(c,s,"".concat(t).concat("default"===e?"":(0,r.A)(e)),e)),!1===n?o:{[n]:o}}))};return u.propTypes={},u.filterProps=[t],u}},7758:(e,t,n)=>{"use strict";n.d(t,{A:()=>L});var r=n(8604),o=n(7162),a=n(3815);const i=function(){for(var e=arguments.length,t=new Array(e),n=0;n(t.filterProps.forEach((n=>{e[n]=t})),e)),{}),o=e=>Object.keys(e).reduce(((t,n)=>r[n]?(0,a.A)(t,r[n](e)):t),{});return o.propTypes={},o.filterProps=t.reduce(((e,t)=>e.concat(t.filterProps)),[]),o};var l=n(9751);function s(e){return"number"!==typeof e?e:"".concat(e,"px solid")}function u(e,t){return(0,o.Ay)({prop:e,themeKey:"borders",transform:t})}const c=u("border",s),d=u("borderTop",s),f=u("borderRight",s),p=u("borderBottom",s),h=u("borderLeft",s),m=u("borderColor"),g=u("borderTopColor"),v=u("borderRightColor"),y=u("borderBottomColor"),b=u("borderLeftColor"),w=u("outline",s),x=u("outlineColor"),S=e=>{if(void 0!==e.borderRadius&&null!==e.borderRadius){const t=(0,r.MA)(e.theme,"shape.borderRadius",4,"borderRadius"),n=e=>({borderRadius:(0,r._W)(t,e)});return(0,l.NI)(e,e.borderRadius,n)}return null};S.propTypes={},S.filterProps=["borderRadius"];i(c,d,f,p,h,m,g,v,y,b,S,w,x);const k=e=>{if(void 0!==e.gap&&null!==e.gap){const t=(0,r.MA)(e.theme,"spacing",8,"gap"),n=e=>({gap:(0,r._W)(t,e)});return(0,l.NI)(e,e.gap,n)}return null};k.propTypes={},k.filterProps=["gap"];const E=e=>{if(void 0!==e.columnGap&&null!==e.columnGap){const t=(0,r.MA)(e.theme,"spacing",8,"columnGap"),n=e=>({columnGap:(0,r._W)(t,e)});return(0,l.NI)(e,e.columnGap,n)}return null};E.propTypes={},E.filterProps=["columnGap"];const C=e=>{if(void 0!==e.rowGap&&null!==e.rowGap){const t=(0,r.MA)(e.theme,"spacing",8,"rowGap"),n=e=>({rowGap:(0,r._W)(t,e)});return(0,l.NI)(e,e.rowGap,n)}return 
null};C.propTypes={},C.filterProps=["rowGap"];i(k,E,C,(0,o.Ay)({prop:"gridColumn"}),(0,o.Ay)({prop:"gridRow"}),(0,o.Ay)({prop:"gridAutoFlow"}),(0,o.Ay)({prop:"gridAutoColumns"}),(0,o.Ay)({prop:"gridAutoRows"}),(0,o.Ay)({prop:"gridTemplateColumns"}),(0,o.Ay)({prop:"gridTemplateRows"}),(0,o.Ay)({prop:"gridTemplateAreas"}),(0,o.Ay)({prop:"gridArea"}));function A(e,t){return"grey"===t?t:e}i((0,o.Ay)({prop:"color",themeKey:"palette",transform:A}),(0,o.Ay)({prop:"bgcolor",cssProperty:"backgroundColor",themeKey:"palette",transform:A}),(0,o.Ay)({prop:"backgroundColor",themeKey:"palette",transform:A}));function R(e){return e<=1&&0!==e?"".concat(100*e,"%"):e}const P=(0,o.Ay)({prop:"width",transform:R}),T=e=>{if(void 0!==e.maxWidth&&null!==e.maxWidth){const t=t=>{var n,r;const o=(null==(n=e.theme)||null==(n=n.breakpoints)||null==(n=n.values)?void 0:n[t])||l.zu[t];return o?"px"!==(null==(r=e.theme)||null==(r=r.breakpoints)?void 0:r.unit)?{maxWidth:"".concat(o).concat(e.theme.breakpoints.unit)}:{maxWidth:o}:{maxWidth:R(t)}};return(0,l.NI)(e,e.maxWidth,t)}return null};T.filterProps=["maxWidth"];const 
O=(0,o.Ay)({prop:"minWidth",transform:R}),N=(0,o.Ay)({prop:"height",transform:R}),_=(0,o.Ay)({prop:"maxHeight",transform:R}),M=(0,o.Ay)({prop:"minHeight",transform:R}),L=((0,o.Ay)({prop:"size",cssProperty:"width",transform:R}),(0,o.Ay)({prop:"size",cssProperty:"height",transform:R}),i(P,T,O,N,_,M,(0,o.Ay)({prop:"boxSizing"})),{border:{themeKey:"borders",transform:s},borderTop:{themeKey:"borders",transform:s},borderRight:{themeKey:"borders",transform:s},borderBottom:{themeKey:"borders",transform:s},borderLeft:{themeKey:"borders",transform:s},borderColor:{themeKey:"palette"},borderTopColor:{themeKey:"palette"},borderRightColor:{themeKey:"palette"},borderBottomColor:{themeKey:"palette"},borderLeftColor:{themeKey:"palette"},outline:{themeKey:"borders",transform:s},outlineColor:{themeKey:"palette"},borderRadius:{themeKey:"shape.borderRadius",style:S},color:{themeKey:"palette",transform:A},bgcolor:{themeKey:"palette",cssProperty:"backgroundColor",transform:A},backgroundColor:{themeKey:"palette",transform:A},p:{style:r.Ms},pt:{style:r.Ms},pr:{style:r.Ms},pb:{style:r.Ms},pl:{style:r.Ms},px:{style:r.Ms},py:{style:r.Ms},padding:{style:r.Ms},paddingTop:{style:r.Ms},paddingRight:{style:r.Ms},paddingBottom:{style:r.Ms},paddingLeft:{style:r.Ms},paddingX:{style:r.Ms},paddingY:{style:r.Ms},paddingInline:{style:r.Ms},paddingInlineStart:{style:r.Ms},paddingInlineEnd:{style:r.Ms},paddingBlock:{style:r.Ms},paddingBlockStart:{style:r.Ms},paddingBlockEnd:{style:r.Ms},m:{style:r.Lc},mt:{style:r.Lc},mr:{style:r.Lc},mb:{style:r.Lc},ml:{style:r.Lc},mx:{style:r.Lc},my:{style:r.Lc},margin:{style:r.Lc},marginTop:{style:r.Lc},marginRight:{style:r.Lc},marginBottom:{style:r.Lc},marginLeft:{style:r.Lc},marginX:{style:r.Lc},marginY:{style:r.Lc},marginInline:{style:r.Lc},marginInlineStart:{style:r.Lc},marginInlineEnd:{style:r.Lc},marginBlock:{style:r.Lc},marginBlockStart:{style:r.Lc},marginBlockEnd:{style:r.Lc},displayPrint:{cssProperty:!1,transform:e=>({"@media 
print":{display:e}})},display:{},overflow:{},textOverflow:{},visibility:{},whiteSpace:{},flexBasis:{},flexDirection:{},flexWrap:{},justifyContent:{},alignItems:{},alignContent:{},order:{},flex:{},flexGrow:{},flexShrink:{},alignSelf:{},justifyItems:{},justifySelf:{},gap:{style:k},rowGap:{style:C},columnGap:{style:E},gridColumn:{},gridRow:{},gridAutoFlow:{},gridAutoColumns:{},gridAutoRows:{},gridTemplateColumns:{},gridTemplateRows:{},gridTemplateAreas:{},gridArea:{},position:{},zIndex:{themeKey:"zIndex"},top:{},right:{},bottom:{},left:{},boxShadow:{themeKey:"shadows"},width:{transform:R},maxWidth:{style:T},minWidth:{transform:R},height:{transform:R},maxHeight:{transform:R},minHeight:{transform:R},boxSizing:{},fontFamily:{themeKey:"typography"},fontSize:{themeKey:"typography"},fontStyle:{themeKey:"typography"},fontWeight:{themeKey:"typography"},letterSpacing:{},textTransform:{},lineHeight:{},textAlign:{},typography:{cssProperty:!1,themeKey:"typography"}})},8698:(e,t,n)=>{"use strict";n.d(t,{A:()=>u});var r=n(8168),o=n(8587),a=n(3216),i=n(7758);const l=["sx"],s=e=>{var t,n;const r={systemProps:{},otherProps:{}},o=null!=(t=null==e||null==(n=e.theme)?void 0:n.unstable_sxConfig)?t:i.A;return Object.keys(e).forEach((t=>{o[t]?r.systemProps[t]=e[t]:r.otherProps[t]=e[t]})),r};function u(e){const{sx:t}=e,n=(0,o.A)(e,l),{systemProps:i,otherProps:u}=s(n);let c;return c=Array.isArray(t)?[i,...t]:"function"===typeof t?function(){const e=t(...arguments);return(0,a.Q)(e)?(0,r.A)({},i,e):i}:(0,r.A)({},i,t),(0,r.A)({},u,{sx:c})}},3234:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>r.A,extendSxProp:()=>o.A,unstable_createStyleFunctionSx:()=>r.k,unstable_defaultSxConfig:()=>a.A});var r=n(8812),o=n(8698),a=n(7758)},8812:(e,t,n)=>{"use strict";n.d(t,{A:()=>c,k:()=>s});var r=n(410),o=n(3815),a=n(7162),i=n(9751),l=n(7758);function s(){function e(e,t,n,o){const l={[e]:t,theme:n},s=o[e];if(!s)return{[e]:t};const{cssProperty:u=e,themeKey:c,transform:d,style:f}=s;if(null==t)return 
null;if("typography"===c&&"inherit"===t)return{[e]:t};const p=(0,a.Yn)(n,c)||{};if(f)return f(l);return(0,i.NI)(l,t,(t=>{let n=(0,a.BO)(p,d,t);return t===n&&"string"===typeof t&&(n=(0,a.BO)(p,d,"".concat(e).concat("default"===t?"":(0,r.A)(t)),t)),!1===u?n:{[u]:n}}))}return function t(n){var r;const{sx:a,theme:s={}}=n||{};if(!a)return null;const u=null!=(r=s.unstable_sxConfig)?r:l.A;function c(n){let r=n;if("function"===typeof n)r=n(s);else if("object"!==typeof n)return n;if(!r)return null;const a=(0,i.EU)(s.breakpoints),l=Object.keys(a);let c=a;return Object.keys(r).forEach((n=>{const a=(l=r[n],d=s,"function"===typeof l?l(d):l);var l,d;if(null!==a&&void 0!==a)if("object"===typeof a)if(u[n])c=(0,o.A)(c,e(n,a,s,u));else{const e=(0,i.NI)({theme:s},a,(e=>({[n]:e})));!function(){for(var e=arguments.length,t=new Array(e),n=0;ne.concat(Object.keys(t))),[]),o=new Set(r);return t.every((e=>o.size===Object.keys(e).length))}(e,a)?c=(0,o.A)(c,e):c[n]=t({sx:a,theme:s})}else c=(0,o.A)(c,e(n,a,s,u))})),(0,i.vf)(l,c)}return Array.isArray(a)?a.map(c):c(a)}}const u=s();u.filterProps=["sx"];const c=u},410:(e,t,n)=>{"use strict";n.d(t,{A:()=>o});var r=n(6632);function o(e){if("string"!==typeof e)throw new Error((0,r.A)(7));return e.charAt(0).toUpperCase()+e.slice(1)}},578:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>r.A});var r=n(410)},1098:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>r});const r=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:Number.MIN_SAFE_INTEGER,n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:Number.MAX_SAFE_INTEGER;return Math.max(t,Math.min(e,n))}},3216:(e,t,n)=>{"use strict";n.d(t,{A:()=>i,Q:()=>o});var r=n(8168);function o(e){if("object"!==typeof e||null===e)return!1;const t=Object.getPrototypeOf(e);return(null===t||t===Object.prototype||null===Object.getPrototypeOf(t))&&!(Symbol.toStringTag in e)&&!(Symbol.iterator in e)}function a(e){if(!o(e))return e;const t={};return 
Object.keys(e).forEach((n=>{t[n]=a(e[n])})),t}function i(e,t){let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{clone:!0};const l=n.clone?(0,r.A)({},e):e;return o(e)&&o(t)&&Object.keys(t).forEach((r=>{o(t[r])&&Object.prototype.hasOwnProperty.call(e,r)&&o(e[r])?l[r]=i(e[r],t[r],n):n.clone?l[r]=o(t[r])?a(t[r]):t[r]:l[r]=t[r]})),l}},4534:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>r.A,isPlainObject:()=>r.Q});var r=n(3216)},6632:(e,t,n)=>{"use strict";function r(e){let t="https://mui.com/production-error/?code="+e;for(let n=1;nr})},7245:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>r.A});var r=n(6632)},2046:(e,t,n)=>{"use strict";n.r(t),n.d(t,{default:()=>s,getFunctionName:()=>a});var r=n(9565);const o=/^\s*function(?:\s|\s*\/\*.*\*\/\s*)+([^(\s/]*)\s*/;function a(e){const t="".concat(e).match(o);return t&&t[1]||""}function i(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"";return e.displayName||e.name||a(e)||t}function l(e,t,n){const r=i(t);return e.displayName||(""!==r?"".concat(n,"(").concat(r,")"):n)}function s(e){if(null!=e){if("string"===typeof e)return e;if("function"===typeof e)return i(e,"Component");if("object"===typeof e)switch(e.$$typeof){case r.ForwardRef:return l(e,e.render,"ForwardRef");case r.Memo:return l(e,e.type,"memo");default:return}}}},8609:(e,t)=>{"use strict";var n,r=Symbol.for("react.element"),o=Symbol.for("react.portal"),a=Symbol.for("react.fragment"),i=Symbol.for("react.strict_mode"),l=Symbol.for("react.profiler"),s=Symbol.for("react.provider"),u=Symbol.for("react.context"),c=Symbol.for("react.server_context"),d=Symbol.for("react.forward_ref"),f=Symbol.for("react.suspense"),p=Symbol.for("react.suspense_list"),h=Symbol.for("react.memo"),m=Symbol.for("react.lazy"),g=Symbol.for("react.offscreen");function v(e){if("object"===typeof e&&null!==e){var t=e.$$typeof;switch(t){case r:switch(e=e.type){case a:case l:case i:case f:case p:return e;default:switch(e=e&&e.$$typeof){case c:case u:case d:case m:case h:case 
s:return e;default:return t}}case o:return t}}}n=Symbol.for("react.module.reference"),t.ForwardRef=d,t.Memo=h},9565:(e,t,n)=>{"use strict";e.exports=n(8609)},219:(e,t,n)=>{"use strict";var r=n(3763),o={childContextTypes:!0,contextType:!0,contextTypes:!0,defaultProps:!0,displayName:!0,getDefaultProps:!0,getDerivedStateFromError:!0,getDerivedStateFromProps:!0,mixins:!0,propTypes:!0,type:!0},a={name:!0,length:!0,prototype:!0,caller:!0,callee:!0,arguments:!0,arity:!0},i={$$typeof:!0,compare:!0,defaultProps:!0,displayName:!0,propTypes:!0,type:!0},l={};function s(e){return r.isMemo(e)?i:l[e.$$typeof]||o}l[r.ForwardRef]={$$typeof:!0,render:!0,defaultProps:!0,displayName:!0,propTypes:!0},l[r.Memo]=i;var u=Object.defineProperty,c=Object.getOwnPropertyNames,d=Object.getOwnPropertySymbols,f=Object.getOwnPropertyDescriptor,p=Object.getPrototypeOf,h=Object.prototype;e.exports=function e(t,n,r){if("string"!==typeof n){if(h){var o=p(n);o&&o!==h&&e(t,o,r)}var i=c(n);d&&(i=i.concat(d(n)));for(var l=s(t),m=s(n),g=0;g{"use strict";var n="function"===typeof Symbol&&Symbol.for,r=n?Symbol.for("react.element"):60103,o=n?Symbol.for("react.portal"):60106,a=n?Symbol.for("react.fragment"):60107,i=n?Symbol.for("react.strict_mode"):60108,l=n?Symbol.for("react.profiler"):60114,s=n?Symbol.for("react.provider"):60109,u=n?Symbol.for("react.context"):60110,c=n?Symbol.for("react.async_mode"):60111,d=n?Symbol.for("react.concurrent_mode"):60111,f=n?Symbol.for("react.forward_ref"):60112,p=n?Symbol.for("react.suspense"):60113,h=n?Symbol.for("react.suspense_list"):60120,m=n?Symbol.for("react.memo"):60115,g=n?Symbol.for("react.lazy"):60116,v=n?Symbol.for("react.block"):60121,y=n?Symbol.for("react.fundamental"):60117,b=n?Symbol.for("react.responder"):60118,w=n?Symbol.for("react.scope"):60119;function x(e){if("object"===typeof e&&null!==e){var t=e.$$typeof;switch(t){case r:switch(e=e.type){case c:case d:case a:case l:case i:case p:return e;default:switch(e=e&&e.$$typeof){case u:case f:case g:case m:case 
s:return e;default:return t}}case o:return t}}}function S(e){return x(e)===d}t.AsyncMode=c,t.ConcurrentMode=d,t.ContextConsumer=u,t.ContextProvider=s,t.Element=r,t.ForwardRef=f,t.Fragment=a,t.Lazy=g,t.Memo=m,t.Portal=o,t.Profiler=l,t.StrictMode=i,t.Suspense=p,t.isAsyncMode=function(e){return S(e)||x(e)===c},t.isConcurrentMode=S,t.isContextConsumer=function(e){return x(e)===u},t.isContextProvider=function(e){return x(e)===s},t.isElement=function(e){return"object"===typeof e&&null!==e&&e.$$typeof===r},t.isForwardRef=function(e){return x(e)===f},t.isFragment=function(e){return x(e)===a},t.isLazy=function(e){return x(e)===g},t.isMemo=function(e){return x(e)===m},t.isPortal=function(e){return x(e)===o},t.isProfiler=function(e){return x(e)===l},t.isStrictMode=function(e){return x(e)===i},t.isSuspense=function(e){return x(e)===p},t.isValidElementType=function(e){return"string"===typeof e||"function"===typeof e||e===a||e===d||e===l||e===i||e===p||e===h||"object"===typeof e&&null!==e&&(e.$$typeof===g||e.$$typeof===m||e.$$typeof===s||e.$$typeof===u||e.$$typeof===f||e.$$typeof===y||e.$$typeof===b||e.$$typeof===w||e.$$typeof===v)},t.typeOf=x},3763:(e,t,n)=>{"use strict";e.exports=n(4983)},2730:(e,t,n)=>{"use strict";var r=n(5043),o=n(8853);function a(e){for(var t="https://reactjs.org/docs/error-decoder.html?invariant="+e,n=1;n