summaryrefslogtreecommitdiff
path: root/code/test/vid
diff options
context:
space:
mode:
Diffstat (limited to 'code/test/vid')
-rw-r--r--code/test/vid/Makefile22
-rw-r--r--code/test/vid/cap.c576
-rwxr-xr-xcode/test/vid/cap.sh3
-rw-r--r--code/test/vid/client.c123
-rw-r--r--code/test/vid/display.c68
-rw-r--r--code/test/vid/display.h17
-rw-r--r--code/test/vid/enc.c168
-rw-r--r--code/test/vid/enc.h7
-rw-r--r--code/test/vid/server.c76
-rw-r--r--code/test/vid/server.h7
-rw-r--r--code/test/vid/tools.c165
-rw-r--r--code/test/vid/tools.h37
12 files changed, 1269 insertions, 0 deletions
diff --git a/code/test/vid/Makefile b/code/test/vid/Makefile
new file mode 100644
index 0000000..03fdaba
--- /dev/null
+++ b/code/test/vid/Makefile
@@ -0,0 +1,22 @@
+include ../../ecp/Makefile.platform
+
+LIBVPX_HOME=/opt/my/libvpx
+# NOTE: $(shell uname -r) is required here. The previous `$(uname -r)` is a
+# make *variable* reference (to an undefined variable named "uname -r") and
+# expands to the empty string, silently dropping the kernel header path.
+CFLAGS=$(CFLAGS_PL) -D_V4L2_KERNEL_ -I/usr/src/linux-headers-$(shell uname -r) -I$(LIBVPX_HOME) -I/opt/local/include/SDL2 -I../../ecp -I../../util -Wno-int-to-void-pointer-cast
+LDFLAGS=$(LDFLAGS_PL) -L$(LIBVPX_HOME) -L/opt/local/lib
+dep=../init_vconn.o ../../ecp/build-posix/*.a ../../util/libecputil.a
+
+all: cap
+
+%.o: %.c
+	$(CC) $(CFLAGS) -c $<
+
+cap: cap.o enc.o tools.o server.o
+	$(CC) -o $@ $< enc.o tools.o server.o $(dep) -lvpx $(LDFLAGS)
+
+client: client.o display.o tools.o
+	$(CC) -o $@ $< display.o tools.o $(dep) -lvpx -lSDL2 $(LDFLAGS)
+
+clean:
+	rm -f *.o
+	rm -f cap client
+
diff --git a/code/test/vid/cap.c b/code/test/vid/cap.c
new file mode 100644
index 0000000..e37c28f
--- /dev/null
+++ b/code/test/vid/cap.c
@@ -0,0 +1,576 @@
+/*
+ * OpenCV BSD3 License
+ * videodev2.h BSD License (dual license)
+ * If you raise some license issue here simply don't use it and let us know
+ *
+ * Copyright (C) 2016 the contributors
+ *
+ * Luiz Vitor Martinez Cardoso
+ * Jules Thuillier
+ * @lex (avafinger)
+ *
+ * gcc cap.c -o cap $(pkg-config --libs --cflags opencv) -lm
+ *
+ * gcc -I/usr/src/linux-headers-VERSION/ cap.c -o cap $(pkg-config --libs --cflags opencv) -lm -O3
+ *
+*/
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <stdint.h>
+#include <unistd.h>
+#include <malloc.h>
+#include <string.h>
+#include <sys/mman.h>
+#include <sys/time.h>
+#include <sys/ioctl.h>
+
+/* -----------------------------------------------------------------------------
+ * BananaPi M64 / Pine64+ (A64) or if you want to control Exposure,Hflip,Vflip
+ * -----------------------------------------------------------------------------
+ * _V4L2_KERNEL_ should be defined and point to: /usr/src/linux-headers-version
+ *
+ * build with: gcc -I/usr/src/linux-headers-3.10.102/ cap.c -o cap $(pkg-config --libs --cflags opencv) -lm -O3
+ *
+ *
+ * -----------------------------------------------------------------------------
+ * OrangePi / BananaPi / NanoPi (H3) / BPI-M3 (A83T - ov5640 & ov8865)
+ * -----------------------------------------------------------------------------
+ * _V4L2_KERNEL_ should not be defined unless you want Exposure, Hflip and Vflip
+ *
+ * build with: gcc cap.c -o cap $(pkg-config --libs --cflags opencv) -lm
+ *
+ *
+*/
+//#define _V4L2_KERNEL_ // BananaPi M64 / Pine64+ only or for setting Exposure,Hflip,Vflip
+
+#ifdef _V4L2_KERNEL_
+/* --- A64 --- */
+#include <linux/videodev2.h>
+#else
+/* --- H3 / A83T --- */
+#include "videodev2.h"
+#endif
+
+#ifdef _V4L2_KERNEL_
+#define V4L2_MODE_VIDEO 0x0002 /* video capture */
+#define V4L2_MODE_IMAGE 0x0003 /* image capture */
+#define V4L2_MODE_PREVIEW 0x0004 /* preview capture */
+#endif
+
+#define N_BUFFERS 4
+#define CAP_OK 0
+#define CAP_ERROR -1
+#define CAP_ERROR_RET(s) { \
+ fprintf(stderr, "v4l2: %s\n", s); \
+ return CAP_ERROR; \
+ }
+#define CAP_CLIP(val, min, max) (((val) > (max)) ? (max) : (((val) < (min)) ? (min) : (val)))
+
+#define CLEAR(x) memset (&(x), 0, sizeof (x))
+#define ALIGN_4K(x) (((x) + (4095)) & ~(4095))
+#define ALIGN_16B(x) (((x) + (15)) & ~(15))
+
+#include "server.h"
+#include "enc.h"
+
+typedef struct {
+ void *start;
+ size_t length;
+} v4l2_buffer_t;
+
+int width;
+int height;
+v4l2_buffer_t *buffers = NULL;
+int n_buffers = N_BUFFERS;
+int sensor_video_mode;
+int sensor_exposure;
+int sensor_hflip;
+int sensor_vflip;
+
+/* Return the current wall-clock time in seconds (microsecond resolution),
+ * or 0.0 if gettimeofday() fails. Used only for the FPS statistics. */
+double get_wall_time()
+{
+ struct timeval time;
+ if (gettimeofday(&time, NULL))
+ return 0.;
+ return (double) time.tv_sec + (double) time.tv_usec * .000001;
+}
+
+/*
+ * Convert one planar YUV420 frame (globals width x height) to packed BGR24.
+ * in:     Y plane followed by quarter-size U and V planes
+ * length: size of `in` in bytes (must be >= width*height*3/2)
+ * out:    caller-provided buffer of at least width*height*3 bytes
+ * Returns CAP_OK on success, CAP_ERROR if `in` is too small.
+ *
+ * Fixes over the previous revision:
+ *  - the inner loop declared `int y`, shadowing the row counter, so the
+ *    end-of-row chroma rewind tested the luma *sample* instead of the row
+ *    index; the sample is now named `luma`.
+ *  - output bytes were accumulated with `+=` into a buffer whose contents
+ *    were never initialized; they are now assigned with `=`.
+ *  - the function returned CAP_ERROR even on success.
+ */
+int yuv420p_to_bgr(void *in, int length, unsigned char *out)
+{
+    uint8_t *yptr, *uptr, *vptr;
+    uint32_t x, y, p;
+
+    if (length < (width * height * 3) / 2)
+        return CAP_ERROR;
+
+    yptr = (uint8_t *) in;
+    uptr = yptr + (width * height);
+    vptr = uptr + (width * height / 4);
+    p = 0;
+
+    for (y = 0; y < height; y++) {
+        for (x = 0; x < width; x++) {
+            int r, g, b;
+            int luma, u, v;
+
+            luma = *(yptr++) << 8;
+            u = uptr[p] - 128;
+            v = vptr[p] - 128;
+
+            /* integer-only BT.601-style conversion, fixed point <<8 */
+            r = (luma + (359 * v)) >> 8;
+            g = (luma - (88 * u) - (183 * v)) >> 8;
+            b = (luma + (454 * u)) >> 8;
+
+            *(out++) = CAP_CLIP(b, 0x00, 0xFF);
+            *(out++) = CAP_CLIP(g, 0x00, 0xFF);
+            *(out++) = CAP_CLIP(r, 0x00, 0xFF);
+
+            /* chroma advances every second column ... */
+            if (x & 1)
+                p++;
+        }
+
+        /* ... and each chroma row is reused for two luma rows (4:2:0) */
+        if (!(y & 1))
+            p -= width / 2;
+    }
+
+    return CAP_OK;
+}
+
+/* ioctl() wrapper that retries up to three times when the call is
+ * interrupted by a signal (EINTR); any other outcome is returned as-is. */
+static int xioctl(int fd, int request, void *arg)
+{
+    int attempts_left = 3;
+    int result;
+
+    for (;;) {
+        result = ioctl(fd, request, arg);
+        if (result != -1 || errno != EINTR || --attempts_left == 0)
+            return result;
+    }
+}
+
+/* Enumerate and print (to stderr) every discrete YUV420 frame size the
+ * device supports. Returns the number of discrete sizes found. */
+int v4l2_display_sizes_pix_format(int fd)
+{
+ int ret = 0;
+ int fsizeind = 0; /*index for supported sizes*/
+ struct v4l2_frmsizeenum fsize;
+
+ fprintf(stderr, "V4L2 pixel sizes:\n");
+
+ CLEAR(fsize);
+ fsize.index = 0;
+ fsize.pixel_format = V4L2_PIX_FMT_YUV420;
+
+ /* VIDIOC_ENUM_FRAMESIZES fails once the index runs past the last
+ * supported size, which terminates the loop. */
+ while ((ret = xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &fsize)) == 0) {
+ fsize.index++;
+ if (fsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+ fprintf(stderr, "( %u x %u ) Pixels\n", fsize.discrete.width, fsize.discrete.height);
+ fsizeind++;
+ }
+ }
+ return fsizeind;
+}
+
+/*
+ * Enumerate and print (to stderr) every pixel format the capture device
+ * advertises: index, fourcc value, fourcc characters and description.
+ * Returns the number of formats found. (The previous revision was declared
+ * int but fell off the end without returning a value — undefined behavior
+ * if a caller ever used the result.)
+ */
+int v4l2_display_pix_format(int fd)
+{
+    struct v4l2_fmtdesc fmt;
+    int index;
+
+    fprintf(stderr, "V4L2 pixel formats:\n");
+
+    index = 0;
+    CLEAR(fmt);
+    fmt.index = index;
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    while (ioctl(fd, VIDIOC_ENUM_FMT, &fmt) != -1) {
+        /* '%c' prints the low byte, so shifting walks the fourcc chars */
+        fprintf(stderr, "%i: [0x%08X] '%c%c%c%c' (%s)\n", index, fmt.pixelformat, fmt.pixelformat >> 0, fmt.pixelformat >> 8, fmt.pixelformat >> 16, fmt.pixelformat >> 24, fmt.description);
+
+        memset(&fmt, 0, sizeof(fmt));
+        fmt.index = ++index;
+        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    }
+
+    return index;
+}
+
+#ifdef _V4L2_KERNEL_
+/* Read the current V4L2 exposure control, then set it to `exposure`.
+ * Logs both values to stderr; returns the last xioctl() result. */
+int v4l2_set_exposure(int fd, int exposure)
+{
+    struct v4l2_control control;
+    int rc;
+
+    fprintf(stderr, "set Exposure: %d\n", exposure);
+    memset(&control, 0, sizeof(control));
+    control.id = V4L2_CID_EXPOSURE;
+    rc = xioctl(fd, VIDIOC_G_CTRL, &control);
+    fprintf(stderr, "rc: %d - get exposure: %d\n", rc, control.value);
+    control.value = exposure;
+    rc = xioctl(fd, VIDIOC_S_CTRL, &control);
+    /* log control.value (what was actually sent), matching the other
+     * v4l2_set_* helpers; the unused v4l2_queryctrl local was removed */
+    fprintf(stderr, "rc: %d - new exposure: %d\n", rc, control.value);
+    return rc;
+}
+
+/* Read the current horizontal-flip control, then set it to `hflip`.
+ * Logs both values to stderr; returns the last xioctl() result. */
+int v4l2_set_hflip(int fd, int hflip)
+{
+    struct v4l2_control control;
+    int rc;
+
+    fprintf(stderr, "set Hflip: %d\n", hflip);
+    memset(&control, 0, sizeof(control));
+    control.id = V4L2_CID_HFLIP;
+    rc = xioctl(fd, VIDIOC_G_CTRL, &control);
+    fprintf(stderr, "rc: %d - get value: %d\n", rc, control.value);
+    control.value = hflip;
+    rc = xioctl(fd, VIDIOC_S_CTRL, &control);
+    fprintf(stderr, "rc: %d - new value: %d\n", rc, control.value);
+    return rc;
+}
+
+/* Read the current vertical-flip control, then set it to `vflip`.
+ * Logs both values to stderr; returns the last xioctl() result. */
+int v4l2_set_vflip(int fd, int vflip)
+{
+    struct v4l2_control control;
+    int rc;
+
+    fprintf(stderr, "set Vflip: %d\n", vflip);
+    memset(&control, 0, sizeof(control));
+    control.id = V4L2_CID_VFLIP;
+    rc = xioctl(fd, VIDIOC_G_CTRL, &control);
+    fprintf(stderr, "rc: %d - get value: %d\n", rc, control.value);
+    control.value = vflip;
+    rc = xioctl(fd, VIDIOC_S_CTRL, &control);
+    fprintf(stderr, "rc: %d - new value: %d\n", rc, control.value);
+    return rc;
+}
+#endif
+
+/*
+ * Query device capabilities, select input 0, program the streaming
+ * parameters (video vs. image capture mode and frame rate) and negotiate a
+ * YUV420 format of the requested global width x height, adopting whatever
+ * size the driver actually grants. Returns CAP_OK or CAP_ERROR.
+ */
+int v4l2_init_camera(int fd)
+{
+    struct v4l2_streamparm parms;
+    struct v4l2_format fmt;
+    struct v4l2_input input;
+    struct v4l2_capability caps;
+
+    CLEAR(fmt);
+    CLEAR(input);
+    CLEAR(caps);
+    /* parms was previously handed to VIDIOC_S_PARM uninitialized */
+    CLEAR(parms);
+
+    if (xioctl(fd, VIDIOC_QUERYCAP, &caps) == -1) {
+        CAP_ERROR_RET("unable to query capabilities.");
+    }
+
+    if (!(caps.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
+        CAP_ERROR_RET("doesn't support video capturing.");
+    }
+
+    fprintf(stderr, "Driver: \"%s\"\n", caps.driver);
+    fprintf(stderr, "Card: \"%s\"\n", caps.card);
+    fprintf(stderr, "Bus: \"%s\"\n", caps.bus_info);
+    /* caps.version is KERNEL_VERSION-encoded (major<<16 | minor<<8 | patch).
+     * The old code used logical && instead of bitwise & (always printing
+     * "1.1") and shifted from the wrong end. */
+    fprintf(stderr, "Version: %d.%d.%d\n", (caps.version >> 16) & 0xff, (caps.version >> 8) & 0xff, caps.version & 0xff);
+    fprintf(stderr, "Capabilities: %08x\n", caps.capabilities);
+
+    input.index = 0;
+    if (xioctl(fd, VIDIOC_ENUMINPUT, &input) == -1) {
+        CAP_ERROR_RET("unable to enumerate input.");
+    }
+
+    fprintf(stderr, "Input: %d\n", input.index);
+    if (xioctl(fd, VIDIOC_S_INPUT, &input.index) == -1) {
+        CAP_ERROR_RET("unable to set input.");
+    }
+
+    parms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    parms.parm.capture.capturemode = sensor_video_mode ? V4L2_MODE_VIDEO : V4L2_MODE_IMAGE;
+    parms.parm.capture.timeperframe.numerator = 1;
+    parms.parm.capture.timeperframe.denominator = sensor_video_mode ? 30 : 7;
+    if (-1 == xioctl(fd, VIDIOC_S_PARM, &parms)) {
+        CAP_ERROR_RET("unable to set stream parm.");
+    }
+
+    v4l2_display_pix_format(fd);
+    v4l2_display_sizes_pix_format(fd);
+    fprintf(stderr, "\n");
+
+    fmt.fmt.pix.width = width;
+    fmt.fmt.pix.height = height;
+    fmt.fmt.pix.field = V4L2_FIELD_NONE; // V4L2_FIELD_ANY;
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
+    //fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
+
+    if (xioctl(fd, VIDIOC_TRY_FMT, &fmt) == -1) {
+        CAP_ERROR_RET("failed trying to set pixel format.");
+    }
+
+    /* the driver may round the requested size; adopt whatever it chose */
+    if (fmt.fmt.pix.width != width || fmt.fmt.pix.height != height) {
+        width = fmt.fmt.pix.width;
+        height = fmt.fmt.pix.height;
+        fprintf(stderr, "Sensor size adjusted to: %dx%d pixels\n", width, height);
+    } else {
+        fprintf(stderr, "Sensor size: %dx%d pixels\n", width, height);
+    }
+
+    if (xioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
+        CAP_ERROR_RET("failed to set pixel format.");
+    }
+
+    switch (fmt.fmt.pix.pixelformat) {
+    case V4L2_PIX_FMT_RGB24:
+        fprintf(stderr, "Pixel Format: V4L2_PIX_FMT_RGB24 [0x%08X]\n", fmt.fmt.pix.pixelformat);
+        break;
+
+    case V4L2_PIX_FMT_YUV420:
+        fprintf(stderr, "Pixel Format: V4L2_PIX_FMT_YUV420 [0x%08X]\n", fmt.fmt.pix.pixelformat);
+        break;
+
+    default:
+        break;
+    }
+
+    return CAP_OK;
+}
+
+/* Request driver buffers (n_buffers in video mode, 1 in image mode), mmap
+ * each one into the global `buffers` array, queue them all and start
+ * streaming. On success writes the buffer count to *buffers_count and
+ * returns CAP_OK; returns CAP_ERROR on any failure (note: buffers already
+ * mmap'd before a failure are not unmapped here). */
+int v4l2_set_mmap(int fd, int *buffers_count)
+{
+ int i;
+ int nbf;
+ enum v4l2_buf_type type;
+ struct v4l2_requestbuffers req;
+ struct v4l2_buffer buf;
+
+ CLEAR(req);
+ req.count = sensor_video_mode ? n_buffers : 1;
+ req.memory = V4L2_MEMORY_MMAP;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (xioctl(fd, VIDIOC_REQBUFS, &req) == -1) {
+ CAP_ERROR_RET("failed requesting buffers.");
+ }
+ /* the driver may grant fewer buffers than requested */
+ nbf = req.count;
+ if (n_buffers != nbf) {
+ CAP_ERROR_RET("insufficient buffer memory.");
+ }
+
+ buffers = (v4l2_buffer_t *) calloc(nbf, sizeof(v4l2_buffer_t));
+ if (!buffers) {
+ CAP_ERROR_RET("failed to allocated buffers memory.");
+ }
+
+ /* map every driver buffer into our address space */
+ for (i = 0; i < nbf; i++) {
+ CLEAR(buf);
+ buf.index = i;
+ buf.memory = V4L2_MEMORY_MMAP;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (xioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) {
+ CAP_ERROR_RET("failed to query buffer.");
+ }
+ buffers[i].length = buf.length;
+ buffers[i].start = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
+ if (MAP_FAILED == buffers[i].start) {
+ CAP_ERROR_RET("failed to mmap buffer.");
+ }
+ }
+
+ /* hand all buffers to the driver before switching the stream on */
+ for (i = 0; i < nbf; i++) {
+ CLEAR(buf);
+ buf.index = i;
+ buf.memory = V4L2_MEMORY_MMAP;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ if (xioctl(fd, VIDIOC_QBUF, &buf) == -1) {
+ CAP_ERROR_RET("failed to queue buffer.");
+ }
+ }
+
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (xioctl(fd, VIDIOC_STREAMON, &type) == -1) {
+ CAP_ERROR_RET("failed to stream on.");
+ }
+
+ *buffers_count = nbf;
+
+ return CAP_OK;
+}
+
+/*
+ * Wait (2s select() timeout, retrying on EINTR) for a filled capture
+ * buffer, dequeue it, feed its YUV420 payload to the encoder — forcing a
+ * keyframe every kframe_interval frames — and re-queue the buffer.
+ * Returns CAP_OK, or CAP_ERROR on timeout/ioctl failure.
+ */
+int v4l2_retrieve_frame(int fd, int buffers_count, vpx_image_t *raw, vpx_codec_ctx_t *codec, int frame_index, int kframe_interval)
+{
+    int sz;
+    fd_set fds;
+    struct timeval tv;
+    struct v4l2_buffer buf;
+    int rc;
+    char err_msg[128];
+    int flags = 0;
+
+    CLEAR(tv);
+    CLEAR(buf);
+
+    rc = 1;
+    while (rc > 0) {
+        FD_ZERO(&fds);
+        FD_SET(fd, &fds);
+
+        tv.tv_sec = 2;
+        tv.tv_usec = 0;
+
+        rc = select(fd + 1, &fds, NULL, NULL, &tv);
+        if (-1 == rc) {
+            if (EINTR == errno) {
+                rc = 1; // interrupted by a signal: try again
+                continue;
+            }
+            CAP_ERROR_RET("failed to select frame.");
+        }
+        /* fd readable (rc > 0) or timeout (rc == 0): stop waiting */
+        break;
+    }
+    if (rc <= 0) {
+        /* snprintf instead of sprintf: cannot overflow err_msg */
+        snprintf(err_msg, sizeof(err_msg), "errno: %d - check sensor, something wrong.", errno);
+        CAP_ERROR_RET(err_msg);
+    }
+
+    buf.memory = V4L2_MEMORY_MMAP;
+    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (xioctl(fd, VIDIOC_DQBUF, &buf) == -1) {
+        CAP_ERROR_RET("failed to retrieve frame.");
+    }
+
+    /* YUV420 frame size: 16-byte-aligned luma width, 3/2 bytes per pixel */
+    sz = ALIGN_16B(width) * height * 3 / 2;
+    if (!vpx_img_read(raw, buffers[buf.index].start, sz)) {
+        die_codec(NULL, "Failed to read image.");
+    }
+    if (frame_index % kframe_interval == 0) flags |= VPX_EFLAG_FORCE_KF;
+    vpx_encode_frame(codec, raw, frame_index, flags);
+
+    if (xioctl(fd, VIDIOC_QBUF, &buf) == -1) {
+        CAP_ERROR_RET("failed to queue buffer.");
+    }
+
+    return CAP_OK;
+}
+
+/*
+ * Stop streaming, unmap and free the capture buffers and close the device.
+ * Returns CAP_OK or CAP_ERROR. (The previous revision was declared int but
+ * never returned a value, and leaked the `buffers` array.)
+ */
+int v4l2_close_camera(int fd, int buffers_count)
+{
+    int i;
+    enum v4l2_buf_type type;
+
+    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (xioctl(fd, VIDIOC_STREAMOFF, &type) == -1) {
+        CAP_ERROR_RET("failed to stream off.");
+    }
+
+    for (i = 0; i < buffers_count; i++)
+        munmap(buffers[i].start, buffers[i].length);
+
+    free(buffers);
+    buffers = NULL;
+
+    close(fd);
+
+    return CAP_OK;
+}
+
+/*
+ * Entry point: parse the 13 command-line arguments, configure the V4L2
+ * camera and VP9 encoder, connect to the ECP video switch, then capture,
+ * encode and transmit frames until the process is killed.
+ */
+int main(int argc, char *argv[])
+{
+    int fd;
+    int target_bitrate = 200;
+    double after;
+    double before;
+    double avg = 0.0, fps = 0.0;    /* avg was previously accumulated uninitialized */
+    int buffers_count;
+    int kframe_interval;
+    vpx_codec_er_flags_t err_resilient;
+    char *address;
+    char *my_key;
+    char *vcs_key;
+
+    if (argc != 14) {
+        CAP_ERROR_RET("./cap <width> <height> <buffers [4,8]> <video mode [0,1]> <exposure [-4,4]> <hflip [0,1]> <vflip [0,1]> <kframe interval> <bitrate> <err resilient> <address> <my key> <vcs pub key>")
+    }
+    width = (int) atoi(argv[1]);
+    height = (int) atoi(argv[2]);
+    n_buffers = (int) atoi(argv[3]);
+    if (n_buffers < 4)
+        n_buffers = 4;
+    if (n_buffers > 8)
+        n_buffers = 8; // enough in VIDEO MODE!!!
+
+    sensor_video_mode = (int) atoi(argv[4]);
+    if (!sensor_video_mode)
+        n_buffers = 1;
+    sensor_exposure = (int) atoi(argv[5]);
+    sensor_hflip = (int) atoi(argv[6]);
+    sensor_vflip = (int) atoi(argv[7]);
+    kframe_interval = (int) atoi(argv[8]);
+    if (kframe_interval <= 0)
+        kframe_interval = 1;    /* guards the % kframe_interval in v4l2_retrieve_frame() */
+    target_bitrate = (int) atoi(argv[9]);
+    err_resilient = strtoul(argv[10], NULL, 0);
+    address = argv[11];
+    my_key = argv[12];
+    vcs_key = argv[13];
+
+    fprintf(stderr, "---- cap parameters -----\nwidth: %d\nheight: %d\nv4l2 buffers: %d\nexposure: %d\nhflip: %d\nvflip: %d\nMode: %s\n", width, height, n_buffers, sensor_exposure, sensor_hflip, sensor_vflip, sensor_video_mode ? "V4L2_MODE_VIDEO" : "V4L2_MODE_IMAGE");
+
+    fd = open("/dev/video0", O_RDWR | O_NONBLOCK);
+    if (fd == -1) {
+        CAP_ERROR_RET("failed to open the camera.");
+    }
+
+    if (v4l2_init_camera(fd) == -1) {
+        CAP_ERROR_RET("failed to init camera.");
+    }
+#ifdef _V4L2_KERNEL_
+    /* sentinel values (-999 / -1) mean "leave the control untouched" */
+    if (sensor_exposure != -999) {
+        v4l2_set_exposure(fd, sensor_exposure);
+    }
+    if (sensor_hflip != -1) {
+        v4l2_set_hflip(fd, sensor_hflip);
+    }
+
+    if (sensor_vflip != -1) {
+        v4l2_set_vflip(fd, sensor_vflip);
+    }
+#endif
+
+    if (v4l2_set_mmap(fd, &buffers_count) == -1) {
+        CAP_ERROR_RET("failed to mmap.");
+    }
+
+    int n = 0;
+    int _fps = 30;
+    const char *codec_arg = "vp9";
+    vpx_codec_ctx_t codec;
+    vpx_image_t raw;
+
+    vpx_open(codec_arg, width, height, _fps, target_bitrate, err_resilient, &codec, &raw);
+    init_server(address, my_key, vcs_key);
+
+    /* capture loop: idle until a client connection is up, then encode */
+    while (1) {
+        if (!conn_is_open()) {
+            sleep(1);
+            continue;
+        }
+        before = get_wall_time();
+        if (v4l2_retrieve_frame(fd, buffers_count, &raw, &codec, n, kframe_interval)) {
+            CAP_ERROR_RET("failed to retrieve frame.");
+        }
+        after = get_wall_time();
+        fps = 1.0 / (after - before);
+        avg += fps;
+        n++;
+        // fprintf(stderr, "FPS[%d]: %.2f\n", i, fps);
+    }
+
+    /* NOTE(review): unreachable — the loop above only exits via the error
+     * return; kept for symmetry should a termination path be added. */
+    vpx_close(&codec, &raw);
+    v4l2_close_camera(fd, buffers_count);
+
+    if (n) {
+        fprintf(stderr, "\n------- Avg FPS: %.2f --------\n\n", (double) (avg / (double) n));
+    }
+
+    return CAP_OK;
+}
diff --git a/code/test/vid/cap.sh b/code/test/vid/cap.sh
new file mode 100755
index 0000000..bfd933e
--- /dev/null
+++ b/code/test/vid/cap.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+./cap 640 480 8 1 -999 -1 -1 25 256 1 0.0.0.0:3000 ../../keys/pine64-home.priv ../../keys/majstor.org.pub
diff --git a/code/test/vid/client.c b/code/test/vid/client.c
new file mode 100644
index 0000000..017967b
--- /dev/null
+++ b/code/test/vid/client.c
@@ -0,0 +1,123 @@
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <stdlib.h>
+
+#include "vpx/vpx_decoder.h"
+#include "vpx/vp8cx.h"
+#include "tools.h"
+#include "display.h"
+
+#include "core.h"
+#include "vconn/vconn.h"
+#include "util.h"
+
+#define CTYPE_TEST 0
+#define MTYPE_MSG 8
+
+#define FRAG_BUF_SIZE 8192
+#define RBUF_MSG_SIZE 128
+
+ECPContext ctx;
+ECPSocket sock;
+ECPConnHandler handler;
+
+ECPNode node;
+ECPConnection conn;
+
+ECPVConnection vconn;
+ECPNode vconn_node;
+
+vpx_codec_ctx_t codec;
+
+ECPRBRecv rbuf_recv;
+ECPRBMessage rbuf_r_msg[RBUF_MSG_SIZE];
+ECPFragIter frag_iter;
+unsigned char frag_buffer[FRAG_BUF_SIZE];
+
+SDLCanvas sdl_canvas;
+
+/*
+ * ECP message handler: decode one received VP9 packet, blit every produced
+ * image into the SDL staging buffer and present it. Returns sz (the whole
+ * payload is always consumed). Decode errors are logged and skipped so one
+ * corrupt packet doesn't kill the stream.
+ */
+ssize_t handle_msg(ECPConnection *conn, ecp_seq_t sq, unsigned char t, unsigned char *f, ssize_t sz, ECP2Buffer *b) {
+    vpx_codec_iter_t iter = NULL;
+    vpx_image_t *img = NULL;
+
+    if (vpx_codec_decode(&codec, f, (unsigned int)sz, NULL, 0)) {
+        /* %zd is the correct conversion for ssize_t (%lu was wrong) */
+        fprintf(stderr, "\n%zd\n", sz);
+        fprintf(stderr, "ERROR!\n");
+        // die_codec(&codec, "Failed to decode frame.");
+    }
+
+    while ((img = vpx_codec_get_frame(&codec, &iter)) != NULL) {
+        if (!vpx_img_write(img, sdl_canvas.yuvBuffer, sdl_canvas.yPlaneSz + 2 * sdl_canvas.uvPlaneSz)) die_codec(NULL, "Failed to write image.");
+    }
+    sdl_display_frame(&sdl_canvas);
+
+    return sz;
+}
+
+/* Print the command-line synopsis to stderr and abort with failure. */
+static void usage(char *arg) {
+    fprintf(stderr, "Usage: %s <node.pub> <vcs.pub>\n", arg);
+    exit(EXIT_FAILURE);
+}
+
+/*
+ * Client entry point: set up the ECP socket/receiver, load the node and
+ * VCS keys given on the command line, initialize the VP9 decoder and the
+ * SDL window, open the virtual connection and run the SDL event loop until
+ * the window is closed.
+ */
+int main(int argc, char *argv[]) {
+    int rv;
+
+    if (argc != 3) usage(argv[0]);
+
+    rv = ecp_init(&ctx);
+    fprintf(stderr, "ecp_init RV:%d\n", rv);
+
+    if (!rv) rv = ecp_conn_handler_init(&handler);
+    if (!rv) {
+        handler.msg[MTYPE_MSG] = handle_msg;
+        ctx.handler[CTYPE_TEST] = &handler;
+    }
+
+    if (!rv) rv = ecp_sock_create(&sock, &ctx, NULL);
+    fprintf(stderr, "ecp_sock_create RV:%d\n", rv);
+
+    if (!rv) rv = ecp_sock_open(&sock, NULL);
+    fprintf(stderr, "ecp_sock_open RV:%d\n", rv);
+
+    if (!rv) rv = ecp_start_receiver(&sock);
+    fprintf(stderr, "ecp_start_receiver RV:%d\n", rv);
+
+    if (!rv) rv = ecp_util_node_load(&ctx, &node, argv[1]);
+    fprintf(stderr, "ecp_util_node_load RV:%d\n", rv);
+
+    if (!rv) rv = ecp_util_node_load(&ctx, &vconn_node, argv[2]);
+    /* all diagnostics go to stderr (this one used printf before) */
+    fprintf(stderr, "ecp_util_node_load RV:%d\n", rv);
+
+    if (!rv) rv = ecp_conn_create(&conn, &sock, CTYPE_TEST);
+    fprintf(stderr, "ecp_conn_create RV:%d\n", rv);
+
+    if (!rv) rv = ecp_rbuf_create(&conn, NULL, NULL, 0, &rbuf_recv, rbuf_r_msg, RBUF_MSG_SIZE);
+    fprintf(stderr, "ecp_rbuf_create RV:%d\n", rv);
+
+    if (!rv) {
+        ecp_frag_iter_init(&frag_iter, frag_buffer, FRAG_BUF_SIZE);
+        rbuf_recv.frag_iter = &frag_iter;
+    }
+
+    const char *codec_arg = "vp9";
+    const VpxInterface *decoder = get_vpx_decoder_by_name(codec_arg);
+    if (!decoder) die_codec(NULL, "Unknown input codec.");
+
+    fprintf(stderr, "Using %s\n", vpx_codec_iface_name(decoder->codec_interface()));
+
+    if (vpx_codec_dec_init(&codec, decoder->codec_interface(), NULL, 0))
+        die_codec(&codec, "Failed to initialize decoder.");
+
+    sdl_open(&sdl_canvas, 640, 480);
+
+    // if (!rv) rv = ecp_conn_open(&conn, &node);
+    // fprintf(stderr, "ecp_conn_open RV:%d\n", rv);
+
+    if (!rv) rv = ecp_vconn_open(&conn, &node, &vconn, &vconn_node, 1);
+    fprintf(stderr, "ecp_vconn_open RV:%d\n", rv);
+
+    sdl_loop();
+    sdl_close(&sdl_canvas);
+    if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec");
+
+    return 0;
+}
diff --git a/code/test/vid/display.c b/code/test/vid/display.c
new file mode 100644
index 0000000..d378431
--- /dev/null
+++ b/code/test/vid/display.c
@@ -0,0 +1,68 @@
+#include <stdio.h>
+
+#include "display.h"
+
+/*
+ * Initialize SDL video, create a window/renderer/streaming-YV12 texture of
+ * img_width x img_height and allocate the YUV staging buffer. Exits the
+ * process on any failure (consistent with the other SDL error paths).
+ */
+void sdl_open(SDLCanvas *o, int img_width, int img_height) {
+    // Initialize the SDL
+    if (SDL_Init(SDL_INIT_VIDEO) != 0) {
+        fprintf(stderr, "SDL_Init() Failed: %s\n", SDL_GetError());
+        exit(1);
+    }
+
+    o->display = SDL_CreateWindow("SDL Tutorial",
+                                  SDL_WINDOWPOS_UNDEFINED,
+                                  SDL_WINDOWPOS_UNDEFINED,
+                                  img_width, img_height, 0);
+    if (o->display == NULL) {
+        fprintf(stderr, "SDL_SetVideoMode() Failed: %s\n", SDL_GetError());
+        exit(1);
+    }
+
+    o->renderer = SDL_CreateRenderer(o->display, -1, 0);
+    if (o->renderer == NULL) {
+        fprintf(stderr, "SDL_CreateRenderer() Failed: %s\n", SDL_GetError());
+        exit(1);
+    }
+
+    o->texture = SDL_CreateTexture(o->renderer, SDL_PIXELFORMAT_YV12, SDL_TEXTUREACCESS_STREAMING, img_width, img_height);
+    if (o->texture == NULL) {
+        fprintf(stderr, "SDL_CreateTextureFromSurface() Failed: %s\n", SDL_GetError());
+        exit(1);
+    }
+
+    /* one full Y plane plus two quarter-size chroma planes (YUV 4:2:0) */
+    o->yPlaneSz = img_width * img_height;
+    o->uvPlaneSz = img_width * img_height / 4;
+    o->yuvBuffer = (Uint8*)malloc(o->yPlaneSz + 2 * o->uvPlaneSz);
+    if (o->yuvBuffer == NULL) {
+        /* previously unchecked: a failed malloc crashed later in memcpy */
+        fprintf(stderr, "malloc() Failed: out of memory\n");
+        exit(1);
+    }
+    o->yPitch = img_width;
+    o->uvPitch = img_width / 2;
+}
+
+/* Release the YUV staging buffer and all SDL objects created by
+ * sdl_open(), then shut SDL down. Call at most once per sdl_open(). */
+void sdl_close(SDLCanvas *o) {
+ free(o->yuvBuffer);
+ SDL_DestroyTexture(o->texture);
+ SDL_DestroyRenderer(o->renderer);
+ SDL_DestroyWindow(o->display);
+ SDL_Quit();
+}
+
+/* Upload the staged Y/U/V planes (contiguous in yuvBuffer) to the
+ * streaming texture and present the frame. */
+void sdl_display_frame(SDLCanvas *o) {
+ SDL_UpdateYUVTexture(o->texture, NULL, o->yuvBuffer, o->yPitch, o->yuvBuffer + o->yPlaneSz, o->uvPitch, o->yuvBuffer + o->yPlaneSz + o->uvPlaneSz, o->uvPitch);
+ SDL_RenderClear(o->renderer);
+ SDL_RenderCopy(o->renderer, o->texture, NULL, NULL);
+ SDL_RenderPresent(o->renderer);
+}
+
+/*
+ * Run the SDL event loop until the window is closed (SDL_QUIT).
+ * Uses SDL_WaitEvent so the thread sleeps between events instead of
+ * busy-spinning at 100% CPU on SDL_PollEvent as before; also exits if
+ * SDL_WaitEvent reports an error (returns 0).
+ */
+void sdl_loop(void) {
+    SDL_Event event;
+
+    while (SDL_WaitEvent(&event)) {
+        if (event.type == SDL_QUIT)
+            break;
+    }
+}
diff --git a/code/test/vid/display.h b/code/test/vid/display.h
new file mode 100644
index 0000000..38e07be
--- /dev/null
+++ b/code/test/vid/display.h
@@ -0,0 +1,17 @@
+#include "SDL.h"
+
+/* Bundles the SDL objects and the YUV staging buffer for one video window
+ * (created by sdl_open(), released by sdl_close()). */
+typedef struct SDLCanvas {
+ SDL_Window *display;
+ SDL_Renderer *renderer;
+ SDL_Texture *texture; /* streaming YV12 texture */
+ Uint8 *yuvBuffer; /* Y plane followed by the two chroma planes */
+ size_t yPlaneSz; /* width * height */
+ size_t uvPlaneSz; /* width * height / 4 */
+ int yPitch; /* bytes per Y row (width) */
+ int uvPitch; /* bytes per U/V row (width / 2) */
+} SDLCanvas;
+
+void sdl_open(SDLCanvas *o, int img_width, int img_height);
+void sdl_close(SDLCanvas *o);
+void sdl_display_frame(SDLCanvas *o);
+void sdl_loop(void);
diff --git a/code/test/vid/enc.c b/code/test/vid/enc.c
new file mode 100644
index 0000000..34d0be8
--- /dev/null
+++ b/code/test/vid/enc.c
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Simple Encoder
+// ==============
+//
+// This is an example of a simple encoder loop. It takes an input file in
+// YV12 format, passes it through the encoder, and writes the compressed
+// frames to disk in IVF format. Other decoder examples build upon this
+// one.
+//
+// The details of the IVF format have been elided from this example for
+// simplicity of presentation, as IVF files will not generally be used by
+// your application. In general, an IVF file consists of a file header,
+// followed by a variable number of frames. Each frame consists of a frame
+// header followed by a variable length payload. The length of the payload
+// is specified in the first four bytes of the frame header. The payload is
+// the raw compressed data.
+//
+// Standard Includes
+// -----------------
+// For encoders, you only have to include `vpx_encoder.h` and then any
+// header files for the specific codecs you use. In this case, we're using
+// vp8.
+//
+// Getting The Default Configuration
+// ---------------------------------
+// Encoders have the notion of "usage profiles." For example, an encoder
+// may want to publish default configurations for both a video
+// conferencing application and a best quality offline encoder. These
+// obviously have very different default settings. Consult the
+// documentation for your codec to see if it provides any default
+// configurations. All codecs provide a default configuration, number 0,
+// which is valid for material in the vacinity of QCIF/QVGA.
+//
+// Updating The Configuration
+// ---------------------------------
+// Almost all applications will want to update the default configuration
+// with settings specific to their usage. Here we set the width and height
+// of the video file to that specified on the command line. We also scale
+// the default bitrate based on the ratio between the default resolution
+// and the resolution specified on the command line.
+//
+// Initializing The Codec
+// ----------------------
+// The encoder is initialized by the following code.
+//
+// Encoding A Frame
+// ----------------
+// The frame is read as a continuous block (size width * height * 3 / 2)
+// from the input file. If a frame was read (the input file has not hit
+// EOF) then the frame is passed to the encoder. Otherwise, a NULL
+// is passed, indicating the End-Of-Stream condition to the encoder. The
+// `frame_cnt` is reused as the presentation time stamp (PTS) and each
+// frame is shown for one frame-time in duration. The flags parameter is
+// unused in this example. The deadline is set to VPX_DL_REALTIME to
+// make the example run as quickly as possible.
+
+// Forced Keyframes
+// ----------------
+// Keyframes can be forced by setting the VPX_EFLAG_FORCE_KF bit of the
+// flags passed to `vpx_codec_control()`. In this example, we force a
+// keyframe every <keyframe-interval> frames. Note, the output stream can
+// contain additional keyframes beyond those that have been forced using the
+// VPX_EFLAG_FORCE_KF flag because of automatic keyframe placement by the
+// encoder.
+//
+// Processing The Encoded Data
+// ---------------------------
+// Each packet of type `VPX_CODEC_CX_FRAME_PKT` contains the encoded data
+// for this frame. We write a IVF frame header, followed by the raw data.
+//
+// Cleanup
+// -------
+// The `vpx_codec_destroy` call frees any memory allocated by the codec.
+//
+// Error Handling
+// --------------
+// This example does not special case any error return codes. If there was
+// an error, a descriptive message is printed and the program exits. With
+// few exeptions, vpx_codec functions return an enumerated error status,
+// with the value `0` indicating success.
+//
+// Error Resiliency Features
+// -------------------------
+// Error resiliency is controlled by the g_error_resilient member of the
+// configuration structure. Use the `decode_with_drops` example to decode with
+// frames 5-10 dropped. Compare the output for a file encoded with this example
+// versus one encoded with the `simple_encoder` example.
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "enc.h"
+#include "server.h"
+
+/*
+ * Push one raw frame (img == NULL flushes the encoder) and forward every
+ * produced compressed packet via send_frame(). Prints "K" for keyframes
+ * and "." for delta frames to stderr. Returns non-zero while the encoder
+ * still emits packets (vpx_close() uses this to drain it).
+ */
+int vpx_encode_frame(vpx_codec_ctx_t *codec, vpx_image_t *img, int frame_index, int flags) {
+    int got_pkts = 0;
+    vpx_codec_iter_t iter = NULL;
+    const vpx_codec_cx_pkt_t *pkt = NULL;
+    const vpx_codec_err_t res =
+        vpx_codec_encode(codec, img, frame_index, 1, flags, VPX_DL_REALTIME);
+    if (res != VPX_CODEC_OK) die_codec(codec, "Failed to encode frame");
+
+    while ((pkt = vpx_codec_get_cx_data(codec, &iter)) != NULL) {
+        got_pkts = 1;
+
+        if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
+            const int keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
+            if (send_frame(pkt->data.frame.buf, pkt->data.frame.sz, pkt->data.frame.pts) < 0) {
+                die_codec(codec, "Failed to write compressed frame");
+            }
+            fprintf(stderr, keyframe ? "K" : ".");
+            /* flush the stream we actually wrote to (was fflush(stdout)) */
+            fflush(stderr);
+        }
+    }
+
+    return got_pkts;
+}
+
+
+/* Allocate the raw I420 image and initialize a realtime CBR encoder
+ * (vp8/vp9 chosen by codec_arg) for width x height at the given fps,
+ * bitrate (kbit/s per libvpx convention) and error-resilience flags.
+ * Dies via die_codec() on any failure. */
+void vpx_open(const char *codec_arg, int width, int height, int fps, int bitrate, vpx_codec_er_flags_t err_resilient, vpx_codec_ctx_t *codec, vpx_image_t *raw) {
+ vpx_codec_enc_cfg_t cfg;
+ vpx_codec_err_t res;
+
+ const VpxInterface *encoder = get_vpx_encoder_by_name(codec_arg);
+ if (!encoder) die_codec(NULL, "Unsupported codec.");
+
+ fprintf(stderr, "Using %s\n", vpx_codec_iface_name(encoder->codec_interface()));
+
+ if (!vpx_img_alloc(raw, VPX_IMG_FMT_I420, width, height, 1))
+ die_codec(NULL, "Failed to allocate image.");
+
+ res = vpx_codec_enc_config_default(encoder->codec_interface(), &cfg, 0);
+ if (res) die_codec(NULL, "Failed to get default codec config.");
+
+ /* low-latency streaming setup: CBR, no lag frames */
+ cfg.g_w = width;
+ cfg.g_h = height;
+ cfg.g_timebase.num = 1;
+ cfg.g_timebase.den = fps;
+ cfg.rc_target_bitrate = bitrate;
+ cfg.g_error_resilient = err_resilient;
+ cfg.g_lag_in_frames = 0;
+ cfg.rc_end_usage = VPX_CBR;
+
+ if (vpx_codec_enc_init(codec, encoder->codec_interface(), &cfg, 0))
+ die_codec(codec, "Failed to initialize encoder");
+
+ /* 8 = fastest speed/quality trade-off, needed for realtime encode */
+ if (vpx_codec_control(codec, VP8E_SET_CPUUSED, 8))
+ die_codec(codec, "Failed to initialize cpuused");
+}
+
+/* Drain any pending packets out of the encoder, free the raw image and
+ * destroy the codec context. */
+void vpx_close(vpx_codec_ctx_t *codec, vpx_image_t *raw) {
+ // Flush encoder.
+ while (vpx_encode_frame(codec, NULL, -1, 0)) {
+ }
+
+ vpx_img_free(raw);
+ if (vpx_codec_destroy(codec)) die_codec(codec, "Failed to destroy codec.");
+}
diff --git a/code/test/vid/enc.h b/code/test/vid/enc.h
new file mode 100644
index 0000000..5acd8ce
--- /dev/null
+++ b/code/test/vid/enc.h
@@ -0,0 +1,7 @@
+#include "vpx/vpx_encoder.h"
+#include "vpx/vp8cx.h"
+#include "tools.h"
+
+int vpx_encode_frame(vpx_codec_ctx_t *codec, vpx_image_t *img, int frame_index, int flags);
+void vpx_open(const char *codec_arg, int width, int height, int fps, int bitrate, vpx_codec_er_flags_t err_resilient, vpx_codec_ctx_t *codec, vpx_image_t *raw);
+void vpx_close(vpx_codec_ctx_t *codec, vpx_image_t *raw);
diff --git a/code/test/vid/server.c b/code/test/vid/server.c
new file mode 100644
index 0000000..9d43b7a
--- /dev/null
+++ b/code/test/vid/server.c
@@ -0,0 +1,76 @@
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <stdlib.h>
+
+#include "server.h"
+
+static ECPContext ctx;
+static ECPSocket sock;
+static ECPDHKey key_perma;
+static ECPConnHandler handler;
+
+static ECPConnection *conn_in;
+static int is_open = 0;
+
+#define CTYPE_TEST 0
+#define MTYPE_MSG 8
+
+ECPNode node;
+ECPConnection conn;
+
+/* ECP_MTYPE_OPEN handler: run the default open handling, then record the
+ * newly opened connection so send_frame() can use it.
+ * NOTE(review): conn_in/is_open are written here (receiver context) and
+ * read by the capture loop without synchronization — presumably benign on
+ * the target platform, but worth confirming. */
+static ssize_t handle_open(ECPConnection *c, ecp_seq_t sq, unsigned char t, unsigned char *m, ssize_t sz, ECP2Buffer *b) {
+ fprintf(stderr, "IS OPEN!\n");
+
+ ssize_t rv = ecp_conn_handle_open(c, sq, t, m, sz, b);
+ if (rv < 0) return rv;
+
+ conn_in = c;
+ is_open = 1;
+
+ return rv;
+}
+
+/*
+ * Forward one encoded frame to the connected client as an MTYPE_MSG
+ * message. Returns ecp_send()'s result, or -1 if no connection is open yet
+ * (previously conn_in was dereferenced unguarded, racing the capture
+ * loop's conn_is_open() check). pts is currently unused.
+ */
+ssize_t send_frame(unsigned char *buffer, size_t size, ecp_pts_t pts) {
+    if (!is_open || conn_in == NULL)
+        return -1;
+    return ecp_send(conn_in, MTYPE_MSG, buffer, size);
+}
+
+/* True once handle_open() has recorded an incoming client connection. */
+int conn_is_open(void) {
+ return is_open;
+}
+
+/*
+ * Bring up the ECP side of the capture server: load the permanent key,
+ * create and open the socket on `address`, start the receiver thread and
+ * open a VLINK connection to the video switch identified by vcs_key.
+ * Returns 0 on success, the first failing step's error code otherwise.
+ */
+int init_server(char *address, char *my_key, char *vcs_key) {
+    int rv;
+
+    rv = ecp_init(&ctx);
+    fprintf(stderr, "ecp_init RV:%d\n", rv);
+
+    if (!rv) rv = ecp_conn_handler_init(&handler);
+    if (!rv) {
+        handler.msg[ECP_MTYPE_OPEN] = handle_open;
+        ctx.handler[CTYPE_TEST] = &handler;
+    }
+
+    if (!rv) rv = ecp_util_key_load(&ctx, &key_perma, my_key);
+    fprintf(stderr, "ecp_util_key_load RV:%d\n", rv);
+
+    if (!rv) rv = ecp_sock_create(&sock, &ctx, &key_perma);
+    fprintf(stderr, "ecp_sock_create RV:%d\n", rv);
+
+    if (!rv) rv = ecp_sock_open(&sock, address);
+    fprintf(stderr, "ecp_sock_open RV:%d\n", rv);
+
+    if (!rv) rv = ecp_start_receiver(&sock);
+    fprintf(stderr, "ecp_start_receiver RV:%d\n", rv);
+
+    /* the remaining diagnostics used printf; all logging now goes to
+     * stderr, consistent with the rest of this file */
+    if (!rv) rv = ecp_util_node_load(&ctx, &node, vcs_key);
+    fprintf(stderr, "ecp_util_node_load RV:%d\n", rv);
+
+    if (!rv) rv = ecp_conn_create(&conn, &sock, ECP_CTYPE_VLINK);
+    fprintf(stderr, "ecp_conn_create RV:%d\n", rv);
+
+    if (!rv) rv = ecp_conn_open(&conn, &node);
+    fprintf(stderr, "ecp_conn_open RV:%d\n", rv);
+
+    return rv;
+}
diff --git a/code/test/vid/server.h b/code/test/vid/server.h
new file mode 100644
index 0000000..2158b38
--- /dev/null
+++ b/code/test/vid/server.h
@@ -0,0 +1,7 @@
+#include "core.h"
+#include "vconn/vconn.h"
+#include "util.h"
+
+/* NOTE(review): this header has no include guard -- confirm it is only
+ * ever included once per translation unit. */
+
+/* Send one encoded frame (buffer/size, presentation timestamp pts);
+ * returns bytes sent or negative on error. */
+ssize_t send_frame(unsigned char *buffer, size_t size, ecp_pts_t pts);
+/* Nonzero once the inbound OPEN handshake has completed. */
+int conn_is_open(void);
+/* Initialize ECP, bind address, load my_key, connect to the node in
+ * vcs_key; returns 0 on success. */
+int init_server(char *address, char *my_key, char *vcs_key);
diff --git a/code/test/vid/tools.c b/code/test/vid/tools.c
new file mode 100644
index 0000000..0307ef6
--- /dev/null
+++ b/code/test/vid/tools.c
@@ -0,0 +1,165 @@
+#include <stdio.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "tools.h"
+
+/* Table of supported encoders; order defines lookup-by-index. */
+static const VpxInterface vpx_encoders[] = {
+    { "vp8", VP8_FOURCC, &vpx_codec_vp8_cx },
+    { "vp9", VP9_FOURCC, &vpx_codec_vp9_cx },
+};
+
+/* Number of entries in the encoder table. */
+int get_vpx_encoder_count(void) {
+    return (int)(sizeof(vpx_encoders) / sizeof(vpx_encoders[0]));
+}
+
+const VpxInterface *get_vpx_encoder_by_index(int i) { return &vpx_encoders[i]; }
+
+/* Linear search of the encoder table by short name ("vp8"/"vp9").
+ * Returns NULL when no entry matches. */
+const VpxInterface *get_vpx_encoder_by_name(const char *name) {
+    int idx;
+
+    for (idx = 0; idx < get_vpx_encoder_count(); idx++) {
+        const VpxInterface *candidate = get_vpx_encoder_by_index(idx);
+        if (!strcmp(candidate->name, name)) return candidate;
+    }
+
+    return NULL;
+}
+
+/* Table of supported decoders; order defines lookup-by-index. */
+static const VpxInterface vpx_decoders[] = {
+    { "vp8", VP8_FOURCC, &vpx_codec_vp8_dx },
+    { "vp9", VP9_FOURCC, &vpx_codec_vp9_dx },
+};
+
+/* Number of entries in the decoder table. */
+int get_vpx_decoder_count(void) {
+    return (int)(sizeof(vpx_decoders) / sizeof(vpx_decoders[0]));
+}
+
+const VpxInterface *get_vpx_decoder_by_index(int i) { return &vpx_decoders[i]; }
+
+/* Linear search of the decoder table by short name ("vp8"/"vp9").
+ * Returns NULL when no entry matches. */
+const VpxInterface *get_vpx_decoder_by_name(const char *name) {
+    int idx;
+
+    for (idx = 0; idx < get_vpx_decoder_count(); idx++) {
+        const VpxInterface *candidate = get_vpx_decoder_by_index(idx);
+        if (!strcmp(candidate->name, name)) return candidate;
+    }
+
+    return NULL;
+}
+
+void die_codec(vpx_codec_ctx_t *ctx, const char *s) {
+ if (ctx) {
+ const char *detail = vpx_codec_error_detail(ctx);
+
+ fprintf(stderr, "%s: %s\n", s, vpx_codec_error(ctx));
+ if (detail) fprintf(stderr, " %s\n", detail);
+ } else {
+ fprintf(stderr, "%s", s);
+ }
+ exit(EXIT_FAILURE);
+}
+
+// TODO(dkovalev): move this function to vpx_image.{c, h}, so it will be part
+// of vpx_image_t support
+/* Width in samples of the given plane, applying horizontal chroma
+ * subsampling for planes > 0. */
+int vpx_img_plane_width(const vpx_image_t *img, int plane) {
+    if (plane <= 0 || img->x_chroma_shift <= 0) return img->d_w;
+    return (img->d_w + 1) >> img->x_chroma_shift;
+}
+
+/* Height in samples of the given plane, applying vertical chroma
+ * subsampling for planes > 0. */
+int vpx_img_plane_height(const vpx_image_t *img, int plane) {
+    if (plane <= 0 || img->y_chroma_shift <= 0) return img->d_h;
+    return (img->d_h + 1) >> img->y_chroma_shift;
+}
+
+/* Copy a packed raw frame from img_buf (sz bytes) into the planes of
+ * img, honoring each plane's stride. Assumes a 3-plane layout.
+ * Returns 1 on success, 0 if the buffer is too small. */
+int vpx_img_read(vpx_image_t *img, void *img_buf, int sz) {
+    /* portability fix: arithmetic on void * is a GCC extension, not
+     * ISO C -- convert once and index the byte pointer instead */
+    const unsigned char *src = (const unsigned char *)img_buf;
+    int plane;
+    int off = 0;
+
+    for (plane = 0; plane < 3; ++plane) {
+        unsigned char *buf = img->planes[plane];
+        const int stride = img->stride[plane];
+        const int w = vpx_img_plane_width(img, plane) *
+                      ((img->fmt & VPX_IMG_FMT_HIGHBITDEPTH) ? 2 : 1);
+        const int h = vpx_img_plane_height(img, plane);
+        int y;
+
+        for (y = 0; y < h; ++y) {
+            if (off + w > sz) return 0;
+            memcpy(buf, src + off, w);
+            off += w;
+            buf += stride;
+        }
+    }
+
+    return 1;
+}
+
+/* Pack the planes of img into img_buf (sz bytes), row by row, dropping
+ * stride padding. Assumes a 3-plane layout.
+ * Returns 1 on success, 0 if the buffer is too small. */
+int vpx_img_write(const vpx_image_t *img, void *img_buf, int sz) {
+    /* portability fix: arithmetic on void * is a GCC extension, not
+     * ISO C -- convert once and index the byte pointer instead */
+    unsigned char *dst = (unsigned char *)img_buf;
+    int plane;
+    int off = 0;
+
+    for (plane = 0; plane < 3; ++plane) {
+        const unsigned char *buf = img->planes[plane];
+        const int stride = img->stride[plane];
+        const int w = vpx_img_plane_width(img, plane) *
+                      ((img->fmt & VPX_IMG_FMT_HIGHBITDEPTH) ? 2 : 1);
+        const int h = vpx_img_plane_height(img, plane);
+        int y;
+
+        for (y = 0; y < h; ++y) {
+            if (off + w > sz) return 0;
+            memcpy(dst + off, buf, w);
+            off += w;
+            buf += stride;
+        }
+    }
+
+    return 1;
+}
+
+/* Fill the planes of img from a raw frame stream, one row at a time.
+ * Returns 1 on success, 0 on short read / EOF. */
+int vpx_img_read_f(vpx_image_t *img, FILE *file) {
+    int p;
+
+    for (p = 0; p < 3; ++p) {
+        const int sample_bytes = (img->fmt & VPX_IMG_FMT_HIGHBITDEPTH) ? 2 : 1;
+        const int row_bytes = vpx_img_plane_width(img, p) * sample_bytes;
+        const int rows = vpx_img_plane_height(img, p);
+        unsigned char *dst = img->planes[p];
+        int r;
+
+        for (r = 0; r < rows; ++r) {
+            if (fread(dst, 1, row_bytes, file) != (size_t)row_bytes) return 0;
+            dst += img->stride[p];
+        }
+    }
+
+    return 1;
+}
+
+/* Write the planes of img to a raw frame stream, one row at a time,
+ * dropping stride padding. Returns 1 on success, 0 on short write. */
+int vpx_img_write_f(const vpx_image_t *img, FILE *file) {
+    int p;
+
+    for (p = 0; p < 3; ++p) {
+        const int sample_bytes = (img->fmt & VPX_IMG_FMT_HIGHBITDEPTH) ? 2 : 1;
+        const int row_bytes = vpx_img_plane_width(img, p) * sample_bytes;
+        const int rows = vpx_img_plane_height(img, p);
+        const unsigned char *src = img->planes[p];
+        int r;
+
+        for (r = 0; r < rows; ++r) {
+            if (fwrite(src, 1, row_bytes, file) != (size_t)row_bytes) return 0;
+            src += img->stride[p];
+        }
+    }
+
+    return 1;
+}
+
diff --git a/code/test/vid/tools.h b/code/test/vid/tools.h
new file mode 100644
index 0000000..645d5ba
--- /dev/null
+++ b/code/test/vid/tools.h
@@ -0,0 +1,37 @@
+#ifndef TOOLS_COMMON_H_
+#define TOOLS_COMMON_H_
+
+#include "vpx/vpx_codec.h"
+#include "vpx/vpx_image.h"
+#include "vpx/vp8cx.h"
+#include "vpx/vp8dx.h"
+
+/* Little-endian FourCC codes: "VP80" and "VP90". */
+#define VP8_FOURCC 0x30385056
+#define VP9_FOURCC 0x30395056
+
+/* Simple fraction (e.g. a frame rate). */
+struct VpxRational {
+  int numerator;
+  int denominator;
+};
+
+/* Descriptor tying a codec's short name and FourCC to the function
+ * that returns its libvpx interface. */
+typedef struct VpxInterface {
+  const char *const name;
+  const uint32_t fourcc;
+  vpx_codec_iface_t *(*const codec_interface)();
+} VpxInterface;
+
+/* Table lookups; the *_by_name variants return NULL on no match.
+ * NOTE(review): get_vpx_{en,de}coder_count() are defined in tools.c
+ * but not declared here -- confirm whether they should be exported. */
+const VpxInterface *get_vpx_encoder_by_index(int i);
+const VpxInterface *get_vpx_encoder_by_name(const char *name);
+
+const VpxInterface *get_vpx_decoder_by_index(int i);
+const VpxInterface *get_vpx_decoder_by_name(const char *name);
+
+/* Print a fatal codec error (ctx may be NULL) and exit(EXIT_FAILURE). */
+void die_codec(vpx_codec_ctx_t *ctx, const char *s);
+
+/* Memory-buffer frame copy in/out; return 1 on success, 0 on short buffer. */
+int vpx_img_read(vpx_image_t *img, void *img_buf, int sz);
+int vpx_img_write(const vpx_image_t *img, void *img_buf, int sz);
+
+/* FILE-stream frame copy in/out; return 1 on success, 0 on short I/O. */
+int vpx_img_read_f(vpx_image_t *img, FILE *file);
+int vpx_img_write_f(const vpx_image_t *img, FILE *file);
+
+#endif \ No newline at end of file