diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Modules/FindFFmpeg.cmake libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Modules/FindFFmpeg.cmake --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Modules/FindFFmpeg.cmake 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Modules/FindFFmpeg.cmake 2019-03-21 07:31:31.000000000 +0000 @@ -1,151 +1,161 @@ -# - Try to find FFMPEG +# vim: ts=2 sw=2 +# - Try to find the required ffmpeg components(default: AVFORMAT, AVUTIL, AVCODEC) +# # Once done this will define -# -# FFMPEG_FOUND - system has FFMPEG -# FFMPEG_INCLUDE_DIR - the include directory -# FFMPEG_LIBRARY_DIR - the directory containing the libraries -# FFMPEG_LIBRARIES - Link these to use FFMPEG -# - -# FindAvformat -FIND_PATH( AVFORMAT_INCLUDE_DIR libavformat/avformat.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( AVFORMAT_LIBRARY avformat avformat-55 avformat-57 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - -#FindAvcodec -FIND_PATH( AVCODEC_INCLUDE_DIR libavcodec/avcodec.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( AVCODEC_LIBRARY avcodec avcodec-55 avcodec-57 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - -#FindAvutil -FIND_PATH( AVUTIL_INCLUDE_DIR libavutil/avutil.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( AVUTIL_LIBRARY avutil avutil-52 avutil-55 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - -#FindAvdevice -FIND_PATH( AVDEVICE_INCLUDE_DIR libavdevice/avdevice.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( AVDEVICE_LIBRARY avdevice avdevice-55 avdevice-56 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - -#FindSwscale -FIND_PATH( SWSCALE_INCLUDE_DIR libswscale/swscale.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( SWSCALE_LIBRARY swscale swscale-2 swscale-4 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - -#FindAvresample -FIND_PATH( AVRESAMPLE_INCLUDE_DIR libavresample/avresample.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( AVRESAMPLE_LIBRARY avresample avresample-2 avresample-3 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - -SET( FFMPEG_FOUND FALSE ) - -IF ( AVFORMAT_INCLUDE_DIR AND AVFORMAT_LIBRARY ) - SET ( AVFORMAT_FOUND TRUE ) -ENDIF ( AVFORMAT_INCLUDE_DIR AND AVFORMAT_LIBRARY ) - -IF ( AVCODEC_INCLUDE_DIR AND AVCODEC_LIBRARY ) - SET ( AVCODEC_FOUND TRUE) -ENDIF ( AVCODEC_INCLUDE_DIR AND AVCODEC_LIBRARY ) - -IF ( AVUTIL_INCLUDE_DIR AND AVUTIL_LIBRARY ) - SET ( AVUTIL_FOUND TRUE ) -ENDIF ( AVUTIL_INCLUDE_DIR AND AVUTIL_LIBRARY ) - -IF ( AVDEVICE_INCLUDE_DIR AND AVDEVICE_LIBRARY ) - SET ( AVDEVICE_FOUND TRUE ) -ENDIF ( AVDEVICE_INCLUDE_DIR AND AVDEVICE_LIBRARY ) - -IF ( 
SWSCALE_INCLUDE_DIR AND SWSCALE_LIBRARY ) - SET ( SWSCALE_FOUND TRUE ) -ENDIF ( SWSCALE_INCLUDE_DIR AND SWSCALE_LIBRARY ) - -IF ( AVRESAMPLE_INCLUDE_DIR AND AVRESAMPLE_LIBRARY ) - SET ( AVRESAMPLE_FOUND TRUE ) -ENDIF ( AVRESAMPLE_INCLUDE_DIR AND AVRESAMPLE_LIBRARY ) - -IF ( AVFORMAT_INCLUDE_DIR OR AVCODEC_INCLUDE_DIR OR AVUTIL_INCLUDE_DIR OR AVDEVICE_FOUND OR SWSCALE_FOUND OR AVRESAMPLE_FOUND ) - - SET ( FFMPEG_FOUND TRUE ) - - SET ( FFMPEG_INCLUDE_DIR - ${AVFORMAT_INCLUDE_DIR} - ${AVCODEC_INCLUDE_DIR} - ${AVUTIL_INCLUDE_DIR} - ${AVDEVICE_INCLUDE_DIR} - ${SWSCALE_INCLUDE_DIR} - ${AVRESAMPLE_INCLUDE_DIR} ) - - SET ( FFMPEG_LIBRARIES - ${AVFORMAT_LIBRARY} - ${AVCODEC_LIBRARY} - ${AVUTIL_LIBRARY} - ${AVDEVICE_LIBRARY} - ${SWSCALE_LIBRARY} - ${AVRESAMPLE_LIBRARY} ) - -ENDIF ( AVFORMAT_INCLUDE_DIR OR AVCODEC_INCLUDE_DIR OR AVUTIL_INCLUDE_DIR OR AVDEVICE_FOUND OR SWSCALE_FOUND OR AVRESAMPLE_FOUND ) - -MARK_AS_ADVANCED( - FFMPEG_LIBRARY_DIR - FFMPEG_INCLUDE_DIR -) +# FFMPEG_FOUND - System has the all required components. +# FFMPEG_INCLUDE_DIRS - Include directory necessary for using the required components headers. +# FFMPEG_LIBRARIES - Link these to use the required ffmpeg components. +# FFMPEG_DEFINITIONS - Compiler switches required for using the required ffmpeg components. +# +# For each of the components it will additionally set. +# - AVCODEC +# - AVDEVICE +# - AVFORMAT +# - AVFILTER +# - AVUTIL +# - POSTPROC +# - SWSCALE +# - SWRESAMPLE +# - AVRESAMPLE +# the following variables will be defined +# _FOUND - System has +# _INCLUDE_DIRS - Include directory necessary for using the headers +# _LIBRARIES - Link these to use +# _DEFINITIONS - Compiler switches required for using +# _VERSION - The components version +# +# Copyright (c) 2006, Matthias Kretz, +# Copyright (c) 2008, Alexander Neundorf, +# Copyright (c) 2011, Michael Jansen, +# +# Redistribution and use is allowed according to the terms of the BSD license. +# For details see the accompanying COPYING-CMAKE-SCRIPTS file. include(FindPackageHandleStandardArgs) -# handle the QUIETLY and REQUIRED arguments and set FFMPEG_FOUND to TRUE -# if all listed variables are TRUE -find_package_handle_standard_args(FFMPEG DEFAULT_MSG - FFMPEG_LIBRARIES FFMPEG_INCLUDE_DIR) + +# The default components were taken from a survey over other FindFFMPEG.cmake files +if (NOT FFmpeg_FIND_COMPONENTS) + set(FFmpeg_FIND_COMPONENTS AVCODEC AVFORMAT AVUTIL) +endif () + +# +### Macro: set_component_found +# +# Marks the given component as found if both *_LIBRARIES AND *_INCLUDE_DIRS is present. +# +macro(set_component_found _component ) + if (${_component}_LIBRARIES AND ${_component}_INCLUDE_DIRS) + # message(STATUS " - ${_component} found.") + set(${_component}_FOUND TRUE) + else () + # message(STATUS " - ${_component} not found.") + endif () +endmacro() + +# +### Macro: find_component +# +# Checks for the given component by invoking pkgconfig and then looking up the libraries and +# include directories. 
+# +macro(find_component _component _pkgconfig _library _header) + + if (NOT WIN32) + # use pkg-config to get the directories and then use these values + # in the FIND_PATH() and FIND_LIBRARY() calls + find_package(PkgConfig) + if (PKG_CONFIG_FOUND) + pkg_check_modules(PC_${_component} ${_pkgconfig}) + endif () + endif (NOT WIN32) + + find_path(${_component}_INCLUDE_DIRS ${_header} + HINTS + /opt/ + /opt/include/ + ${PC_LIB${_component}_INCLUDEDIR} + ${PC_LIB${_component}_INCLUDE_DIRS} + $ENV{FFMPEGDIR}/include/ + $ENV{FFMPEGDIR}/include/ffmpeg/ + PATH_SUFFIXES + ffmpeg + ) + + find_library(${_component}_LIBRARIES NAMES ${_library} + HINTS + ${PC_LIB${_component}_LIBDIR} + ${PC_LIB${_component}_LIBRARY_DIRS} + $ENV{FFMPEGDIR}/lib/ + $ENV{FFMPEGDIR}/lib/ffmpeg/ + $ENV{FFMPEGDIR}/bin/ + ) + + set(${_component}_DEFINITIONS ${PC_${_component}_CFLAGS_OTHER} CACHE STRING "The ${_component} CFLAGS.") + set(${_component}_VERSION ${PC_${_component}_VERSION} CACHE STRING "The ${_component} version number.") + + set_component_found(${_component}) + + mark_as_advanced( + ${_component}_INCLUDE_DIRS + ${_component}_LIBRARIES + ${_component}_DEFINITIONS + ${_component}_VERSION) + +endmacro() + + +# Check for cached results. If there are skip the costly part. +if (NOT FFMPEG_LIBRARIES) + + # Check for all possible component. + find_component(AVCODEC libavcodec avcodec libavcodec/avcodec.h) + find_component(AVFORMAT libavformat avformat libavformat/avformat.h) + find_component(AVDEVICE libavdevice avdevice libavdevice/avdevice.h) + find_component(AVUTIL libavutil avutil libavutil/avutil.h) + find_component(AVFILTER libavfilter avfilter libavfilter/avfilter.h) + find_component(SWSCALE libswscale swscale libswscale/swscale.h) + find_component(POSTPROC libpostproc postproc libpostproc/postprocess.h) + find_component(SWRESAMPLE libswresample swresample libswresample/swresample.h) + find_component(AVRESAMPLE libavresample avresample libavresample/avresample.h) + + # Check if the required components were found and add their stuff to the FFMPEG_* vars. + foreach (_component ${FFmpeg_FIND_COMPONENTS}) + if (${_component}_FOUND) + # message(STATUS "Required component ${_component} present.") + set(FFMPEG_LIBRARIES ${FFMPEG_LIBRARIES} ${${_component}_LIBRARIES}) + set(FFMPEG_DEFINITIONS ${FFMPEG_DEFINITIONS} ${${_component}_DEFINITIONS}) + list(APPEND FFMPEG_INCLUDE_DIRS ${${_component}_INCLUDE_DIRS}) + else () + # message(STATUS "Required component ${_component} missing.") + endif () + endforeach () + + # Build the include path with duplicates removed. + if (FFMPEG_INCLUDE_DIRS) + list(REMOVE_DUPLICATES FFMPEG_INCLUDE_DIRS) + endif () + + # cache the vars. + set(FFMPEG_INCLUDE_DIRS ${FFMPEG_INCLUDE_DIRS} CACHE STRING "The FFmpeg include directories." FORCE) + set(FFMPEG_LIBRARIES ${FFMPEG_LIBRARIES} CACHE STRING "The FFmpeg libraries." FORCE) + set(FFMPEG_DEFINITIONS ${FFMPEG_DEFINITIONS} CACHE STRING "The FFmpeg cflags." FORCE) + + mark_as_advanced(FFMPEG_INCLUDE_DIRS + FFMPEG_LIBRARIES + FFMPEG_DEFINITIONS) + +endif () + +# Now set the noncached _FOUND vars for the components. 
+foreach (_component AVCODEC AVDEVICE AVFORMAT AVUTIL POSTPROCESS SWSCALE SWRESAMPLE AVRESAMPLE) + set_component_found(${_component}) +endforeach () + +# Compile the list of required vars +set(_FFmpeg_REQUIRED_VARS FFMPEG_LIBRARIES FFMPEG_INCLUDE_DIRS) +foreach (_component ${FFmpeg_FIND_COMPONENTS}) + list(APPEND _FFmpeg_REQUIRED_VARS ${_component}_LIBRARIES ${_component}_INCLUDE_DIRS) +endforeach () + +# Give a nice error message if some of the required vars are missing. +find_package_handle_standard_args(FFmpeg DEFAULT_MSG ${_FFmpeg_REQUIRED_VARS}) \ No newline at end of file diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Modules/FindOpenShotAudio.cmake libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Modules/FindOpenShotAudio.cmake --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Modules/FindOpenShotAudio.cmake 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Modules/FindOpenShotAudio.cmake 2019-03-21 07:31:31.000000000 +0000 @@ -9,9 +9,9 @@ # Find the base directory of juce includes find_path(LIBOPENSHOT_AUDIO_BASE_DIR JuceHeader.h - PATHS /usr/include/libopenshot-audio/ - /usr/local/include/libopenshot-audio/ - $ENV{LIBOPENSHOT_AUDIO_DIR}/include/libopenshot-audio/ ) + PATHS $ENV{LIBOPENSHOT_AUDIO_DIR}/include/libopenshot-audio/ + /usr/include/libopenshot-audio/ + /usr/local/include/libopenshot-audio/ ) # Get a list of all header file paths FILE(GLOB_RECURSE JUCE_HEADER_FILES @@ -24,7 +24,7 @@ get_filename_component(HEADER_DIRECTORY ${HEADER_PATH} PATH ) - + # Append each directory into the HEADER_DIRECTORIES list LIST(APPEND HEADER_DIRECTORIES ${HEADER_DIRECTORY}) ENDFOREACH(HEADER_PATH) @@ -32,13 +32,18 @@ # Remove duplicates from the header directories list LIST(REMOVE_DUPLICATES HEADER_DIRECTORIES) -# Find the libopenshot-audio.so / libopenshot-audio.dll library +# Find the libopenshot-audio.so (check env var first) +find_library(LIBOPENSHOT_AUDIO_LIBRARY + NAMES libopenshot-audio openshot-audio + PATHS $ENV{LIBOPENSHOT_AUDIO_DIR}/lib/ NO_DEFAULT_PATH) + +# Find the libopenshot-audio.so / libopenshot-audio.dll library (fallback) find_library(LIBOPENSHOT_AUDIO_LIBRARY NAMES libopenshot-audio openshot-audio - HINTS /usr/lib/ + HINTS $ENV{LIBOPENSHOT_AUDIO_DIR}/lib/ + /usr/lib/ /usr/lib/libopenshot-audio/ - /usr/local/lib/ - $ENV{LIBOPENSHOT_AUDIO_DIR}/lib/ ) + /usr/local/lib/ ) set(LIBOPENSHOT_AUDIO_LIBRARIES ${LIBOPENSHOT_AUDIO_LIBRARY}) set(LIBOPENSHOT_AUDIO_LIBRARY ${LIBOPENSHOT_AUDIO_LIBRARIES}) diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Modules/FindRESVG.cmake libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Modules/FindRESVG.cmake --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Modules/FindRESVG.cmake 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Modules/FindRESVG.cmake 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,28 @@ +# - Try to find RESVG +# Once done this will define +# RESVG_FOUND - System has RESVG +# RESVG_INCLUDE_DIRS - The RESVG include directories +# RESVG_LIBRARIES - The libraries needed to use RESVG +find_path ( RESVG_INCLUDE_DIR ResvgQt.h + PATHS ${RESVGDIR}/include/resvg + $ENV{RESVGDIR}/include/resvg + $ENV{RESVGDIR}/include + /usr/include/resvg + /usr/include + /usr/local/include/resvg + /usr/local/include ) + +find_library ( RESVG_LIBRARY NAMES resvg + PATHS /usr/lib + /usr/local/lib + 
$ENV{RESVGDIR} + $ENV{RESVGDIR}/lib ) + +set ( RESVG_LIBRARIES ${RESVG_LIBRARY} ) +set ( RESVG_INCLUDE_DIRS ${RESVG_INCLUDE_DIR} ) + +include ( FindPackageHandleStandardArgs ) +# handle the QUIETLY and REQUIRED arguments and set RESVG_FOUND to TRUE +# if all listed variables are TRUE +find_package_handle_standard_args ( RESVG "Could NOT find RESVG, using Qt SVG parsing instead" RESVG_LIBRARY RESVG_INCLUDE_DIR ) +mark_as_advanced( RESVG_LIBRARY RESVG_INCLUDE_DIR ) diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Windows/build-imagemagick.sh libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Windows/build-imagemagick.sh --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Windows/build-imagemagick.sh 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Windows/build-imagemagick.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,274 +0,0 @@ -#!/bin/bash -# xml2 build ok but failed test -# libfpx build error - -function ised() { - IN=$1 - shift - tmp=$RANDOM.$$ - <$IN sed "$@" >$tmp && cat $tmp > $IN - rm $tmp -} - -function ask() { - read -p "${1:-Are you sure?]} [Y/n] " response - case $response in - y|Y|"") - true;; - *) - false;; - esac -} - -function download() { - while IFS=\; read url md5 <&3; do - fileName=${url##*/} - - echo "Downloading ${fileName}..." - while true; do - if [[ ! -e $fileName ]]; then - wget ${url} -O ${fileName} - else - echo "File exists!" - fi - - localMd5=$(md5sum ${fileName} | cut -d\ -f1) - - if [[ ${localMd5} != ${md5} ]]; then - ask "Checksum failed. Do you want to download this file again? [Y/n] " - if [[ $? -ne 0 ]]; then - exit 1 - fi - rm ${fileName} - else - break - fi - done - done 3< urls.txt -} - -function extract() { - file=$1 - if [[ ! -e ${file} ]]; then - return - fi - - case $file in - *.tar.gz) - tar xzf $file - ;; - *.tar.xz|*.tar.lzma) - tar xJf $file - ;; - *.tar.bz2) - tar xjf $file - ;; - *) - "Don't know how to extract $file" - esac -} - -function isLibsInstalled() { - libs="$@" - notfound=false - for l in "${libs}"; do - ld -L/usr/local/lib -l"${l}" 2>/dev/null - if [[ $? -ne 0 ]]; then - notfound=true - fi - done - - ! ${notfound} -} - -function isDirExists() { - dir="$@" - found=false - for d in ${dir}; do - if [[ -d "${d}" ]]; then - found=true - break - fi - done - - ${found} -} - -function extractIfNeeded() { - file=$1 - isDirExists ${file%%-*}-* - if [[ $? -ne 0 ]]; then - echo "Extracting $file" - extract $file - fi -} - -function buildbzip2() { - if isLibsInstalled "bz2"; then - if ask "Found bzip2 installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extractIfNeeded bzip2-*.tar.lzma - - cd bzip2-*/ - tar xzf bzip2-1.0.6.tar.gz - tar xzf cygming-autotools-buildfiles.tar.gz - cd bzip2-*/ - autoconf - mkdir ../build - cd ../build - ../bzip2-*/configure - make - make install - cd ../.. -} - -function buildzlib() { - if isLibsInstalled "z"; then - if ask "Found zlib installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extractIfNeeded zlib-*.tar.xz - - cd zlib-*/ - INCLUDE_PATH=/usr/local/include LIBRARY_PATH=/usr/local/lib BINARY_PATH=/usr/local/bin make install -f win32/Makefile.gcc SHARED_MODE=1 - cd .. -} - -function buildlibxml2() { - if isLibsInstalled "xml2"; then - if ask "Found libxml2 installed. 
Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extractIfNeeded libxml2-*.tar.gz - cd libxml2-*/win32/ - ised configure.js 's/dirSep = "\\\\";/dirSep = "\/";/' - cscript.exe configure.js compiler=mingw prefix=/usr/local - # ised ../dict.c '/typedef.*uint32_t;$/d' - ised Makefile.mingw 's/cmd.exe \/C "\?if not exist \(.*\) mkdir \1"\?/mkdir -p \1/' - ised Makefile.mingw 's/cmd.exe \/C "copy\(.*\)"/cp\1/' - ised Makefile.mingw '/cp/{y/\\/\//;}' - ised Makefile.mingw '/PREFIX/{y/\\/\//;}' - make -f Makefile.mingw - make -f Makefile.mingw install - cd ../../ -} - -function buildlibpng() { - if isLibsInstalled "png"; then - if ask "Found libpng installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extractIfNeeded libpng-*.tar.xz - - cd libpng-*/ - make -f scripts/makefile.msys - make install -f scripts/makefile.msys - cd .. -} - -function buildjpegsrc() { - if isLibsInstalled "jpeg"; then - if ask "Found jpegsrc installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extract jpegsrc*.tar.gz - - cd jpeg-*/ - ./configure - make - make install - cd .. -} - -function buildfreetype() { - if isLibsInstalled "freetype"; then - if ask "Found freetype installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extract freetype*.tar.bz2 - - INCLUDE_PATH=/usr/local/include - LIBRARY_PATH=/usr/local/lib - BINARY_PATH=/usr/local/bin - cd freetype-*/ - ./configure - make - make install - cd .. -} - -function buildlibwmf() { - if isLibsInstalled "wmf"; then - if ask "Found libwmf installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extract libwmf*.tar.gz - - cd libwmf-*/ - ./configure CFLAGS="-I/usr/local/include" LDFLAGS="-L/usr/local/lib" - make - make install - cd .. -} - -function buildlibwebp() { - if isLibsInstalled "webp"; then - if ask "Found libwebp installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extract libwebp*.tar.gz - - cd libwebp-*/ - ./configure CFLAGS="-I/usr/local/include" LDFLAGS="-L/usr/local/lib" - make - make install - cd .. 
-} - -function buildDelegate() { - delegates="bzip2 zlib libxml2 libpng jpegsrc freetype libwmf libwebp" - for d in ${delegates}; do - echo "**********************************************************" - echo "Building $d" - build${d} - done -} - -function build() { - extractIfNeeded ImageMagick-*.tar.xz - - local oldPwd=$(pwd -L) - cd ImageMagick-*/ - # patch configure - #sed -i 's/${GDI32_LIBS}x" !=/${GDI32_LIBS} ==/' configure - ised configure 's/${GDI32_LIBS}x" !=/${GDI32_LIBS} ==/' - ./configure --enable-shared --disable-static --enable-delegate-build --without-modules CFLAGS="-I/usr/local/include" LDFLAGS="-L/usr/local/lib" - make - make install - cd ${oldPwd} -} - -download -buildDelegate -build diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Windows/README libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Windows/README --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Windows/README 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Windows/README 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ -#################################################################### - Install Dependencies for Windows -#################################################################### - -Install MSYS2 (64 bit environment) -Append PATH: - $ PATH=$PATH:/c/msys64/mingw64/bin:/c/msys64/mingw64/lib - -Sync pacman - $ pacman -Syu - -Install the following packages: - $ pacman -S mingw-w64-x86_64-toolchain - $ pacman -S mingw64/mingw-w64-x86_64-ffmpeg - $ pacman -S mingw64/mingw-w64-x86_64-python3-pyzmq - $ pacman -S mingw64/mingw-w64-x86_64-python3-pyqt5 - $ pacman -S mingw64/mingw-w64-x86_64-swig - $ pacman -S mingw64/mingw-w64-x86_64-cmake - $ pacman -S mingw64/mingw-w64-x86_64-doxygen - $ pacman -S mingw64/mingw-w64-x86_64-python3-pyzmq - $ pacman -S mingw64/mingw-w64-x86_64-python3-pip - $ pacman -S git - -Install ImageMagick if needed (OPTIONAL and NOT NEEDED) - $ pacman -S mingw64/mingw-w64-x86_64-imagemagick - -Install httplib2 for Python3 - $ pip3 install httplib2 - $ pip3 install slacker - $ pip3 install tinys3 - -Download Unittest++ into /c/home/jonathan/unittest-cpp-master/ -Configure Unittest++: - $ cmake -G "MSYS Makefiles" ../ -DCMAKE_MAKE_PROGRAM=mingw32-make -Build Unittest++ (as Administrator): - $ mingw32-make install (NOTE: This installs to C:\Program Files (x86)\UnitTest++) - -ZMQ++ Header - NOTE: Download and copy zmq.hpp into the /c/msys64/mingw64/include/ folder - - -#################################################################### - OPTIONAL: Installing ImageMagick on Windows -#################################################################### - -If you would rather install ImageMagick from source code yourself, follow these steps: - -Step 1) Copy [build-imagemagick.sh and urls.txt] into your local MSYS2 environment -Step 2) Run MSYS2 Shell -Step 3) Execute this command - $ ./build-imagemagick.sh \ No newline at end of file diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Windows/urls.txt libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Windows/urls.txt --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/cmake/Windows/urls.txt 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/cmake/Windows/urls.txt 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ 
-ftp://ftp.imagemagick.org/pub/ImageMagick/releases/ImageMagick-6.8.8-10.tar.xz;ab9b397c1d4798a9f6ae6cc94aa292fe
-ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libpng-1.6.20.tar.xz;3968acb7c66ef81a9dab867f35d0eb4b
-ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libwebp-0.4.4.tar.gz;b737062cf688e502b940b460ddc3015f
-ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libwmf-0.2.8.4.tar.gz;d1177739bf1ceb07f57421f0cee191e0
-ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libxml2-2.9.3.tar.gz;daece17e045f1c107610e137ab50c179
-ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/zlib-1.2.8.tar.xz;28f1205d8dd2001f26fec1e8c2cebe37
-ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/freetype-2.6.2.tar.bz2;86109d0c998787d81ac582bad9adf82e
-http://ncu.dl.sourceforge.net/project/mingw/MinGW/Extension/bzip2/bzip2-1.0.6-4/bzip2-1.0.6-4-mingw32-src.tar.lzma;2a25de4331d1e6e1458d8632dff55fad
-ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libfpx-1.3.1-4.tar.xz;65e2cf8dcf230ad0b90aead35553bbda
-ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/jpegsrc.v9a.tar.gz;3353992aecaee1805ef4109aadd433e7
diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.cproject libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.cproject
--- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.cproject 2017-11-23 22:40:10.000000000 +0000
+++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.cproject 1970-01-01 00:00:00.000000000 +0000
@@ -1,405 +0,0 @@
[The 405 deleted .cproject lines were Eclipse CDT project XML whose markup was lost in extraction; only the embedded build-target commands survive: cmake -G "Unix Makefiles" ../ -D"CMAKE_BUILD_TYPE:STRING=Release"; cmake -G "Unix Makefiles" ../ -D"MAGICKCORE_HDRI_ENABLE=0" -D"MAGICKCORE_QUANTUM_DEPTH=16" -D"OPENSHOT_IMAGEMAGICK_COMPATIBILITY=0" -D"ENABLE_BLACKMAGIC=1" -D"CMAKE_BUILD_TYPE:STRING=Debug" -D"DISABLE_TESTS=0"; make test; make help; make doc; two macOS cmake invocations selecting gcc-4.8 (/usr/local/opt/gcc48/bin), Qt 5.1.1 and Python 3.3 under /usr/local/Cellar, in Debug and Release variants; and cmake -G "MinGW Makefiles" ../ in Debug and Release variants.]
diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/bzr-builder.manifest libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/bzr-builder.manifest
--- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/bzr-builder.manifest 2017-11-23 22:40:14.000000000 +0000
+++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/bzr-builder.manifest 1970-01-01 00:00:00.000000000 +0000
@@ -1,3 +0,0 @@
-# bzr-builder format 0.3 deb-version {debupstream}+0+625+119+201711232240+daily -lp:libopenshot revid:jonathan@openshot.org-20171114061542-rviygpp1hz0rbd4n -nest-part packaging lp:~openshot.packagers/libopenshot/packaging libopenshot/debian debian revid:jonathan@openshot.org-20171123222826-p6atp9245el18xxf
diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/changelog libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/changelog
--- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/changelog 2017-11-23 22:40:14.000000000 +0000
+++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/changelog 2019-03-21 07:31:37.000000000 +0000
@@ -1,8 +1,26 @@
-libopenshot (0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1) zesty; urgency=low +libopenshot (0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1) bionic; urgency=low * Auto build. - -- OpenShot Code Thu, 23 Nov 2017 22:40:14 +0000 + -- OpenShot Code Thu, 21 Mar 2019 07:31:37 +0000 + +libopenshot (0.2.3-1) UNRELEASED; urgency=medium + + * New upstream release + + -- Jonathan Thomas Wed, 20 Mar 2019 20:47:26 -0500 + +libopenshot (0.2.1-1) UNRELEASED; urgency=medium + + * New upstream release + + -- Jonathan Thomas Wed, 19 Sep 2018 00:26:01 -0500 + +libopenshot (0.2.0-1) UNRELEASED; urgency=medium + + * New upstream release + + -- Jonathan Thomas Sat, 30 Jun 2018 01:26:24 -0500 libopenshot (0.1.9-1) UNRELEASED; urgency=medium
diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/control libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/control
--- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/control 2017-11-23 22:40:13.000000000 +0000
+++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/control 2019-03-21 07:31:37.000000000 +0000
@@ -27,16 +27,18 @@
 Standards-Version: 3.9.6 Homepage: http://www.openshot.org/ -Package: libopenshot14 +Package: libopenshot17 Architecture: any Multi-Arch: same Depends: ${misc:Depends}, ${shlibs:Depends} Pre-Depends: ${misc:Pre-Depends} -Replaces: libopenshot13, +Replaces: libopenshot16, + libopenshot15, + libopenshot14, + libopenshot13, libopenshot12, - libopenshot11, - libopenshot10 + libopenshot11 Description: library for high quality video editing Libopenshot is an open-source, cross-platform C++ library dedicated to delivering high quality video editing, animation, and playback solutions
@@ -48,7 +50,7 @@
 Architecture: any Multi-Arch: same Section: libdevel -Depends: libopenshot14 (= ${binary:Version}), +Depends: libopenshot17 (= ${binary:Version}), ${misc:Depends} Suggests: libopenshot-doc Description: development files for the OpenShot library
diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/git-build-recipe.manifest
libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/git-build-recipe.manifest --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/git-build-recipe.manifest 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/git-build-recipe.manifest 2019-03-21 07:31:37.000000000 +0000 @@ -0,0 +1,3 @@ +# git-build-recipe format 0.4 deb-version {debupstream}+dfsg2+714+201903210731+daily +lp:libopenshot git-commit:d04a71ee7c413d79e192279d49d4a52bd980cba4 +nest-part packaging lp:openshot-packaging libopenshot/debian debian git-commit:af8803c31f6b97423213973835e216d6c9260139 diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/libopenshot14.install libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/libopenshot14.install --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/libopenshot14.install 2017-11-23 22:40:13.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/libopenshot14.install 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -usr/lib/*/*.so.* diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/libopenshot17.install libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/libopenshot17.install --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/libopenshot17.install 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/debian/libopenshot17.install 2019-03-21 07:31:37.000000000 +0000 @@ -0,0 +1 @@ +usr/lib/*/*.so.* Binary files /tmp/tmpxz8q_Q/9vimbt7Lyc/libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/doc/InstallationGuide.pdf and /tmp/tmpxz8q_Q/1QI56EgdGP/libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/doc/InstallationGuide.pdf differ diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/doc/INSTALL-LINUX.md libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/doc/INSTALL-LINUX.md --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/doc/INSTALL-LINUX.md 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/doc/INSTALL-LINUX.md 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,225 @@ +## Getting Started + +The best way to get started with libopenshot, is to learn about our build system, obtain all the source code, +install a development IDE and tools, and better understand our dependencies. So, please read through the +following sections, and follow the instructions. And keep in mind, that your computer is likely different +than the one used when writing these instructions. Your file paths and versions of applications might be +slightly different, so keep an eye out for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. It is a cross-platform build system, which checks for +dependencies, locates header files and libraries, generates makefiles, and supports the cross-platform +compiling of libopenshot and libopenshot-audio. CMake uses an out-of-source build concept, where +all temporary build files, such as makefiles, object files, and even the final binaries, are created +outside of the source code folder, inside a /build/ sub-folder. This prevents the build process +from cluttering up the source code. These instructions have only been tested with the GNU compiler +(including MSYS2/MinGW for Windows). 
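In practice, the out-of-source concept described above reduces to a short command sequence run from the top of the source tree. The sketch below is only a condensed preview of the full Linux build steps given later in this guide; the repository path is a placeholder.

```
# Out-of-source build sketch (details and dependencies are covered below).
cd [libopenshot repo directory]   # placeholder path
mkdir -p build                    # all generated files (makefiles, objects, binaries) stay here
cd build
cmake ../                         # locate dependencies via the cmake/Modules/Find*.cmake scripts
make                              # compile without touching the source folder
```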
+ +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to install these +dependencies vary for each operating system. Libraries and Executables have been labeled in the +list below to help distinguish between them. + +* ### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes. + +* ### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +* ### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. It is based on the JUCE project, which is an outstanding audio library used by many different applications + +* ### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc... + +* ### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. + +* ### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. + +* ### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +* ### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. + +* ### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. + +* ### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. + +* ### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot is +compiled. Some of these flags might be required when compiling on certain OSes, just depending +on how your build environment is setup. 
To add a build flag, follow this general syntax: +`cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../` + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code is +available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following command +to obtain the latest libopenshot source code. + +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. + +* ### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary build files, such as makefiles, as well as the final binaries (library and test executables). + +* ### cmake/ + * This folder contains custom modules not included by default in cmake, used to find dependency libraries and headers and determine if these libraries are installed. + +* ### doc/ + * This folder contains documentation and related files, such as logos and images required by the doxygen auto-generated documentation. + +* ### include/ + * This folder contains all headers (*.h) used by libopenshot. + +* ### src/ + * This folder contains all source code (*.cpp) used by libopenshot. + +* ### tests/ + * This folder contains all unit test code. Each class has it’s own test file (*.cpp), and uses UnitTest++ macros to keep the test code simple and manageable. + +* ### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an open-source JSON parser. + +## Install Dependencies + +In order to actually compile libopenshot, we need to install some dependencies on your system. The easiest +way to accomplish this is with our Daily PPA. A PPA is an unofficial Ubuntu repository, which has our +software packages available to download and install. + +``` + sudo add-apt-repository ppa:openshot.developers/libopenshot-daily + sudo apt-get update + sudo apt-get install openshot-qt \ + cmake \ + libx11-dev \ + libasound2-dev \ + libavcodec-dev \ + libavdevice-dev \ + libavfilter-dev \ + libavformat-dev \ + libavresample-dev \ + libavutil-dev \ + libfdk-aac-dev \ + libfreetype6-dev \ + libjsoncpp-dev \ + libmagick++-dev \ + libopenshot-audio-dev \ + libswscale-dev \ + libunittest++-dev \ + libxcursor-dev \ + libxinerama-dev \ + libxrandr-dev \ + libzmq3-dev \ + pkg-config \ + python3-dev \ + qtbase5-dev \ + qtmultimedia5-dev \ + swig +``` + +## Linux Build Instructions (libopenshot-audio) +To compile libopenshot-audio, we need to go through a few additional steps to manually build and +install it. 
Launch a terminal and enter: + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake ../ +make +make install +./src/openshot-audio-test-sound (This should play a test sound) +``` + +## Linux Build Instructions (libopenshot) +Run the following commands to compile libopenshot: + +``` +cd [libopenshot repo directory] +mkdir -p build +cd build +cmake ../ +make +``` + +If you are missing any dependencies for libopenshot, you might receive error messages at this point. +Just install the missing packages (usually with a -dev suffix), and run the above commands again. +Repeat until no error messages are displayed, and the build process completes. Also, if you manually +install Qt 5, you might need to specify the location for cmake: + +``` +cmake -DCMAKE_PREFIX_PATH=/qt5_path/qt5/5.2.0/ ../ +``` + +To run all unit tests (and verify everything is working correctly), launch a terminal, and enter: + +``` +make test +``` + +To auto-generate documentation for libopenshot, launch a terminal, and enter: + +``` +make doc +``` + +This will use doxygen to generate a folder of HTML files, with all classes and methods documented. The +folder is located at **build/doc/html/**. Once libopenshot has been successfully built, we need to +install it (i.e. copy it to the correct folder, so other libraries can find it). + +``` +make install +``` + +This will copy the binary files to /usr/local/lib/, and the header files to /usr/local/include/openshot/... +This is where other projects will look for the libopenshot files when building. Python 3 bindings are +also installed at this point. let's verify the python bindings work: + +``` +python3 +>>> import openshot +``` + +If no errors are displayed, you have successfully compiled and installed libopenshot on your system. +Congratulations and be sure to read our wiki on [Becoming an OpenShot Developer](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer)! +Welcome to the OpenShot developer community! We look forward to meeting you! diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/doc/INSTALL-MAC.md libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/doc/INSTALL-MAC.md --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/doc/INSTALL-MAC.md 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/doc/INSTALL-MAC.md 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,218 @@ +## Getting Started + +The best way to get started with libopenshot, is to learn about our build system, obtain all the source code, +install a development IDE and tools, and better understand our dependencies. So, please read through the +following sections, and follow the instructions. And keep in mind, that your computer is likely different +than the one used when writing these instructions. Your file paths and versions of applications might be +slightly different, so keep an eye out for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. It is a cross-platform build system, which checks for +dependencies, locates header files and libraries, generates makefiles, and supports the cross-platform +compiling of libopenshot and libopenshot-audio. CMake uses an out-of-source build concept, where +all temporary build files, such as makefiles, object files, and even the final binaries, are created +outside of the source code folder, inside a /build/ sub-folder. This prevents the build process +from cluttering up the source code. 
These instructions have only been tested with the GNU compiler +(including MSYS2/MinGW for Windows). + +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to install these +dependencies vary for each operating system. Libraries and Executables have been labeled in the +list below to help distinguish between them. + +* ### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes. + +* ### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +* ### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. It is based on the JUCE project, which is an outstanding audio library used by many different applications + +* ### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc... + +* ### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. + +* ### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. + +* ### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +* ### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. + +* ### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. + +* ### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. + +* ### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot is compiled. +Some of these flags might be required when compiling on certain OSes, just depending on how your build +environment is setup. 
To add a build flag, follow this general syntax: +`cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../` + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code +is available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following command to +obtain the latest libopenshot source code. + +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. + +* ### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary build files, such as makefiles, as well as the final binaries (library and test executables). + +* ### cmake/ + * This folder contains custom modules not included by default in cmake, used to find dependency libraries and headers and determine if these libraries are installed. + +* ### doc/ + * This folder contains documentation and related files, such as logos and images required by the doxygen auto-generated documentation. + +* ### include/ + * This folder contains all headers (*.h) used by libopenshot. + +* ### src/ + * This folder contains all source code (*.cpp) used by libopenshot. + +* ### tests/ + * This folder contains all unit test code. Each class has it’s own test file (*.cpp), and uses UnitTest++ macros to keep the test code simple and manageable. + +* ### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an open-source JSON parser. + +## Install Dependencies + +In order to actually compile libopenshot and libopenshot-audio, we need to install some dependencies on +your system. Most packages needed by libopenshot can be installed easily with Homebrew. However, first +install Xcode with the following options ("UNIX Development", "System Tools", "Command Line Tools", or +"Command Line Support"). Be sure to refresh your list of Homebrew packages with the “brew update” command. + +**NOTE:** Homebrew seems to work much better for most users (compared to MacPorts), so I am going to +focus on brew for this guide. + +Install the following packages using the Homebrew package installer (http://brew.sh/). Pay close attention +to any warnings or errors during these brew installs. NOTE: You might have some conflicting libraries in +your /usr/local/ folders, so follow the directions from brew if these are detected. + +``` +brew install gcc48 --enable-all-languages +brew install ffmpeg +brew install librsvg +brew install swig +brew install doxygen +brew install unittest-cpp --cc=gcc-4.8. You must specify the c++ compiler with the --cc flag to be 4.7 or 4.8. +brew install qt5 +brew install cmake +brew install zeromq +``` + +## Mac Build Instructions (libopenshot-audio) +Since libopenshot-audio is not available in a Homebrew or MacPorts package, we need to go through a +few additional steps to manually build and install it. 
Launch a terminal and enter: + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake -d -G "Unix Makefiles" -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang ../ (CLang must be used due to GNU incompatible Objective-C code in some of the Apple frameworks) +make +make install +./src/openshot-audio-test-sound (This should play a test sound) +``` + +## Mac Build Instructions (libopenshot) +Run the following commands to build libopenshot: + +``` +$ cd [libopenshot repo folder] +$ mkdir build +$ cd build +$ cmake -G "Unix Makefiles" -DCMAKE_CXX_COMPILER=/usr/local/opt/gcc48/bin/g++-4.8 -DCMAKE_C_COMPILER=/usr/local/opt/gcc48/bin/gcc-4.8 -DCMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.4.2/ -DPYTHON_INCLUDE_DIR=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/include/python3.3m/ -DPYTHON_LIBRARY=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/lib/libpython3.3.dylib -DPython_FRAMEWORKS=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/ ../ -D"CMAKE_BUILD_TYPE:STRING=Debug" +``` + +The extra arguments on the cmake command make sure the compiler will be gcc4.8 and that cmake +knows where to look for the Qt header files and Python library. Double check these file paths, +as yours will likely be different. + +``` +make +``` + +If you are missing any dependencies for libopenshot, you will receive error messages at this point. +Just install the missing dependencies, and run the above commands again. Repeat until no error +messages are displayed and the build process completes. + +Also, if you are having trouble building, please see the CMake Flags section above, as it might +provide a solution for finding a missing folder path, missing Python 3 library, etc... + +To run all unit tests (and verify everything is working correctly), launch a terminal, and enter: + +``` +make test +``` + +To auto-generate the documentation for libopenshot, launch a terminal, and enter: + +``` +make doc +``` + +This will use doxygen to generate a folder of HTML files, with all classes and methods documented. +The folder is located at build/doc/html/. Once libopenshot has been successfully built, we need +to install it (i.e. copy it to the correct folder, so other libraries can find it). + +``` +make install +``` + +This should copy the binary files to /usr/local/lib/, and the header files to /usr/local/include/openshot/... +This is where other projects will look for the libopenshot files when building. Python 3 bindings are +also installed at this point. let's verify the python bindings work: + +``` +python3 (or python) +>>> import openshot +``` + +If no errors are displayed, you have successfully compiled and installed libopenshot on your +system. Congratulations and be sure to read our wiki on [Becoming an OpenShot Developer](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer)! +Welcome to the OpenShot developer community! We look forward to meeting you! 
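As a quick recap of the install checks described above, the following sketch assumes the default /usr/local prefix used by `make install`; adjust the paths if you configured a different prefix.

```
# Post-install sanity checks (assumes the default /usr/local install prefix).
ls /usr/local/lib/libopenshot*          # the installed shared library
ls /usr/local/include/openshot/ | head  # the installed headers
python3 -c "import openshot"           # the Python bindings; silence means success
```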
diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/doc/INSTALL-WINDOWS.md libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/doc/INSTALL-WINDOWS.md --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/doc/INSTALL-WINDOWS.md 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/doc/INSTALL-WINDOWS.md 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,329 @@ +## Getting Started + +The best way to get started with libopenshot, is to learn about our build system, obtain all the +source code, install a development IDE and tools, and better understand our dependencies. So, +please read through the following sections, and follow the instructions. And keep in mind, +that your computer is likely different than the one used when writing these instructions. +Your file paths and versions of applications might be slightly different, so keep an eye out +for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. It is a cross-platform build system, which +checks for dependencies, locates header files and libraries, generates makefiles, and +supports the cross-platform compiling of libopenshot and libopenshot-audio. CMake uses +an out-of-source build concept, where all temporary build files, such as makefiles, +object files, and even the final binaries, are created outside of the source code +folder, inside a /build/ sub-folder. This prevents the build process from cluttering +up the source code. These instructions have only been tested with the GNU compiler +(including MSYS2/MinGW for Windows). + +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to +install these dependencies vary for each operating system. Libraries and Executables +have been labeled in the list below to help distinguish between them. + +* ### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes. + +* ### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +* ### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. It is based on the JUCE project, which is an outstanding audio library used by many different applications + +* ### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc... + +* ### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. + +* ### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. 
+ +* ### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +* ### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. + +* ### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. + +* ### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. + +* ### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot +is compiled. Some of these flags might be required when compiling on certain OSes, just +depending on how your build environment is setup. To add a build flag, follow this general +syntax: `cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../` + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Environment Variables + +Many environment variables will need to be set during this Windows installation guide. +The command line will need to be closed and re-launched after any changes to your environment +variables. Also, dependency libraries will not be found during linking or execution without +being found in the PATH environment variable. So, if you get errors related to missing +commands or libraries, double check the PATH variable. + +The following environment variables need to be added to your “System Variables”. Be sure to +check each folder path for accuracy, as your paths will likely be different than this list. 
+ +### Example Variables + +* DL_DIR (`C:\libdl`) +* DXSDK_DIR (`C:\Program Files\Microsoft DirectX SDK (June 2010)\`) +* FFMPEGDIR (`C:\ffmpeg-git-95f163b-win32-dev`) +* FREETYPE_DIR (`C:\Program Files\GnuWin32`) +* HOME (`C:\msys\1.0\home`) +* LIBOPENSHOT_AUDIO_DIR (`C:\Program Files\libopenshot-audio`) +* QTDIR (`C:\qt5`) +* SNDFILE_DIR (`C:\Program Files\libsndfile`) +* UNITTEST_DIR (`C:\UnitTest++`) +* ZMQDIR (`C:\msys2\usr\local\`) +* PATH (`The following paths are an example`) + * C:\Qt5\bin; C:\Qt5\MinGW\bin\; C:\msys\1.0\local\lib; C:\Program Files\CMake 2.8\bin; C:\UnitTest++\build; C:\libopenshot\build\src; C:\Program Files\doxygen\bin; C:\ffmpeg-git-95f163b-win32-dev\lib; C:\swigwin-2.0.4; C:\Python33; C:\Program Files\Project\lib; C:\msys2\usr\local\ + + + + + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code +is available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following commands to +obtain the latest libopenshot and libopenshot-audio source code. + +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. + +* ### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary + build files, such as makefiles, as well as the final binaries (library and test executables). + +* ### cmake/ + * This folder contains custom modules not included by default in cmake, used to find + dependency libraries and headers and determine if these libraries are installed. + +* ### doc/ + * This folder contains documentation and related files, such as logos and images + required by the doxygen auto-generated documentation. + +* ### include/ + * This folder contains all headers (*.h) used by libopenshot. + +* ### src/ + * This folder contains all source code (*.cpp) used by libopenshot. + +* ### tests/ + * This folder contains all unit test code. Each class has its own test file (*.cpp), and + uses UnitTest++ macros to keep the test code simple and manageable. + +* ### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an + open-source JSON parser. + +## Install MSYS2 Dependencies + +Most Windows dependencies needed for libopenshot-audio, libopenshot, and openshot-qt +can be installed easily with MSYS2 and the pacman package manager. Follow these +directions to set up a Windows build environment for OpenShot.
+ +1) Install MSYS2: http://www.msys2.org/ + +2) Run MSYS2 command prompt (for example: `C:\msys64\msys2_shell.cmd`) + +3) Append PATH (so MSYS2 can find executables and libraries): + +``` +PATH=$PATH:/c/msys64/mingw64/bin:/c/msys64/mingw64/lib (64-bit PATH) + or +PATH=$PATH:/c/msys32/mingw32/bin:/c/msys32/mingw32/lib (32-bit PATH) +``` + +4) Update and upgrade all packages + +``` +pacman -Syu +``` + +5a) Install the following packages (**64-Bit**) + +``` +pacman -S --needed base-devel mingw-w64-x86_64-toolchain +pacman -S mingw64/mingw-w64-x86_64-ffmpeg +pacman -S mingw64/mingw-w64-x86_64-python3-pyqt5 +pacman -S mingw64/mingw-w64-x86_64-swig +pacman -S mingw64/mingw-w64-x86_64-cmake +pacman -S mingw64/mingw-w64-x86_64-doxygen +pacman -S mingw64/mingw-w64-x86_64-python3-pip +pacman -S mingw32/mingw-w64-i686-zeromq +pacman -S mingw64/mingw-w64-x86_64-python3-pyzmq +pacman -S mingw64/mingw-w64-x86_64-python3-cx_Freeze +pacman -S git + +# Install ImageMagick if needed (OPTIONAL and NOT NEEDED) +pacman -S mingw64/mingw-w64-x86_64-imagemagick +``` + +5b) **Or** Install the following packages (**32-Bit**) + +``` +pacman -S --needed base-devel mingw32/mingw-w64-i686-toolchain +pacman -S mingw32/mingw-w64-i686-ffmpeg +pacman -S mingw32/mingw-w64-i686-python3-pyqt5 +pacman -S mingw32/mingw-w64-i686-swig +pacman -S mingw32/mingw-w64-i686-cmake +pacman -S mingw32/mingw-w64-i686-doxygen +pacman -S mingw32/mingw-w64-i686-python3-pip +pacman -S mingw32/mingw-w64-i686-zeromq +pacman -S mingw32/mingw-w64-i686-python3-pyzmq +pacman -S mingw32/mingw-w64-i686-python3-cx_Freeze +pacman -S git + +# Install ImageMagick if needed (OPTIONAL and NOT NEEDED) +pacman -S mingw32/mingw-w64-i686-imagemagick +``` + +6) Install Python PIP Dependencies + +``` +pip3 install httplib2 +pip3 install slacker +pip3 install tinys3 +pip3 install github3.py +pip3 install requests +``` + +7) Download UnitTest++ (https://github.com/unittest-cpp/unittest-cpp) into /MSYS2/[USER]/unittest-cpp-master/ + +``` +cmake -G "MSYS Makefiles" ../ -DCMAKE_MAKE_PROGRAM=mingw32-make -DCMAKE_INSTALL_PREFIX:PATH=/usr +mingw32-make install +``` + +8) ZMQ++ Header (This might not be needed anymore) + NOTE: Download and copy zmq.hpp into the /c/msys64/mingw64/include/ folder + +## Manual Dependencies + +* ### DLfcn + * https://github.com/dlfcn-win32/dlfcn-win32 + * Download and Extract the Win32 Static (.tar.bz2) archive to a local folder: C:\libdl\ + * Create an environment variable called DL_DIR and set the value to C:\libdl\. This environment variable will be used by CMake to find the binary and header file. + +* ### DirectX SDK / Windows SDK + * Windows 7: (DirectX SDK) http://www.microsoft.com/download/en/details.aspx?displaylang=en&id=6812 + * Windows 8: (Windows SDK) + * https://msdn.microsoft.com/en-us/windows/desktop/aa904949 + * Download and Install the SDK Setup program. This is needed for the JUCE library to play audio on Windows. + * Create an environment variable called DXSDK_DIR and set the value to C:\Program Files\Microsoft DirectX SDK (June 2010)\ (your path might be different). This environment variable will be used by CMake to find the binaries and header files. + +* ### libSndFile + * http://www.mega-nerd.com/libsndfile/#Download + * Download and Install the Win32 Setup program. + * Create an environment variable called SNDFILE_DIR and set the value to C:\Program Files\libsndfile. This environment variable will be used by CMake to find the binary and header files.
+ +* ### libzmq + * http://zeromq.org/intro:get-the-software + * Download source code (zip) + * Follow their instructions, and build with mingw + * Create an environment variable called ZMQDIR and set the value to C:\libzmq\build\ (the location of the compiled version). This environment variable will be used by CMake to find the binary and header files. + +## Windows Build Instructions (libopenshot-audio) +In order to compile libopenshot-audio, launch a command prompt and enter the following commands. This does not require the MSYS2 prompt; it should work in both the Windows command prompt and the MSYS2 prompt. + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake -G "MinGW Makefiles" ../ +mingw32-make +mingw32-make install +openshot-audio-test-sound (This should play a test sound) +``` + +## Windows Build Instructions (libopenshot) +Run the following commands to build libopenshot: + +``` +cd [libopenshot repo folder] +mkdir build +cd build +cmake -G "MinGW Makefiles" -DPYTHON_INCLUDE_DIR="C:/Python34/include/" -DPYTHON_LIBRARY="C:/Python34/libs/libpython34.a" ../ +mingw32-make +``` + +If you are missing any dependencies for libopenshot, you will receive error messages at this point. +Just install the missing dependencies, and run the above commands again. Repeat until no error +messages are displayed and the build process completes. + +Also, if you are having trouble building, please see the CMake Flags section above, as +it might provide a solution for finding a missing folder path, missing Python 3 library, etc... + +To run all unit tests (and verify everything is working correctly), launch a terminal, and enter: + +``` +mingw32-make test +``` + +To auto-generate the documentation for libopenshot, launch a terminal, and enter: + +``` +mingw32-make doc +``` + +This will use doxygen to generate a folder of HTML files, with all classes and methods +documented. The folder is located at build/doc/html/. Once libopenshot has been successfully +built, we need to install it (i.e. copy it to the correct folder, so other libraries can find it). + +``` +mingw32-make install +``` + +This should copy the binary files to C:\Program Files\openshot\lib\, and the header +files to C:\Program Files\openshot\include\... This is where other projects will +look for the libopenshot files when building. Python 3 bindings are also installed +at this point. Let's verify the Python bindings work: + +``` +python3 +>>> import openshot +``` + +If no errors are displayed, you have successfully compiled and installed libopenshot on +your system. Congratulations, and be sure to read our wiki on [Becoming an OpenShot Developer](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer)! +Welcome to the OpenShot developer community! We look forward to meeting you!
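As an optional extra check, the short Python 3 session below (a sketch, not part of the official steps) goes one step beyond the bare import above. The file name `test.mp4` is only a placeholder; use any media file on your system. The class and member names mirror the public C++ API (`FFmpegReader`, `Open()`, `GetFrame()`, `Close()`, and the `info` struct), so if this runs cleanly, the bindings and the FFmpeg libraries are being found correctly.

```
python3
>>> import openshot
>>> r = openshot.FFmpegReader("test.mp4")       # placeholder path - use any local video file
>>> r.Open()
>>> print(r.info.width, "x", r.info.height)     # frame size reported by the reader
>>> print(r.info.fps.num, "/", r.info.fps.den)  # frame rate as a fraction
>>> frame = r.GetFrame(1)                       # frame numbers start at 1
>>> r.Close()
```

If the import succeeds but opening the file fails, the most likely cause is a missing FFmpeg DLL on the PATH (see the Environment Variables section above).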
diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.gitlab-ci.yml libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.gitlab-ci.yml --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.gitlab-ci.yml 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.gitlab-ci.yml 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,129 @@ +stages: + - build-libopenshot + - trigger-openshot-qt + +linux-builder: + stage: build-libopenshot + artifacts: + expire_in: 6 months + paths: + - build/install-x64/* + script: + - "curl -O -J -L --header PRIVATE-TOKEN:$ACCESS_TOKEN http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/$CI_COMMIT_REF_NAME/download?job=linux-builder" + - if [ ! -f artifacts.zip ]; then + - "curl -O -J -L --header PRIVATE-TOKEN:$ACCESS_TOKEN http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/develop/download?job=linux-builder" + - fi + - unzip artifacts.zip + - export LIBOPENSHOT_AUDIO_DIR=$CI_PROJECT_DIR/build/install-x64 + - mkdir -p build; cd build; + - mkdir -p install-x64/python; + - cmake -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR/build/install-x64" -D"CMAKE_BUILD_TYPE:STRING=Release" ../ + - make + - make install + - mv /usr/local/lib/python3.4/dist-packages/*openshot* install-x64/python + - echo -e "CI_PROJECT_NAME:$CI_PROJECT_NAME\nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME\nCI_COMMIT_SHA:$CI_COMMIT_SHA\nCI_JOB_ID:$CI_JOB_ID" > "install-x64/share/$CI_PROJECT_NAME" + - git log $(git describe --tags --abbrev=0)..HEAD --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x64/share/$CI_PROJECT_NAME.log" + when: always + except: + - tags + tags: + - linux + +mac-builder: + stage: build-libopenshot + artifacts: + expire_in: 6 months + paths: + - build/install-x64/* + script: + - "curl -O -J -L --header PRIVATE-TOKEN:$ACCESS_TOKEN http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/$CI_COMMIT_REF_NAME/download?job=mac-builder" + - if [ ! 
-f artifacts.zip ]; then + - "curl -O -J -L --header PRIVATE-TOKEN:$ACCESS_TOKEN http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/develop/download?job=mac-builder" + - fi + - unzip artifacts.zip + - export LIBOPENSHOT_AUDIO_DIR=$CI_PROJECT_DIR/build/install-x64 + - mkdir -p build; cd build; + - mkdir -p install-x64/python; + - cmake -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR/build/install-x64" -DCMAKE_CXX_COMPILER=/usr/local/opt/gcc48/bin/g++-4.8 -DCMAKE_C_COMPILER=/usr/local/opt/gcc48/bin/gcc-4.8 -DCMAKE_PREFIX_PATH=/usr/local/qt5/5.5/clang_64 -DPYTHON_INCLUDE_DIR=/Library/Frameworks/Python.framework/Versions/3.6/include/python3.6m -DPYTHON_LIBRARY=/Library/Frameworks/Python.framework/Versions/3.6/lib/libpython3.6.dylib -DPython_FRAMEWORKS=/Library/Frameworks/Python.framework/ -D"CMAKE_BUILD_TYPE:STRING=Debug" -D"CMAKE_OSX_SYSROOT=/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.9.sdk" -D"CMAKE_OSX_DEPLOYMENT_TARGET=10.9" -D"CMAKE_INSTALL_RPATH_USE_LINK_PATH=1" -D"ENABLE_RUBY=0" ../ + - make + - make install + - mv /Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/*openshot* install-x64/python + - echo -e "CI_PROJECT_NAME:$CI_PROJECT_NAME\nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME\nCI_COMMIT_SHA:$CI_COMMIT_SHA\nCI_JOB_ID:$CI_JOB_ID" > "install-x64/share/$CI_PROJECT_NAME" + - git log $(git describe --tags --abbrev=0)..HEAD --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x64/share/$CI_PROJECT_NAME.log" + when: always + except: + - tags + tags: + - mac + +windows-builder-x86: + stage: build-libopenshot + artifacts: + expire_in: 6 months + paths: + - build\install-x86\* + script: + - try { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/$CI_COMMIT_REF_NAME/download?job=windows-builder-x86" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } catch { $_.Exception.Response.StatusCode.Value__ } + - if (-not (Test-Path "artifacts.zip")) { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/develop/download?job=windows-builder-x86" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } + - Expand-Archive -Path artifacts.zip -DestinationPath . 
+ - $env:LIBOPENSHOT_AUDIO_DIR = "$CI_PROJECT_DIR\build\install-x86" + - $env:UNITTEST_DIR = "C:\msys32\usr" + - $env:RESVGDIR = "C:\msys32\usr\local" + - $env:ZMQDIR = "C:\msys32\usr" + - $env:Path = "C:\msys32\mingw32\bin;C:\msys32\mingw32\lib;C:\msys32\usr\lib\cmake\UnitTest++;C:\msys32\home\jonathan\depot_tools;C:\msys32\usr;C:\msys32\usr\lib;" + $env:Path; + - New-Item -ItemType Directory -Force -Path build + - New-Item -ItemType Directory -Force -Path build\install-x86\python + - cd build + - cmake -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR\build\install-x86" -G "MinGW Makefiles" -D"CMAKE_BUILD_TYPE:STRING=Release" -D"CMAKE_CXX_FLAGS=-m32" -D"CMAKE_EXE_LINKER_FLAGS=-Wl,--large-address-aware" -D"CMAKE_C_FLAGS=-m32" ../ + - mingw32-make install + - Move-Item -Force -path "C:\msys32\mingw32\lib\python3.6\site-packages\*openshot*" -destination "install-x86\python\" + - cp src\libopenshot.dll install-x86\lib + - New-Item -path "install-x86/share/" -Name "$CI_PROJECT_NAME" -Value "CI_PROJECT_NAME:$CI_PROJECT_NAME`nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME`nCI_COMMIT_SHA:$CI_COMMIT_SHA`nCI_JOB_ID:$CI_JOB_ID" -ItemType file -force + - $PREV_GIT_LABEL=(git describe --tags --abbrev=0) + - git log "$PREV_GIT_LABEL..HEAD" --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x86/share/$CI_PROJECT_NAME.log" + when: always + except: + - tags + tags: + - windows + +windows-builder-x64: + stage: build-libopenshot + artifacts: + expire_in: 6 months + paths: + - build\install-x64\* + script: + - try { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/$CI_COMMIT_REF_NAME/download?job=windows-builder-x64" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } catch { $_.Exception.Response.StatusCode.Value__ } + - if (-not (Test-Path "artifacts.zip")) { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/develop/download?job=windows-builder-x64" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } + - Expand-Archive -Path artifacts.zip -DestinationPath . 
+ - $env:LIBOPENSHOT_AUDIO_DIR = "$CI_PROJECT_DIR\build\install-x64" + - $env:UNITTEST_DIR = "C:\msys64\usr" + - $env:ZMQDIR = "C:\msys64\usr" + - $env:Path = "C:\msys64\mingw64\bin;C:\msys64\mingw64\lib;C:\msys64\usr\lib\cmake\UnitTest++;C:\msys64\home\jonathan\depot_tools;C:\msys64\usr;C:\msys64\usr\lib;" + $env:Path; + - New-Item -ItemType Directory -Force -Path build + - New-Item -ItemType Directory -Force -Path build\install-x64\python + - cd build + - cmake -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR\build\install-x64" -G "MinGW Makefiles" -D"CMAKE_BUILD_TYPE:STRING=Release" ../ + - mingw32-make install + - Move-Item -Force -path "C:\msys64\mingw64\lib\python3.6\site-packages\*openshot*" -destination "install-x64\python\" + - cp src\libopenshot.dll install-x64\lib + - New-Item -path "install-x64/share/" -Name "$CI_PROJECT_NAME" -Value "CI_PROJECT_NAME:$CI_PROJECT_NAME`nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME`nCI_COMMIT_SHA:$CI_COMMIT_SHA`nCI_JOB_ID:$CI_JOB_ID" -ItemType file -force + - $PREV_GIT_LABEL=(git describe --tags --abbrev=0) + - git log "$PREV_GIT_LABEL..HEAD" --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x64/share/$CI_PROJECT_NAME.log" + when: always + except: + - tags + tags: + - windows + +trigger-pipeline: + stage: trigger-openshot-qt + script: + - "curl -X POST -F token=$OPENSHOT_QT_PIPELINE_TOKEN -F ref=$CI_COMMIT_REF_NAME http://gitlab.openshot.org/api/v4/projects/3/trigger/pipeline" + when: always + dependencies: [] + except: + - tags + tags: + - gitlab diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/ChunkReader.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/ChunkReader.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/ChunkReader.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/ChunkReader.h 2019-03-21 07:31:31.000000000 +0000 @@ -29,8 +29,6 @@ #define OPENSHOT_CHUNK_READER_H #include "ReaderBase.h" -#include "FFmpegReader.h" - #include #include #include @@ -107,7 +105,7 @@ string path; bool is_open; int64_t chunk_size; - FFmpegReader *local_reader; + ReaderBase *local_reader; ChunkLocation previous_location; ChunkVersion version; std::shared_ptr last_frame; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/ClipBase.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/ClipBase.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/ClipBase.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/ClipBase.h 2019-03-21 07:31:31.000000000 +0000 @@ -58,8 +58,6 @@ float start; ///< The position in seconds to start playing (used to trim the beginning of a clip) float end; ///< The position in seconds to end playing (used to trim the ending of a clip) string previous_properties; ///< This string contains the previous JSON properties - int max_width; ///< The maximum image width needed by this clip (used for optimizations) - int max_height; ///< The maximium image height needed by this clip (used for optimizations) /// Generate JSON for a property Json::Value add_property_json(string name, float value, string type, string memo, Keyframe* keyframe, float min_value, float max_value, bool readonly, int64_t requested_frame); @@ -70,7 +68,7 @@ public: /// Constructor for the base clip - 
ClipBase() { max_width = 0; max_height = 0; }; + ClipBase() { }; // Compare a clip using the Position() property bool operator< ( ClipBase& a) { return (Position() < a.Position()); } @@ -93,9 +91,6 @@ void Start(float value) { start = value; } ///< Set start position (in seconds) of clip (trim start of video) void End(float value) { end = value; } ///< Set end position (in seconds) of clip (trim end of video) - /// Set Max Image Size (used for performance optimization) - void SetMaxSize(int width, int height) { max_width = width; max_height = height; }; - /// Get and Set JSON methods virtual string Json() = 0; ///< Generate JSON string of this object virtual void SetJson(string value) = 0; ///< Load JSON string into this object diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Clip.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Clip.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Clip.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Clip.h 2019-03-21 07:31:31.000000000 +0000 @@ -44,18 +44,9 @@ #include "EffectBase.h" #include "Effects.h" #include "EffectInfo.h" -#include "FFmpegReader.h" #include "Fraction.h" -#include "FrameMapper.h" -#ifdef USE_IMAGEMAGICK - #include "ImageReader.h" - #include "TextReader.h" -#endif -#include "QtImageReader.h" -#include "ChunkReader.h" #include "KeyFrame.h" #include "ReaderBase.h" -#include "DummyReader.h" using namespace std; using namespace openshot; @@ -136,11 +127,14 @@ std::shared_ptr GetOrCreateFrame(int64_t number); /// Adjust the audio and image of a time mapped frame - std::shared_ptr get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number); + void get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number); /// Init default settings for a clip void init_settings(); + /// Update default rotation from reader + void init_reader_rotation(); + /// Sort effects by order void sort_effects(); @@ -152,6 +146,7 @@ ScaleType scale; ///< The scale determines how a clip should be resized to fit it's parent AnchorType anchor; ///< The anchor determines what parent a clip should snap to FrameDisplayType display; ///< The format to display the frame number (if any) + VolumeMixType mixing; ///< What strategy should be followed when mixing audio with other clips /// Default Constructor Clip(); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Coordinate.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Coordinate.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Coordinate.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Coordinate.h 2019-03-21 07:31:31.000000000 +0000 @@ -52,11 +52,6 @@ * \endcode */ class Coordinate { - private: - bool increasing; ///< Is the Y value increasing or decreasing? 
- Fraction repeated; ///< Fraction of repeated Y values (for example, 1/3 would be the first Y value of 3 repeated values) - double delta; ///< This difference in Y value (from the previous unique Y value) - public: double X; ///< The X value of the coordinate (usually representing the frame #) double Y; ///< The Y value of the coordinate (usually representing the value of the property being animated) @@ -69,27 +64,6 @@ /// @param y The Y coordinate (usually representing the value of the property being animated) Coordinate(double x, double y); - /// @brief Set the repeating Fraction (used internally on the timeline, to track changes to coordinates) - /// @param is_repeated The fraction representing how many times this coordinate Y value repeats (only used on the timeline) - void Repeat(Fraction is_repeated) { repeated=is_repeated; } - - /// Get the repeating Fraction (used internally on the timeline, to track changes to coordinates) - Fraction Repeat() { return repeated; } - - /// @brief Set the increasing flag (used internally on the timeline, to track changes to coordinates) - /// @param is_increasing Indicates if this coorindate Y value is increasing (when compared to the previous coordinate) - void IsIncreasing(bool is_increasing) { increasing = is_increasing; } - - /// Get the increasing flag (used internally on the timeline, to track changes to coordinates) - bool IsIncreasing() { return increasing; } - - /// @brief Set the delta / difference between previous coordinate value (used internally on the timeline, to track changes to coordinates) - /// @param new_delta Indicates how much this Y value differs from the previous Y value - void Delta(double new_delta) { delta=new_delta; } - - /// Get the delta / difference between previous coordinate value (used internally on the timeline, to track changes to coordinates) - float Delta() { return delta; } - /// Get and Set JSON methods string Json(); ///< Generate JSON string of this object Json::Value JsonValue(); ///< Generate Json::JsonValue for this object diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/CrashHandler.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/CrashHandler.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/CrashHandler.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/CrashHandler.h 2019-03-21 07:31:31.000000000 +0000 @@ -53,13 +53,15 @@ class CrashHandler { private: /// Default constructor - CrashHandler(){}; // Don't allow user to create an instance of this singleton + CrashHandler(){return;}; // Don't allow user to create an instance of this singleton /// Default copy method - CrashHandler(CrashHandler const&){}; // Don't allow the user to copy this instance + //CrashHandler(CrashHandler const&){}; // Don't allow the user to copy this instance + CrashHandler(CrashHandler const&) = delete; // Don't allow the user to copy this instance /// Default assignment operator - CrashHandler & operator=(CrashHandler const&){}; // Don't allow the user to assign this instance + //CrashHandler & operator=(CrashHandler const&){}; // Don't allow the user to assign this instance + CrashHandler & operator=(CrashHandler const&) = delete; // Don't allow the user to assign this instance /// Private variable to keep track of singleton instance static CrashHandler *m_pInstance; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/DecklinkInput.h 
libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/DecklinkInput.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/DecklinkInput.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/DecklinkInput.h 2019-03-21 07:31:31.000000000 +0000 @@ -62,9 +62,9 @@ #include #include "DeckLinkAPI.h" -#include "../include/Frame.h" +#include "Frame.h" #include "CacheMemory.h" -#include "../include/OpenMPUtilities.h" +#include "OpenMPUtilities.h" /// Implementation of the Blackmagic Decklink API (used by the DecklinkReader) class DeckLinkInputDelegate : public IDeckLinkInputCallback diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/DecklinkOutput.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/DecklinkOutput.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/DecklinkOutput.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/DecklinkOutput.h 2019-03-21 07:31:31.000000000 +0000 @@ -63,8 +63,8 @@ #include "DeckLinkAPI.h" #include "CacheMemory.h" -#include "../include/Frame.h" -#include "../include/OpenMPUtilities.h" +#include "Frame.h" +#include "OpenMPUtilities.h" enum OutputSignal { kOutputSignalPip = 0, diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/DecklinkReader.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/DecklinkReader.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/DecklinkReader.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/DecklinkReader.h 2019-03-21 07:31:31.000000000 +0000 @@ -56,7 +56,7 @@ * @brief This class uses the Blackmagic Decklink libraries, to open video streams on Blackmagic devices. * * This requires special hardware manufactured by Blackmagic Designs. - * Once the device is aquired and connected, this reader returns openshot::Frame objects containing the image and audio data. + * Once the device is acquired and connected, this reader returns openshot::Frame objects containing the image and audio data. */ class DecklinkReader : public ReaderBase { diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/DecklinkWriter.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/DecklinkWriter.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/DecklinkWriter.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/DecklinkWriter.h 2019-03-21 07:31:31.000000000 +0000 @@ -56,7 +56,7 @@ * @brief This class uses the Blackmagic Decklink libraries, to send video streams to Blackmagic devices. * * This requires special hardware manufactured by Blackmagic Designs. - * Once the device is aquired and connected, this reader returns openshot::Frame objects containing the image and audio data. + * Once the device is acquired and connected, this reader returns openshot::Frame objects containing the image and audio data. 
*/ class DecklinkWriter : public WriterBase { diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/EffectBase.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/EffectBase.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/EffectBase.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/EffectBase.h 2019-03-21 07:31:31.000000000 +0000 @@ -75,6 +75,9 @@ /// Display effect information in the standard output stream (stdout) void DisplayInfo(); + /// Constrain a color value from 0 to 255 + int constrain(int color_value); + /// @brief This method is required for all derived classes of EffectBase, and returns a /// modified openshot::Frame object /// diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Bars.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Bars.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Bars.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Bars.h 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,103 @@ +/** + * @file + * @brief Header file for Bars effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_BARS_EFFECT_H +#define OPENSHOT_BARS_EFFECT_H + +#include "../EffectBase.h" + +#include +#include +#include +#include "../Color.h" +#include "../Json.h" +#include "../KeyFrame.h" + + +using namespace std; + +namespace openshot +{ + + /** + * @brief This class draws black bars around your video (from any side), and can be animated with + * openshot::Keyframe curves over time. + * + * Adding bars around your video can be done for cinematic reasons, and creates a fun way to frame + * in the focal point of a scene. The bars can be any color, and each side can be animated independently. + */ + class Bars : public EffectBase + { + private: + /// Init effect settings + void init_effect_details(); + + + public: + Color color; ///< Color of bars + Keyframe left; ///< Size of left bar + Keyframe top; ///< Size of top bar + Keyframe right; ///< Size of right bar + Keyframe bottom; ///< Size of bottom bar + + /// Blank constructor, useful when using Json to load the effect properties + Bars(); + + /// Default constructor, which takes 4 curves and a color. These curves animated the bars over time. 
+ /// + /// @param color The curve to adjust the color of bars + /// @param left The curve to adjust the left bar size (between 0 and 1) + /// @param top The curve to adjust the top bar size (between 0 and 1) + /// @param right The curve to adjust the right bar size (between 0 and 1) + /// @param bottom The curve to adjust the bottom bar size (between 0 and 1) + Bars(Color color, Keyframe left, Keyframe top, Keyframe right, Keyframe bottom); + + /// @brief This method is required for all derived classes of EffectBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method, and a frame_number is passed in which + /// tells the effect which settings to use from it's keyframes (starting at 1). + /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the effect applied to it + /// @param frame_number The frame number (starting at 1) of the effect on the timeline. + std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); + + /// Get and Set JSON methods + string Json(); ///< Generate JSON string of this object + void SetJson(string value); ///< Load JSON string into this object + Json::Value JsonValue(); ///< Generate Json::JsonValue for this object + void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + string PropertiesJSON(int64_t requested_frame); + }; + +} + +#endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Brightness.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Brightness.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Brightness.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Brightness.h 2019-03-21 07:31:31.000000000 +0000 @@ -59,9 +59,6 @@ class Brightness : public EffectBase { private: - /// Constrain a color value from 0 to 255 - int constrain(int color_value); - /// Init effect settings void init_effect_details(); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/ChromaKey.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/ChromaKey.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/ChromaKey.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/ChromaKey.h 2019-03-21 07:31:31.000000000 +0000 @@ -70,7 +70,7 @@ /// more colors are matched. 
/// /// @param color The color to match - /// @param fuzz The fuzz factor (or threshhold) + /// @param fuzz The fuzz factor (or threshold) ChromaKey(Color color, Keyframe fuzz); /// @brief This method is required for all derived classes of EffectBase, and returns a diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/ColorShift.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/ColorShift.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/ColorShift.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/ColorShift.h 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,107 @@ +/** + * @file + * @brief Header file for Color Shift effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_COLOR_SHIFT_EFFECT_H +#define OPENSHOT_COLOR_SHIFT_EFFECT_H + +#include "../EffectBase.h" + +#include +#include +#include +#include "../Json.h" +#include "../KeyFrame.h" + + +using namespace std; + +namespace openshot +{ + + /** + * @brief This class shifts the pixels of an image up, down, left, or right, and can be animated + * with openshot::Keyframe curves over time. + * + * Shifting pixels can be used in many interesting ways, especially when animating the movement of the pixels. + * The pixels wrap around the image (the pixels drop off one side and appear on the other side of the image). + */ + class ColorShift : public EffectBase + { + private: + /// Init effect settings + void init_effect_details(); + + public: + Keyframe red_x; ///< Shift the Red X coordinates (left or right) + Keyframe red_y; ///< Shift the Red Y coordinates (up or down) + Keyframe green_x; ///< Shift the Green X coordinates (left or right) + Keyframe green_y; ///< Shift the Green Y coordinates (up or down) + Keyframe blue_x; ///< Shift the Blue X coordinates (left or right) + Keyframe blue_y; ///< Shift the Blue Y coordinates (up or down) + Keyframe alpha_x; ///< Shift the Alpha X coordinates (left or right) + Keyframe alpha_y; ///< Shift the Alpha Y coordinates (up or down) + + /// Blank constructor, useful when using Json to load the effect properties + ColorShift(); + + /// Default constructor, which takes 8 curves. 
The curves will shift the RGBA pixels up, down, left, or right + /// + /// @param red_x The curve to adjust the Red x shift (between -1 and 1, percentage) + /// @param red_y The curve to adjust the Red y shift (between -1 and 1, percentage) + /// @param green_x The curve to adjust the Green x shift (between -1 and 1, percentage) + /// @param green_y The curve to adjust the Green y shift (between -1 and 1, percentage) + /// @param blue_x The curve to adjust the Blue x shift (between -1 and 1, percentage) + /// @param blue_y The curve to adjust the Blue y shift (between -1 and 1, percentage) + /// @param alpha_x The curve to adjust the Alpha x shift (between -1 and 1, percentage) + /// @param alpha_y The curve to adjust the Alpha y shift (between -1 and 1, percentage) + ColorShift(Keyframe red_x, Keyframe red_y, Keyframe green_x, Keyframe green_y, Keyframe blue_x, Keyframe blue_y, Keyframe alpha_x, Keyframe alpha_y); + + /// @brief This method is required for all derived classes of EffectBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method, and a frame_number is passed in which + /// tells the effect which settings to use from it's keyframes (starting at 1). + /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the effect applied to it + /// @param frame_number The frame number (starting at 1) of the effect on the timeline. + std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); + + /// Get and Set JSON methods + string Json(); ///< Generate JSON string of this object + void SetJson(string value); ///< Load JSON string into this object + Json::Value JsonValue(); ///< Generate Json::JsonValue for this object + void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + string PropertiesJSON(int64_t requested_frame); + }; + +} + +#endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Crop.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Crop.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Crop.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Crop.h 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,103 @@ +/** + * @file + * @brief Header file for Crop effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. 
If not, see . + */ + +#ifndef OPENSHOT_CROP_EFFECT_H +#define OPENSHOT_CROP_EFFECT_H + +#include "../EffectBase.h" + +#include +#include +#include +#include "../Color.h" +#include "../Json.h" +#include "../KeyFrame.h" + + +using namespace std; + +namespace openshot +{ + + /** + * @brief This class crops a frame image (from any side), and can be animated with + * openshot::Keyframe curves over time. + * + * Cropping images can be useful when wanting to remove a border around an image or video, and animating + * the crop can create some very interesting effects. + */ + class Crop : public EffectBase + { + private: + /// Init effect settings + void init_effect_details(); + + + public: + Color color; ///< Color of bars + Keyframe left; ///< Size of left bar + Keyframe top; ///< Size of top bar + Keyframe right; ///< Size of right bar + Keyframe bottom; ///< Size of bottom bar + + /// Blank constructor, useful when using Json to load the effect properties + Crop(); + + /// Default constructor, which takes 4 curves. These curves animate the crop over time. + /// + /// @param color The curve to adjust the color of bars + /// @param left The curve to adjust the left bar size (between 0 and 1) + /// @param top The curve to adjust the top bar size (between 0 and 1) + /// @param right The curve to adjust the right bar size (between 0 and 1) + /// @param bottom The curve to adjust the bottom bar size (between 0 and 1) + Crop(Keyframe left, Keyframe top, Keyframe right, Keyframe bottom); + + /// @brief This method is required for all derived classes of EffectBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method, and a frame_number is passed in which + /// tells the effect which settings to use from it's keyframes (starting at 1). + /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the effect applied to it + /// @param frame_number The frame number (starting at 1) of the effect on the timeline. + std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); + + /// Get and Set JSON methods + string Json(); ///< Generate JSON string of this object + void SetJson(string value); ///< Load JSON string into this object + Json::Value JsonValue(); ///< Generate Json::JsonValue for this object + void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + string PropertiesJSON(int64_t requested_frame); + }; + +} + +#endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Hue.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Hue.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Hue.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Hue.h 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,93 @@ +/** + * @file + * @brief Header file for Hue effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . 
+ * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_HUE_EFFECT_H +#define OPENSHOT_HUE_EFFECT_H + +#include "../EffectBase.h" + +#include +#include +#include +#include "../Json.h" +#include "../KeyFrame.h" + + +using namespace std; + +namespace openshot +{ + + /** + * @brief This class shifts the hue of an image, and can be animated with openshot::Keyframe curves over time. + * + * Shifting hue can adjust the colors in an image towards red, blue, green, or anywhere in between. Animating hue + * can create some fun and interesting effects, but can also be used to change the mood of a scene, etc... + */ + class Hue : public EffectBase + { + private: + /// Init effect settings + void init_effect_details(); + + + public: + Keyframe hue; ///< Shift the hue coordinates (left or right) + + /// Blank constructor, useful when using Json to load the effect properties + Hue(); + + /// Default constructor, which takes 1 curve. The curves will shift the hue of the image. + /// + /// @param hue The curve to adjust the hue shift (between 0 and 1) + Hue(Keyframe hue); + + /// @brief This method is required for all derived classes of EffectBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method, and a frame_number is passed in which + /// tells the effect which settings to use from it's keyframes (starting at 1). + /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the effect applied to it + /// @param frame_number The frame number (starting at 1) of the effect on the timeline. 
+ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); + + /// Get and Set JSON methods + string Json(); ///< Generate JSON string of this object + void SetJson(string value); ///< Load JSON string into this object + Json::Value JsonValue(); ///< Generate Json::JsonValue for this object + void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + string PropertiesJSON(int64_t requested_frame); + }; + +} + +#endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Mask.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Mask.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Mask.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Mask.h 2019-03-21 07:31:31.000000000 +0000 @@ -65,12 +65,7 @@ private: ReaderBase *reader; std::shared_ptr original_mask; - - /// Constrain a color value from 0 to 255 - int constrain(int color_value); - - /// Get grayscale mask image - void set_grayscale_mask(std::shared_ptr mask_frame_image, int width, int height, float brightness, float contrast); + bool needs_refresh; /// Init effect settings void init_effect_details(); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Pixelate.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Pixelate.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Pixelate.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Pixelate.h 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,102 @@ +/** + * @file + * @brief Header file for Pixelate effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_PIXELATE_EFFECT_H +#define OPENSHOT_PIXELATE_EFFECT_H + +#include "../EffectBase.h" + +#include +#include +#include +#include "../Color.h" +#include "../Json.h" +#include "../KeyFrame.h" + + +using namespace std; + +namespace openshot +{ + + /** + * @brief This class pixelates an image, and can be animated with openshot::Keyframe curves over time. + * + * Pixelating the image is the process of increasing the size of visible pixels, thus loosing visual + * clarity of the image. The area to pixelate can be set and animated with keyframes also. 
+ */ + class Pixelate : public EffectBase + { + private: + /// Init effect settings + void init_effect_details(); + + + public: + Keyframe pixelization; ///< Amount of pixelization + Keyframe left; ///< Size of left margin + Keyframe top; ///< Size of top margin + Keyframe right; ///< Size of right margin + Keyframe bottom; ///< Size of bottom margin + + /// Blank constructor, useful when using Json to load the effect properties + Pixelate(); + + /// Default constructor, which takes 5 curves. These curves animate the pixelization effect over time. + /// + /// @param pixelization The curve to adjust the amount of pixelization (0 to 1) + /// @param left The curve to adjust the left margin size (between 0 and 1) + /// @param top The curve to adjust the top margin size (between 0 and 1) + /// @param right The curve to adjust the right margin size (between 0 and 1) + /// @param bottom The curve to adjust the bottom margin size (between 0 and 1) + Pixelate(Keyframe pixelization, Keyframe left, Keyframe top, Keyframe right, Keyframe bottom); + + /// @brief This method is required for all derived classes of EffectBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method, and a frame_number is passed in which + /// tells the effect which settings to use from it's keyframes (starting at 1). + /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the effect applied to it + /// @param frame_number The frame number (starting at 1) of the effect on the timeline. + std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); + + /// Get and Set JSON methods + string Json(); ///< Generate JSON string of this object + void SetJson(string value); ///< Load JSON string into this object + Json::Value JsonValue(); ///< Generate Json::JsonValue for this object + void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + string PropertiesJSON(int64_t requested_frame); + }; + +} + +#endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Saturation.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Saturation.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Saturation.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Saturation.h 2019-03-21 07:31:31.000000000 +0000 @@ -59,9 +59,6 @@ class Saturation : public EffectBase { private: - /// Constrain a color value from 0 to 255 - int constrain(int color_value); - /// Init effect settings void init_effect_details(); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Shift.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Shift.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Shift.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Shift.h 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,96 @@ +/** + * @file + * @brief Header file for Shift effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . 
This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_SHIFT_EFFECT_H +#define OPENSHOT_SHIFT_EFFECT_H + +#include "../EffectBase.h" + +#include +#include +#include +#include "../Json.h" +#include "../KeyFrame.h" + + +using namespace std; + +namespace openshot +{ + + /** + * @brief This class shifts the pixels of an image up, down, left, or right, and can be animated + * with openshot::Keyframe curves over time. + * + * Shifting pixels can be used in many interesting ways, especially when animating the movement of the pixels. + * The pixels wrap around the image (the pixels drop off one side and appear on the other side of the image). + */ + class Shift : public EffectBase + { + private: + /// Init effect settings + void init_effect_details(); + + + public: + Keyframe x; ///< Shift the X coordinates (left or right) + Keyframe y; ///< Shift the Y coordinates (up or down) + + /// Blank constructor, useful when using Json to load the effect properties + Shift(); + + /// Default constructor, which takes 2 curve. The curves will shift the pixels up, down, left, or right + /// + /// @param x The curve to adjust the x shift (between -1 and 1, percentage) + /// @param y The curve to adjust the y shift (between -1 and 1, percentage) + Shift(Keyframe x, Keyframe y); + + /// @brief This method is required for all derived classes of EffectBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method, and a frame_number is passed in which + /// tells the effect which settings to use from it's keyframes (starting at 1). + /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the effect applied to it + /// @param frame_number The frame number (starting at 1) of the effect on the timeline. 
+ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); + + /// Get and Set JSON methods + string Json(); ///< Generate JSON string of this object + void SetJson(string value); ///< Load JSON string into this object + Json::Value JsonValue(); ///< Generate Json::JsonValue for this object + void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + string PropertiesJSON(int64_t requested_frame); + }; + +} + +#endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Wave.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Wave.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/effects/Wave.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/effects/Wave.h 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,102 @@ +/** + * @file + * @brief Header file for Wave effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_WAVE_EFFECT_H +#define OPENSHOT_WAVE_EFFECT_H + +#include "../EffectBase.h" + +#include +#include +#include +#include "../Json.h" +#include "../KeyFrame.h" + + +using namespace std; + +namespace openshot +{ + + /** + * @brief This class distorts an image using a wave pattern + * + * Distorting an image with a wave can be used to simulate analog transmissions, and other effects, and each + * value of the wave computation can be animated with openshot::Keyframe curves over time. + */ + class Wave : public EffectBase + { + private: + //unsigned char *perm; + + /// Init effect settings + void init_effect_details(); + + public: + Keyframe wavelength; ///< The length of the wave + Keyframe amplitude; ///< The height of the wave + Keyframe multiplier; ///< Amount to multiply the wave (make it bigger) + Keyframe shift_x; ///< Amount to shift X-axis + Keyframe speed_y; ///< Speed of the wave on the Y-axis + + /// Blank constructor, useful when using Json to load the effect properties + Wave(); + + /// Default constructor, which takes 5 curves. The curves will distort the image.
+ /// + /// @param wavelength The curve to adjust the wavelength (0.0 to 3.0) + /// @param amplitude The curve to adjust the amplitude (0.0 to 5.0) + /// @param multiplier The curve to adjust the multiplier (0.0 to 1.0) + /// @param shift_x The curve to shift pixels along the x-axis (0 to 100) + /// @param speed_y The curve to adjust the vertical speed (0 to 10) + Wave(Keyframe wavelength, Keyframe amplitude, Keyframe multiplier, Keyframe shift_x, Keyframe speed_y); + + /// @brief This method is required for all derived classes of EffectBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method, and a frame_number is passed in which + /// tells the effect which settings to use from it's keyframes (starting at 1). + /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the effect applied to it + /// @param frame_number The frame number (starting at 1) of the effect on the timeline. + std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); + + /// Get and Set JSON methods + string Json(); ///< Generate JSON string of this object + void SetJson(string value); ///< Load JSON string into this object + Json::Value JsonValue(); ///< Generate Json::JsonValue for this object + void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + string PropertiesJSON(int64_t requested_frame); + }; + +} + +#endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Effects.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Effects.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Effects.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Effects.h 2019-03-21 07:31:31.000000000 +0000 @@ -29,13 +29,20 @@ */ /* Effects */ +#include "effects/Bars.h" #include "effects/Blur.h" #include "effects/Brightness.h" #include "effects/ChromaKey.h" +#include "effects/ColorShift.h" +#include "effects/Crop.h" #include "effects/Deinterlace.h" +#include "effects/Hue.h" #include "effects/Mask.h" #include "effects/Negate.h" +#include "effects/Pixelate.h" #include "effects/Saturation.h" +#include "effects/Shift.h" +#include "effects/Wave.h" #endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Enums.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Enums.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Enums.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Enums.h 2019-03-21 07:31:31.000000000 +0000 @@ -69,5 +69,13 @@ FRAME_DISPLAY_TIMELINE, ///< Display the timeline's frame number FRAME_DISPLAY_BOTH ///< Display both the clip's and timeline's frame number }; + + /// This enumeration determines the strategy when mixing audio with other clips. + enum VolumeMixType + { + VOLUME_MIX_NONE, ///< Do not apply any volume mixing adjustments. Just add the samples together. 
+ VOLUME_MIX_AVERAGE, ///< Evenly divide the overlapping clips volume keyframes, so that the sum does not exceed 100% + VOLUME_MIX_REDUCE ///< Reduce volume by about %25, and then mix (louder, but could cause pops if the sum exceeds 100%) + }; } #endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/FFmpegReader.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/FFmpegReader.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/FFmpegReader.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/FFmpegReader.h 2019-03-21 07:31:31.000000000 +0000 @@ -42,8 +42,10 @@ #include #include #include "CacheMemory.h" +#include "Clip.h" #include "Exceptions.h" #include "OpenMPUtilities.h" +#include "Settings.h" using namespace std; @@ -99,7 +101,7 @@ AVCodecContext *pCodecCtx, *aCodecCtx; AVStream *pStream, *aStream; AVPacket *packet; - AVPicture *pFrame; + AVFrame *pFrame; bool is_open; bool is_duration_known; bool check_interlace; @@ -154,9 +156,6 @@ /// Check the working queue, and move finished frames to the finished queue void CheckWorkingFrames(bool end_of_stream, int64_t requested_frame); - /// Convert image to RGB format - void convert_image(int64_t current_frame, AVPicture *copyFrame, int width, int height, PixelFormat pix_fmt); - /// Convert Frame Number into Audio PTS int64_t ConvertFrameToAudioPTS(int64_t frame_number); @@ -200,7 +199,7 @@ std::shared_ptr ReadStream(int64_t requested_frame); /// Remove AVFrame from cache (and deallocate it's memory) - void RemoveAVFrame(AVPicture*); + void RemoveAVFrame(AVFrame*); /// Remove AVPacket from cache (and deallocate it's memory) void RemoveAVPacket(AVPacket*); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/FFmpegUtilities.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/FFmpegUtilities.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/FFmpegUtilities.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/FFmpegUtilities.h 2019-03-21 07:31:31.000000000 +0000 @@ -34,12 +34,24 @@ #define UINT64_C(c) (c ## ULL) #endif + #ifndef IS_FFMPEG_3_2 + #define IS_FFMPEG_3_2 (LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 64, 101)) + #endif + // Include the FFmpeg headers extern "C" { #include #include #include + // Change this to the first version swrescale works + #if (LIBAVFORMAT_VERSION_MAJOR >= 57) + #define USE_SW + #endif + #ifdef USE_SW + #include + #else #include + #endif #include #include #include @@ -55,6 +67,10 @@ #if LIBAVFORMAT_VERSION_MAJOR >= 54 #include #endif + + #if IS_FFMPEG_3_2 + #include "libavutil/imgutils.h" + #endif } // This was removed from newer versions of FFmpeg (but still used in libopenshot) @@ -98,16 +114,155 @@ #define PIX_FMT_YUV420P AV_PIX_FMT_YUV420P #endif - #if LIBAVFORMAT_VERSION_MAJOR >= 55 + #ifdef USE_SW + #define SWR_CONVERT(ctx, out, linesize, out_count, in, linesize2, in_count) \ + swr_convert(ctx, out, out_count, (const uint8_t **)in, in_count) + #define SWR_ALLOC() swr_alloc() + #define SWR_CLOSE(ctx) {} + #define SWR_FREE(ctx) swr_free(ctx) + #define SWR_INIT(ctx) swr_init(ctx) + #define SWRCONTEXT SwrContext + #else + #define SWR_CONVERT(ctx, out, linesize, out_count, in, linesize2, in_count) \ + avresample_convert(ctx, out, linesize, out_count, (uint8_t **)in, linesize2, in_count) + #define SWR_ALLOC() avresample_alloc_context() + #define SWR_CLOSE(ctx) 
avresample_close(ctx) + #define SWR_FREE(ctx) avresample_free(ctx) + #define SWR_INIT(ctx) avresample_open(ctx) + #define SWRCONTEXT AVAudioResampleContext + #endif + + + #if (LIBAVFORMAT_VERSION_MAJOR >= 58) + #define AV_REGISTER_ALL + #define AVCODEC_REGISTER_ALL + #define AV_FILENAME url + #define MY_INPUT_BUFFER_PADDING_SIZE AV_INPUT_BUFFER_PADDING_SIZE + #define AV_ALLOCATE_FRAME() av_frame_alloc() + #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) av_image_alloc(av_frame->data, av_frame->linesize, width, height, pix_fmt, 1) + #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) + #define AV_FREE_FRAME(av_frame) av_frame_free(av_frame) + #define AV_FREE_PACKET(av_packet) av_packet_unref(av_packet) + #define AV_FREE_CONTEXT(av_context) avcodec_free_context(&av_context) + #define AV_GET_CODEC_TYPE(av_stream) av_stream->codecpar->codec_type + #define AV_FIND_DECODER_CODEC_ID(av_stream) av_stream->codecpar->codec_id + auto AV_GET_CODEC_CONTEXT = [](AVStream* av_stream, AVCodec* av_codec) { \ + AVCodecContext *context = avcodec_alloc_context3(av_codec); \ + avcodec_parameters_to_context(context, av_stream->codecpar); \ + return context; \ + }; + #define AV_GET_CODEC_PAR_CONTEXT(av_stream, av_codec) av_codec; + #define AV_GET_CODEC_FROM_STREAM(av_stream,codec_in) + #define AV_GET_CODEC_ATTRIBUTES(av_stream, av_context) av_stream->codecpar + #define AV_GET_CODEC_PIXEL_FORMAT(av_stream, av_context) (AVPixelFormat) av_stream->codecpar->format + #define AV_GET_SAMPLE_FORMAT(av_stream, av_context) av_stream->codecpar->format + #define AV_GET_IMAGE_SIZE(pix_fmt, width, height) av_image_get_buffer_size(pix_fmt, width, height, 1) + #define AV_COPY_PICTURE_DATA(av_frame, buffer, pix_fmt, width, height) av_image_fill_arrays(av_frame->data, av_frame->linesize, buffer, pix_fmt, width, height, 1) + #define AV_OUTPUT_CONTEXT(output_context, path) avformat_alloc_output_context2( output_context, NULL, NULL, path) + #define AV_OPTION_FIND(priv_data, name) av_opt_find(priv_data, name, NULL, 0, 0) + #define AV_OPTION_SET( av_stream, priv_data, name, value, avcodec) av_opt_set(priv_data, name, value, 0); avcodec_parameters_from_context(av_stream->codecpar, avcodec); + #define AV_FORMAT_NEW_STREAM(oc, st_codec, av_codec, av_st) av_st = avformat_new_stream(oc, NULL);\ + if (!av_st) \ + throw OutOfMemory("Could not allocate memory for the video stream.", path); \ + c = avcodec_alloc_context3(av_codec); \ + st_codec = c; \ + av_st->codecpar->codec_id = av_codec->id; + #define AV_COPY_PARAMS_FROM_CONTEXT(av_stream, av_codec) avcodec_parameters_from_context(av_stream->codecpar, av_codec); + #elif IS_FFMPEG_3_2 + #define AV_REGISTER_ALL av_register_all(); + #define AVCODEC_REGISTER_ALL avcodec_register_all(); + #define AV_FILENAME filename + #define MY_INPUT_BUFFER_PADDING_SIZE FF_INPUT_BUFFER_PADDING_SIZE + #define AV_ALLOCATE_FRAME() av_frame_alloc() + #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) av_image_alloc(av_frame->data, av_frame->linesize, width, height, pix_fmt, 1) + #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) + #define AV_FREE_FRAME(av_frame) av_frame_free(av_frame) + #define AV_FREE_PACKET(av_packet) av_packet_unref(av_packet) + #define AV_FREE_CONTEXT(av_context) avcodec_free_context(&av_context) + #define AV_GET_CODEC_TYPE(av_stream) av_stream->codecpar->codec_type + #define AV_FIND_DECODER_CODEC_ID(av_stream) av_stream->codecpar->codec_id + auto AV_GET_CODEC_CONTEXT = [](AVStream* av_stream, AVCodec* av_codec) { \ + AVCodecContext *context = 
avcodec_alloc_context3(av_codec); \ + avcodec_parameters_to_context(context, av_stream->codecpar); \ + return context; \ + }; + #define AV_GET_CODEC_PAR_CONTEXT(av_stream, av_codec) av_codec; + #define AV_GET_CODEC_FROM_STREAM(av_stream,codec_in) + #define AV_GET_CODEC_ATTRIBUTES(av_stream, av_context) av_stream->codecpar + #define AV_GET_CODEC_PIXEL_FORMAT(av_stream, av_context) (AVPixelFormat) av_stream->codecpar->format + #define AV_GET_SAMPLE_FORMAT(av_stream, av_context) av_stream->codecpar->format + #define AV_GET_IMAGE_SIZE(pix_fmt, width, height) av_image_get_buffer_size(pix_fmt, width, height, 1) + #define AV_COPY_PICTURE_DATA(av_frame, buffer, pix_fmt, width, height) av_image_fill_arrays(av_frame->data, av_frame->linesize, buffer, pix_fmt, width, height, 1) + #define AV_OUTPUT_CONTEXT(output_context, path) avformat_alloc_output_context2( output_context, NULL, NULL, path) + #define AV_OPTION_FIND(priv_data, name) av_opt_find(priv_data, name, NULL, 0, 0) + #define AV_OPTION_SET( av_stream, priv_data, name, value, avcodec) av_opt_set(priv_data, name, value, 0); avcodec_parameters_from_context(av_stream->codecpar, avcodec); + #define AV_FORMAT_NEW_STREAM(oc, st_codec, av_codec, av_st) av_st = avformat_new_stream(oc, NULL);\ + if (!av_st) \ + throw OutOfMemory("Could not allocate memory for the video stream.", path); \ + c = avcodec_alloc_context3(av_codec); \ + st_codec = c; \ + av_st->codecpar->codec_id = av_codec->id; + #define AV_COPY_PARAMS_FROM_CONTEXT(av_stream, av_codec) avcodec_parameters_from_context(av_stream->codecpar, av_codec); + #elif LIBAVFORMAT_VERSION_MAJOR >= 55 + #define AV_REGISTER_ALL av_register_all(); + #define AVCODEC_REGISTER_ALL avcodec_register_all(); + #define AV_FILENAME filename + #define MY_INPUT_BUFFER_PADDING_SIZE FF_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() av_frame_alloc() + #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) avpicture_alloc((AVPicture *) av_frame, pix_fmt, width, height) #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) #define AV_FREE_FRAME(av_frame) av_frame_free(av_frame) #define AV_FREE_PACKET(av_packet) av_packet_unref(av_packet) + #define AV_FREE_CONTEXT(av_context) avcodec_close(av_context) + #define AV_GET_CODEC_TYPE(av_stream) av_stream->codec->codec_type + #define AV_FIND_DECODER_CODEC_ID(av_stream) av_stream->codec->codec_id + #define AV_GET_CODEC_CONTEXT(av_stream, av_codec) av_stream->codec + #define AV_GET_CODEC_PAR_CONTEXT(av_stream, av_codec) av_stream->codec + #define AV_GET_CODEC_FROM_STREAM(av_stream, codec_in) codec_in = av_stream->codec; + #define AV_GET_CODEC_ATTRIBUTES(av_stream, av_context) av_context + #define AV_GET_CODEC_PIXEL_FORMAT(av_stream, av_context) av_context->pix_fmt + #define AV_GET_SAMPLE_FORMAT(av_stream, av_context) av_context->sample_fmt + #define AV_GET_IMAGE_SIZE(pix_fmt, width, height) avpicture_get_size(pix_fmt, width, height) + #define AV_COPY_PICTURE_DATA(av_frame, buffer, pix_fmt, width, height) avpicture_fill((AVPicture *) av_frame, buffer, pix_fmt, width, height) + #define AV_OUTPUT_CONTEXT(output_context, path) oc = avformat_alloc_context() + #define AV_OPTION_FIND(priv_data, name) av_opt_find(priv_data, name, NULL, 0, 0) + #define AV_OPTION_SET(av_stream, priv_data, name, value, avcodec) av_opt_set (priv_data, name, value, 0) + #define AV_FORMAT_NEW_STREAM( oc, av_context, av_codec, av_st) av_st = avformat_new_stream(oc, av_codec); \ + if (!av_st) \ + throw OutOfMemory("Could not allocate memory for the video stream.", path); \ + 
avcodec_get_context_defaults3(av_st->codec, av_codec); \ + c = av_st->codec; + #define AV_COPY_PARAMS_FROM_CONTEXT(av_stream, av_codec) #else + #define AV_REGISTER_ALL av_register_all(); + #define AVCODEC_REGISTER_ALL avcodec_register_all(); + #define AV_FILENAME filename + #define MY_INPUT_BUFFER_PADDING_SIZE FF_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() avcodec_alloc_frame() + #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) avpicture_alloc((AVPicture *) av_frame, pix_fmt, width, height) #define AV_RESET_FRAME(av_frame) avcodec_get_frame_defaults(av_frame) #define AV_FREE_FRAME(av_frame) avcodec_free_frame(av_frame) #define AV_FREE_PACKET(av_packet) av_free_packet(av_packet) + #define AV_FREE_CONTEXT(av_context) avcodec_close(av_context) + #define AV_GET_CODEC_TYPE(av_stream) av_stream->codec->codec_type + #define AV_FIND_DECODER_CODEC_ID(av_stream) av_stream->codec->codec_id + #define AV_GET_CODEC_CONTEXT(av_stream, av_codec) av_stream->codec + #define AV_GET_CODEC_PAR_CONTEXT(av_stream, av_codec) av_stream->codec + #define AV_GET_CODEC_FROM_STREAM(av_stream, codec_in ) codec_in = av_stream->codec; + #define AV_GET_CODEC_ATTRIBUTES(av_stream, av_context) av_context + #define AV_GET_CODEC_PIXEL_FORMAT(av_stream, av_context) av_context->pix_fmt + #define AV_GET_SAMPLE_FORMAT(av_stream, av_context) av_context->sample_fmt + #define AV_GET_IMAGE_SIZE(pix_fmt, width, height) avpicture_get_size(pix_fmt, width, height) + #define AV_COPY_PICTURE_DATA(av_frame, buffer, pix_fmt, width, height) avpicture_fill((AVPicture *) av_frame, buffer, pix_fmt, width, height) + #define AV_OUTPUT_CONTEXT(output_context, path) oc = avformat_alloc_context() + #define AV_OPTION_FIND(priv_data, name) av_opt_find(priv_data, name, NULL, 0, 0) + #define AV_OPTION_SET(av_stream, priv_data, name, value, avcodec) av_opt_set (priv_data, name, value, 0) + #define AV_FORMAT_NEW_STREAM( oc, av_context, av_codec, av_st) av_st = avformat_new_stream(oc, av_codec); \ + if (!av_st) \ + throw OutOfMemory("Could not allocate memory for the video stream.", path); \ + avcodec_get_context_defaults3(av_st->codec, av_codec); \ + c = av_st->codec; + #define AV_COPY_PARAMS_FROM_CONTEXT(av_stream, av_codec) #endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/FFmpegWriter.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/FFmpegWriter.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/FFmpegWriter.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/FFmpegWriter.h 2019-03-21 07:31:31.000000000 +0000 @@ -51,6 +51,7 @@ #include "Exceptions.h" #include "OpenMPUtilities.h" #include "ZmqLogger.h" +#include "Settings.h" using namespace std; @@ -174,8 +175,8 @@ int initial_audio_input_frame_size; int audio_input_position; int audio_encoder_buffer_size; - AVAudioResampleContext *avr; - AVAudioResampleContext *avr_planar; + SWRCONTEXT *avr; + SWRCONTEXT *avr_planar; /* Resample options */ int original_sample_rate; @@ -258,6 +259,9 @@ /// Determine if writer is open or closed bool IsOpen() { return is_open; }; + /// Determine if codec name is valid + static bool IsValidCodec(string codec_name); + /// Open writer void Open(); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Frame.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Frame.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Frame.h 2017-11-23 22:40:10.000000000 
+0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Frame.h 2019-03-21 07:31:31.000000000 +0000 @@ -62,7 +62,7 @@ #include "AudioResampler.h" #include "Fraction.h" - +#pragma SWIG nowarn=362 using namespace std; namespace openshot @@ -128,6 +128,8 @@ int width; int height; int sample_rate; + string color; + int64_t max_audio_sample; ///< The max audio sample count added to this frame /// Constrain a color value from 0 to 255 int constrain(int color_value); @@ -137,6 +139,7 @@ bool has_audio_data; ///< This frame has been loaded with audio data bool has_image_data; ///< This frame has been loaded with pixel data + /// Constructor - blank frame (300x200 blank image, 48kHz audio silence) Frame(); @@ -153,13 +156,13 @@ Frame ( const Frame &other ); /// Assignment operator - //Frame& operator= (const Frame& other); + Frame& operator= (const Frame& other); /// Destructor ~Frame(); /// Add (or replace) pixel data to the frame (based on a solid color) - void AddColor(int new_width, int new_height, string color); + void AddColor(int new_width, int new_height, string new_color); /// Add (or replace) pixel data to the frame void AddImage(int new_width, int new_height, int bytes_per_pixel, QImage::Format type, const unsigned char *pixels_); @@ -285,7 +288,7 @@ /// Thumbnail the frame image with tons of options to the specified path. The image format is determined from the extension (i.e. image.PNG, image.JPEG). /// This method allows for masks, overlays, background color, and much more accurate resizing (including padding and centering) void Thumbnail(string path, int new_width, int new_height, string mask_path, string overlay_path, - string background_color, bool ignore_aspect, string format="png", int quality=100); + string background_color, bool ignore_aspect, string format="png", int quality=100, float rotate=0.0); /// Play audio samples for this frame void Play(); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/FrameMapper.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/FrameMapper.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/FrameMapper.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/FrameMapper.h 2019-03-21 07:31:31.000000000 +0000 @@ -34,11 +34,11 @@ #include #include #include "CacheMemory.h" -#include "../include/ReaderBase.h" -#include "../include/Frame.h" -#include "../include/Fraction.h" -#include "../include/Exceptions.h" -#include "../include/KeyFrame.h" +#include "ReaderBase.h" +#include "Frame.h" +#include "Fraction.h" +#include "Exceptions.h" +#include "KeyFrame.h" // Include FFmpeg headers and macros @@ -146,8 +146,7 @@ ReaderBase *reader; // The source video reader CacheMemory final_cache; // Cache of actual Frame objects bool is_dirty; // When this is true, the next call to GetFrame will re-init the mapping - AVAudioResampleContext *avr; // Audio resampling context object - int64_t timeline_frame_offset; // Timeline frame offset + SWRCONTEXT *avr; // Audio resampling context object // Internal methods used by init void AddField(int64_t frame); @@ -176,9 +175,6 @@ /// Change frame rate or audio mapping details void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout); - // Set offset relative to parent timeline - void SetTimelineFrameOffset(int64_t offset); - /// Close the openshot::FrameMapper and internal reader void 
Close(); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/OpenMPUtilities.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/OpenMPUtilities.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/OpenMPUtilities.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/OpenMPUtilities.h 2019-03-21 07:31:31.000000000 +0000 @@ -29,8 +29,14 @@ #define OPENSHOT_OPENMP_UTILITIES_H #include +#include +#include + +// Calculate the # of OpenMP and FFmpeg Threads to allow. We are limiting both +// of these based on our own performance tests (more is not always better). +#define OPEN_MP_NUM_PROCESSORS (min(omp_get_num_procs(), 6)) +#define FF_NUM_PROCESSORS (min(omp_get_num_procs(), 12)) + - // Calculate the # of OpenMP Threads to allow - #define OPEN_MP_NUM_PROCESSORS omp_get_num_procs() #endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/OpenShot.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/OpenShot.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/OpenShot.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/OpenShot.h 2019-03-21 07:31:31.000000000 +0000 @@ -134,5 +134,6 @@ #include "Profiles.h" #include "QtImageReader.h" #include "Timeline.h" +#include "Settings.h" #endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/PlayerBase.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/PlayerBase.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/PlayerBase.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/PlayerBase.h 2019-03-21 07:31:31.000000000 +0000 @@ -29,7 +29,7 @@ #define OPENSHOT_PLAYER_BASE_H #include -#include "../include/ReaderBase.h" +#include "ReaderBase.h" using namespace std; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Qt/AudioPlaybackThread.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Qt/AudioPlaybackThread.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Qt/AudioPlaybackThread.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Qt/AudioPlaybackThread.h 2019-03-21 07:31:31.000000000 +0000 @@ -29,9 +29,9 @@ #ifndef OPENSHOT_AUDIO_PLAYBACK_THREAD_H #define OPENSHOT_AUDIO_PLAYBACK_THREAD_H -#include "../../include/ReaderBase.h" -#include "../../include/RendererBase.h" -#include "../../include/AudioReaderSource.h" +#include "../ReaderBase.h" +#include "../RendererBase.h" +#include "../AudioReaderSource.h" namespace openshot { diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Qt/PlayerPrivate.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Qt/PlayerPrivate.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Qt/PlayerPrivate.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Qt/PlayerPrivate.h 2019-03-21 07:31:31.000000000 +0000 @@ -29,12 +29,12 @@ #ifndef OPENSHOT_PLAYER_PRIVATE_H #define OPENSHOT_PLAYER_PRIVATE_H -#include "../../include/ReaderBase.h" -#include "../../include/RendererBase.h" -#include "../../include/AudioReaderSource.h" -#include "../../include/Qt/AudioPlaybackThread.h" -#include 
"../../include/Qt/VideoPlaybackThread.h" -#include "../../include/Qt/VideoCacheThread.h" +#include "../ReaderBase.h" +#include "../RendererBase.h" +#include "../AudioReaderSource.h" +#include "../Qt/AudioPlaybackThread.h" +#include "../Qt/VideoPlaybackThread.h" +#include "../Qt/VideoCacheThread.h" namespace openshot { diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Qt/VideoCacheThread.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Qt/VideoCacheThread.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Qt/VideoCacheThread.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Qt/VideoCacheThread.h 2019-03-21 07:31:31.000000000 +0000 @@ -28,9 +28,9 @@ #ifndef OPENSHOT_VIDEO_CACHE_THREAD_H #define OPENSHOT_VIDEO_CACHE_THREAD_H -#include "../../include/OpenMPUtilities.h" -#include "../../include/ReaderBase.h" -#include "../../include/RendererBase.h" +#include "../OpenMPUtilities.h" +#include "../ReaderBase.h" +#include "../RendererBase.h" namespace openshot { diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Qt/VideoPlaybackThread.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Qt/VideoPlaybackThread.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Qt/VideoPlaybackThread.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Qt/VideoPlaybackThread.h 2019-03-21 07:31:31.000000000 +0000 @@ -29,8 +29,8 @@ #ifndef OPENSHOT_VIDEO_PLAYBACK_THREAD_H #define OPENSHOT_VIDEO_PLAYBACK_THREAD_H -#include "../../include/ReaderBase.h" -#include "../../include/RendererBase.h" +#include "../ReaderBase.h" +#include "../RendererBase.h" namespace openshot { diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/QtImageReader.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/QtImageReader.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/QtImageReader.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/QtImageReader.h 2019-03-21 07:31:31.000000000 +0000 @@ -28,19 +28,14 @@ #ifndef OPENSHOT_QIMAGE_READER_H #define OPENSHOT_QIMAGE_READER_H -#include "ReaderBase.h" - #include #include #include #include #include #include -#include -#include -#include -#include "CacheMemory.h" #include "Exceptions.h" +#include "ReaderBase.h" using namespace std; @@ -110,9 +105,6 @@ Json::Value JsonValue(); ///< Generate Json::JsonValue for this object void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object - /// Set Max Image Size (used for performance optimization) - void SetMaxSize(int width, int height); - /// Open File - which is called by the constructor automatically void Open(); }; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/QtPlayer.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/QtPlayer.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/QtPlayer.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/QtPlayer.h 2019-03-21 07:31:31.000000000 +0000 @@ -31,9 +31,9 @@ #include #include -#include "../include/PlayerBase.h" -#include "../include/Qt/PlayerPrivate.h" -#include "../include/RendererBase.h" +#include "PlayerBase.h" +#include "Qt/PlayerPrivate.h" +#include "RendererBase.h" 
using namespace std; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/ReaderBase.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/ReaderBase.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/ReaderBase.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/ReaderBase.h 2019-03-21 07:31:31.000000000 +0000 @@ -35,6 +35,7 @@ #include #include "CacheMemory.h" #include "ChannelLayouts.h" +#include "ClipBase.h" #include "Fraction.h" #include "Frame.h" #include "Json.h" @@ -83,6 +84,7 @@ ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...) int audio_stream_index; ///< The index of the audio stream Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played + std::map metadata; ///< An optional map/dictionary of metadata for this reader }; /** @@ -98,9 +100,7 @@ /// Section lock for multiple threads CriticalSection getFrameCriticalSection; CriticalSection processingCriticalSection; - - int max_width; ///< The maximum image width needed by this clip (used for optimizations) - int max_height; ///< The maximium image height needed by this clip (used for optimizations) + ClipBase* parent; public: @@ -110,6 +110,12 @@ /// Information about the current media file ReaderInfo info; + /// Parent clip object of this reader (which can be unparented and NULL) + ClipBase* GetClip(); + + /// Set parent clip object of this reader + void SetClip(ClipBase* clip); + /// Close the reader (and any resources it was consuming) virtual void Close() = 0; @@ -139,9 +145,6 @@ virtual Json::Value JsonValue() = 0; ///< Generate Json::JsonValue for this object virtual void SetJsonValue(Json::Value root) = 0; ///< Load Json::JsonValue into this object - /// Set Max Image Size (used for performance optimization) - void SetMaxSize(int width, int height) { max_width = width; max_height = height; }; - /// Open the reader (and start consuming resources, such as images or video files) virtual void Open() = 0; }; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/RendererBase.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/RendererBase.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/RendererBase.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/RendererBase.h 2019-03-21 07:31:31.000000000 +0000 @@ -28,7 +28,7 @@ #ifndef OPENSHOT_RENDERER_BASE_H #define OPENSHOT_RENDERER_BASE_H -#include "../include/Frame.h" +#include "Frame.h" #include // for realloc #include diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Settings.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Settings.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Settings.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Settings.h 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,103 @@ +/** + * @file + * @brief Header file for global Settings class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . 
+ * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_SETTINGS_H +#define OPENSHOT_SETTINGS_H + + +#include "JuceLibraryCode/JuceHeader.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +using namespace std; + +namespace openshot { + + /** + * @brief This class is contains settings used by libopenshot (and can be safely toggled at any point) + * + * Settings class is used primarily to toggle scale settings between preview and rendering, and adjust + * other runtime related settings. + */ + class Settings { + private: + + /// Default constructor + Settings(){}; // Don't allow user to create an instance of this singleton + +#if __GNUC__ >=7 + /// Default copy method + Settings(Settings const&) = delete; // Don't allow the user to assign this instance + + /// Default assignment operator + Settings & operator=(Settings const&) = delete; // Don't allow the user to assign this instance +#else + /// Default copy method + Settings(Settings const&) {}; // Don't allow the user to assign this instance + + /// Default assignment operator + Settings & operator=(Settings const&); // Don't allow the user to assign this instance +#endif + + /// Private variable to keep track of singleton instance + static Settings * m_pInstance; + + public: + /// Use video card for faster video decoding (if supported) + bool HARDWARE_DECODE = false; + + /// Use video card for faster video encoding (if supported) + bool HARDWARE_ENCODE = false; + + /// Scale mode used in FFmpeg decoding and encoding (used as an optimization for faster previews) + bool HIGH_QUALITY_SCALING = false; + + /// Maximum width for image data (useful for optimzing for a smaller preview or render) + int MAX_WIDTH = 0; + + /// Maximum height for image data (useful for optimzing for a smaller preview or render) + int MAX_HEIGHT = 0; + + /// Wait for OpenMP task to finish before continuing (used to limit threads on slower systems) + bool WAIT_FOR_VIDEO_PROCESSING_TASK = false; + + /// Create or get an instance of this logger singleton (invoke the class with this method) + static Settings * Instance(); + }; + +} + +#endif diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Timeline.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Timeline.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Timeline.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Timeline.h 2019-03-21 07:31:31.000000000 +0000 @@ -48,6 +48,7 @@ #include "KeyFrame.h" #include "OpenMPUtilities.h" #include "ReaderBase.h" +#include "Settings.h" using namespace std; using namespace openshot; @@ -153,7 +154,7 @@ CacheBase *final_cache; /// new_frame, Clip* source_clip, int64_t clip_frame_number, int64_t timeline_frame_number, bool is_top_clip); + void 
add_layer(std::shared_ptr new_frame, Clip* source_clip, int64_t clip_frame_number, int64_t timeline_frame_number, bool is_top_clip, float max_volume); /// Apply a FrameMapper to a clip which matches the settings of this timeline void apply_mapper_to_clip(Clip* clip); @@ -265,6 +266,10 @@ Json::Value JsonValue(); ///< Generate Json::JsonValue for this object void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + /// Set Max Image Size (used for performance optimization). Convenience function for setting + /// Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT. + void SetMaxSize(int width, int height); + /// @brief Apply a special formatted JSON object, which represents a change to the timeline (add, update, delete) /// This is primarily designed to keep the timeline (and its child objects... such as clips and effects) in sync /// with another application... such as OpenShot Video Editor (http://www.openshot.org). diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Version.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Version.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/Version.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/Version.h 2019-03-21 07:31:31.000000000 +0000 @@ -35,9 +35,9 @@ #endif #define OPENSHOT_VERSION_MAJOR 0; /// Major version number is incremented when huge features are added or improved. -#define OPENSHOT_VERSION_MINOR 1; /// Minor version is incremented when smaller (but still very important) improvements are added. -#define OPENSHOT_VERSION_BUILD 9; /// Build number is incremented when minor bug fixes and less important improvements are added. -#define OPENSHOT_VERSION_SO 14; /// Shared object version number. This increments any time the API and ABI changes (so old apps will no longer link) +#define OPENSHOT_VERSION_MINOR 2; /// Minor version is incremented when smaller (but still very important) improvements are added. +#define OPENSHOT_VERSION_BUILD 3; /// Build number is incremented when minor bug fixes and less important improvements are added. +#define OPENSHOT_VERSION_SO 17; /// Shared object version number. This increments any time the API and ABI changes (so old apps will no longer link) #define OPENSHOT_VERSION_MAJOR_MINOR STRINGIZE(OPENSHOT_VERSION_MAJOR) "." STRINGIZE(OPENSHOT_VERSION_MINOR); /// A string of the "Major.Minor" version #define OPENSHOT_VERSION_ALL STRINGIZE(OPENSHOT_VERSION_MAJOR) "." STRINGIZE(OPENSHOT_VERSION_MINOR) "." STRINGIZE(OPENSHOT_VERSION_BUILD); /// A string of the entire version "Major.Minor.Build" diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/WriterBase.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/WriterBase.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/WriterBase.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/WriterBase.h 2019-03-21 07:31:31.000000000 +0000 @@ -73,6 +73,7 @@ ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...) 
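The new Settings singleton above replaces the per-reader SetMaxSize/max_width/max_height members that this diff removes from ReaderBase and QtImageReader. All of its members are public, so callers simply mutate the shared instance; a small sketch with illustrative values:

```
// Illustrative sketch only: tune the global Settings singleton before rendering a preview.
#include "Settings.h"

void configure_preview() {
    openshot::Settings *s = openshot::Settings::Instance();

    // Cap decoded image size for faster previews (both default to 0)
    s->MAX_WIDTH = 1280;
    s->MAX_HEIGHT = 720;

    // Trade scaling quality for speed while scrubbing
    s->HIGH_QUALITY_SCALING = false;

    // Only enable these if the local FFmpeg build supports hardware codecs
    s->HARDWARE_DECODE = false;
    s->HARDWARE_ENCODE = false;
}
```

Timeline::SetMaxSize(width, height), added in Timeline.h above, is described as a convenience wrapper that sets the same MAX_WIDTH and MAX_HEIGHT fields.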
int audio_stream_index; ///< The index of the audio stream Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played + std::map metadata; ///< An optional map/dictionary of video & audio metadata }; /** diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/ZmqLogger.h libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/ZmqLogger.h --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/include/ZmqLogger.h 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/include/ZmqLogger.h 2019-03-21 07:31:31.000000000 +0000 @@ -47,11 +47,10 @@ namespace openshot { /** - * @brief This abstract class is the base class, used by all readers in libopenshot. + * @brief This class is used for logging and sending those logs over a ZemoMQ socket to a listener * - * Readers are types of classes that read video, audio, and image files, and - * return openshot::Frame objects. The only requirements for a 'reader', are to - * derive from this base class, implement the GetFrame method, and call the InitFileInfo() method. + * OpenShot desktop editor listens to this port, to receive libopenshot debug output. It both logs to + * a file and sends the stdout over a socket. */ class ZmqLogger { private: @@ -72,11 +71,19 @@ /// Default constructor ZmqLogger(){}; // Don't allow user to create an instance of this singleton +#if __GNUC__ >=7 /// Default copy method - ZmqLogger(ZmqLogger const&){}; // Don't allow the user to copy this instance + ZmqLogger(ZmqLogger const&) = delete; // Don't allow the user to assign this instance /// Default assignment operator - ZmqLogger & operator=(ZmqLogger const&){}; // Don't allow the user to assign this instance + ZmqLogger & operator=(ZmqLogger const&) = delete; // Don't allow the user to assign this instance +#else + /// Default copy method + ZmqLogger(ZmqLogger const&) {}; // Don't allow the user to assign this instance + + /// Default assignment operator + ZmqLogger & operator=(ZmqLogger const&); // Don't allow the user to assign this instance +#endif /// Private variable to keep track of singleton instance static ZmqLogger * m_pInstance; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/INSTALL.md libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/INSTALL.md --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/INSTALL.md 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/INSTALL.md 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,153 @@ +## Detailed Install Instructions + +Operating system specific install instructions are located in: + +* doc/INSTALL-LINUX.md +* doc/INSTALL-MAC.md +* doc/INSTALL-WINDOWS.md + +## Getting Started + +The best way to get started with libopenshot, is to learn about our build system, obtain all the source code, +install a development IDE and tools, and better understand our dependencies. So, please read through the +following sections, and follow the instructions. And keep in mind, that your computer is likely different +than the one used when writing these instructions. Your file paths and versions of applications might be +slightly different, so keep an eye out for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. 
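The ZmqLogger change above (mirrored in the new Settings class) swaps the old private, empty copy constructor and assignment operator for C++11 `= delete` declarations when building with GCC 7 or newer. Stripped of the compiler-version guard, the intended singleton shape is simply the following generic sketch (the class name is a placeholder, not libopenshot code):

```
class SomeSingleton {
public:
    static SomeSingleton *Instance();                          // sole way to obtain the object

    SomeSingleton(const SomeSingleton &) = delete;             // copying is a compile error
    SomeSingleton &operator=(const SomeSingleton &) = delete;  // so is assignment

private:
    SomeSingleton() {}                                         // constructed only by Instance()
    static SomeSingleton *m_pInstance;
};
```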
It is a cross-platform build system, which checks for dependencies, +locates header files and libraries, generates makefiles, and supports the cross-platform compiling of +libopenshot and libopenshot-audio. CMake uses an out-of-source build concept, where all temporary build +files, such as makefiles, object files, and even the final binaries, are created outside of the source +code folder, inside a /build/ sub-folder. This prevents the build process from cluttering up the source +code. These instructions have only been tested with the GNU compiler (including MSYS2/MinGW for Windows). + +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to install these +dependencies vary for each operating system. Libraries and Executables have been labeled in the +list below to help distinguish between them. + +* ### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes. + +* ### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +* ### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. It is based on the JUCE project, which is an outstanding audio library used by many different applications + +* ### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc... + +* ### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. + +* ### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. + +* ### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +* ### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. + +* ### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. + +* ### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. 
+ +* ### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot is compiled. Some of these flags might be required when compiling on certain OSes, just depending on how your build environment is setup. To add a build flag, follow this general syntax: $ cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../ + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code is available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following command to obtain the latest libopenshot source code. + +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. + +* ### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary build files, such as makefiles, as well as the final binaries (library and test executables). + +* ### cmake/ + * This folder contains custom modules not included by default in cmake, used to find dependency libraries and headers and determine if these libraries are installed. + +* ### doc/ + * This folder contains documentation and related files, such as logos and images required by the doxygen auto-generated documentation. + +* ### include/ + * This folder contains all headers (*.h) used by libopenshot. + +* ### src/ + * This folder contains all source code (*.cpp) used by libopenshot. + +* ### tests/ + * This folder contains all unit test code. Each class has it’s own test file (*.cpp), and uses UnitTest++ macros to keep the test code simple and manageable. + +* ### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an open-source JSON parser. + +## Linux Build Instructions (libopenshot-audio) +To compile libopenshot-audio, we need to go through a few additional steps to manually build and install it. 
Launch a terminal and enter: + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake ../ +make +make install +./src/openshot-audio-test-sound (This should play a test sound) +``` + +## Linux Build Instructions (libopenshot) +Run the following commands to compile libopenshot: + +``` +cd [libopenshot repo directory] +mkdir -p build +cd build +cmake ../ +make +make install +``` + +For more detailed instructions, please see: + +* doc/INSTALL-LINUX.md +* doc/INSTALL-MAC.md +* doc/INSTALL-WINDOWS.md diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.pc/Fix-python-module-install-directory.patch/src/bindings/python/CMakeLists.txt libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.pc/Fix-python-module-install-directory.patch/src/bindings/python/CMakeLists.txt --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.pc/Fix-python-module-install-directory.patch/src/bindings/python/CMakeLists.txt 2017-11-23 22:40:14.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.pc/Fix-python-module-install-directory.patch/src/bindings/python/CMakeLists.txt 2019-03-21 07:31:37.000000000 +0000 @@ -55,12 +55,11 @@ GET_FILENAME_COMPONENT(_ABS_PYTHON_MODULE_PATH "${_ABS_PYTHON_MODULE_PATH}" ABSOLUTE) FILE(RELATIVE_PATH _REL_PYTHON_MODULE_PATH ${CMAKE_INSTALL_PREFIX} ${_ABS_PYTHON_MODULE_PATH}) SET(PYTHON_MODULE_PATH ${_REL_PYTHON_MODULE_PATH}) - - + ############### INSTALL HEADERS & LIBRARY ################ ### Install Python bindings INSTALL(TARGETS _openshot DESTINATION ${PYTHON_MODULE_PATH} ) INSTALL(FILES ${CMAKE_CURRENT_BINARY_DIR}/openshot.py DESTINATION ${PYTHON_MODULE_PATH} ) ENDIF(PYTHONINTERP_FOUND) -ENDIF (PYTHONLIBS_FOUND) \ No newline at end of file +ENDIF (PYTHONLIBS_FOUND) diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.pc/.quilt_patches libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.pc/.quilt_patches --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.pc/.quilt_patches 2017-11-23 22:40:14.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.pc/.quilt_patches 2019-03-21 07:31:37.000000000 +0000 @@ -1 +1 @@ -/home/buildd/build-RECIPEBRANCHBUILD-1488990/chroot-autobuild/home/buildd/work/tree/libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/patches +/home/buildd/build-RECIPEBRANCHBUILD-2298211/chroot-autobuild/home/buildd/work/tree/recipe/debian/patches diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.pc/.quilt_series libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.pc/.quilt_series --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.pc/.quilt_series 2017-11-23 22:40:14.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.pc/.quilt_series 2019-03-21 07:31:37.000000000 +0000 @@ -1 +1 @@ -/home/buildd/build-RECIPEBRANCHBUILD-1488990/chroot-autobuild/home/buildd/work/tree/libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/debian/patches/series +/home/buildd/build-RECIPEBRANCHBUILD-2298211/chroot-autobuild/home/buildd/work/tree/recipe/debian/patches/series diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.project libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.project --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.project 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.project 1970-01-01 00:00:00.000000000 +0000 @@ -1,94 +0,0 @@ 
- - - libopenshot - - - - - - com.aptana.ide.core.unifiedBuilder - - - - - org.python.pydev.PyDevBuilder - - - - - org.eclipse.cdt.managedbuilder.core.genmakebuilder - clean,full,incremental, - - - ?name? - - - - org.eclipse.cdt.make.core.append_environment - true - - - org.eclipse.cdt.make.core.autoBuildTarget - all - - - org.eclipse.cdt.make.core.buildArguments - - - - org.eclipse.cdt.make.core.buildCommand - make - - - org.eclipse.cdt.make.core.buildLocation - ${workspace_loc:/libopenshot/build} - - - org.eclipse.cdt.make.core.cleanBuildTarget - clean - - - org.eclipse.cdt.make.core.contents - org.eclipse.cdt.make.core.activeConfigSettings - - - org.eclipse.cdt.make.core.enableAutoBuild - false - - - org.eclipse.cdt.make.core.enableCleanBuild - true - - - org.eclipse.cdt.make.core.enableFullBuild - true - - - org.eclipse.cdt.make.core.fullBuildTarget - all - - - org.eclipse.cdt.make.core.stopOnError - true - - - org.eclipse.cdt.make.core.useDefaultBuildCmd - true - - - - - org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder - - - - - - org.eclipse.cdt.core.cnature - org.eclipse.cdt.core.ccnature - org.eclipse.cdt.managedbuilder.core.managedBuildNature - org.eclipse.cdt.managedbuilder.core.ScannerConfigNature - org.python.pydev.pythonNature - com.aptana.ruby.core.rubynature - - diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/README libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/README --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/README 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/README 1970-01-01 00:00:00.000000000 +0000 @@ -1,66 +0,0 @@ -#################################################################### - OpenShot Library -#################################################################### - -OpenShot Library (libopenshot) is an open-source project dedicated to -delivering high quality video editing, animation, and playback solutions -to the world. For more information visit . - -#################################################################### - License -#################################################################### - -Copyright (c) 2008-2014 OpenShot Studios, LLC -. - -OpenShot Library (libopenshot) is free software: you can redistribute it -and/or modify it under the terms of the GNU Lesser General Public License -as published by the Free Software Foundation, either version 3 of the -License, or (at your option) any later version. - -OpenShot Library (libopenshot) is distributed in the hope that it will be -useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with OpenShot Library. If not, see . - -#################################################################### - - To release a closed-source product which uses libopenshot (i.e. video - editing and playback), commercial licenses are available: contact - sales@openshot.org for more information. - - -#################################################################### - Install -#################################################################### - -Please see /doc/InstallationGuide.pdf for a very detailed -Linux, Mac, and Windows compiling instruction guide. 
An online version -is also available: -https://docs.google.com/document/d/1V6nq-IuS9zxqO1-OSt8iTS_cw_HMCpsUNofHLYtUNjM/pub - - -#################################################################### - Documentation -#################################################################### - -Documentation is auto-generated by Doxygen, and can be created with -$ make doc (Also available online: ) - - -#################################################################### - Authors -#################################################################### - -Please see AUTHORS file for a full list of authors. - - -#################################################################### - www.openshot.org | www.openshotstudios.com -#################################################################### - - Copyright (c) 2008-2014 OpenShot Studios, LLC - . diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/README.md libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/README.md --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/README.md 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/README.md 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,85 @@ +OpenShot Video Library (libopenshot) is a free, open-source C++ library dedicated to +delivering high quality video editing, animation, and playback solutions to the +world. + +## Build Status + +[![Build Status](https://img.shields.io/travis/OpenShot/libopenshot/develop.svg?label=libopenshot)](https://travis-ci.org/OpenShot/libopenshot) [![Build Status](https://img.shields.io/travis/OpenShot/libopenshot-audio/develop.svg?label=libopenshot-audio)](https://travis-ci.org/OpenShot/libopenshot-audio) + +## Features + +* Cross-Platform (Linux, Mac, and Windows) +* Multi-Layer Compositing +* Video and Audio Effects (Chroma Key, Color Adjustment, Grayscale, etc…) +* Animation Curves (Bézier, Linear, Constant) +* Time Mapping (Curve-based Slow Down, Speed Up, Reverse) +* Audio Mixing & Resampling (Curve-based) +* Audio Plug-ins (VST & AU) +* Audio Drivers (ASIO, WASAPI, DirectSound, CoreAudio, iPhone Audio, ALSA, JACK, and Android) +* Telecine and Inverse Telecine (Film to TV, TV to Film) +* Frame Rate Conversions +* Multi-Processor Support (Performance) +* Python and Ruby Bindings (All Features Supported) +* Qt Video Player Included (Ability to display video on any QWidget) +* Unit Tests (Stability) +* All FFmpeg Formats and Codecs Supported (Images, Videos, and Audio files) +* Full Documentation with Examples (Doxygen Generated) + +## Install + +Detailed instructions for building libopenshot and libopenshot-audio for each OS. These instructions +are also available in the /docs/ source folder. + + * [Linux](https://github.com/OpenShot/libopenshot/wiki/Linux-Build-Instructions) + * [Mac](https://github.com/OpenShot/libopenshot/wiki/Mac-Build-Instructions) + * [Windows](https://github.com/OpenShot/libopenshot/wiki/Windows-Build-Instructions) + +## Documentation + +Beautiful HTML documentation can be generated using Doxygen. +``` +make doc +``` +(Also available online: http://openshot.org/files/libopenshot/) + +## Developers + +Are you interested in becoming more involved in the development of +OpenShot? Build exciting new features, fix bugs, make friends, and become a hero! +Please read the [step-by-step](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer) +instructions for getting source code, configuring dependencies, and building OpenShot. 
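As a quick, illustrative sketch only (not part of the upstream README shown in this diff): the feature list above advertises Python bindings, and the SWIG changes later in this diff wrap the reader metadata as a string map (`std_map.i` / `MappedMetadata`). A minimal use of those bindings might look like the snippet below, assuming the built `openshot` Python module is importable, `test.mp4` is a placeholder you replace with a real media file, and the SWIG map wrapper exposes the usual dict-like lookups.

```python
# Minimal sketch: read basic info and one frame through the Python bindings.
# Assumptions: the built `openshot` module is on PYTHONPATH, and "test.mp4"
# stands in for a real media file on disk.
import openshot

reader = openshot.FFmpegReader("test.mp4")
reader.Open()

# Basic stream information exposed on reader.info
print(reader.info.width, reader.info.height, reader.info.fps.ToFloat())

# Container metadata wrapped as a string->string map (the MappedMetadata
# binding added in this diff); phone footage often carries a "rotate" entry.
meta = reader.info.metadata
if "rotate" in meta:  # exact dict-like API depends on the SWIG version
    print("rotate:", meta["rotate"])

frame = reader.GetFrame(1)  # frame numbers are 1-based
print("decoded frame", frame.number)

reader.Close()
```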
+ +## Report a bug + +You can report a new libopenshot issue directly on GitHub: + +https://github.com/OpenShot/libopenshot/issues + +## Websites + +- https://www.openshot.org/ (Official website and blog) +- https://github.com/OpenShot/libopenshot/ (source code and issue tracker) +- https://github.com/OpenShot/libopenshot-audio/ (source code for audio library) +- https://github.com/OpenShot/openshot-qt/ (source code for Qt client) +- https://launchpad.net/openshot/ + +### License + +Copyright (c) 2008-2019 OpenShot Studios, LLC. + +OpenShot Library (libopenshot) is free software: you can redistribute it +and/or modify it under the terms of the GNU Lesser General Public License +as published by the Free Software Foundation, either version 3 of the +License, or (at your option) any later version. + +OpenShot Library (libopenshot) is distributed in the hope that it will be +useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public License +along with OpenShot Library. If not, see http://www.gnu.org/licenses/. + +To release a closed-source product which uses libopenshot (i.e. video +editing and playback), commercial licenses are also available: contact +sales@openshot.org for more information. diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/bindings/python/openshot.i libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/bindings/python/openshot.i --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/bindings/python/openshot.i 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/bindings/python/openshot.i 2019-03-21 07:31:31.000000000 +0000 @@ -35,6 +35,7 @@ %include "std_string.i" %include "std_list.i" %include "std_vector.i" +%include "std_map.i" %include /* Unhandled STL Exception Handling */ @@ -83,6 +84,7 @@ #include "../../../include/QtPlayer.h" #include "../../../include/KeyFrame.h" #include "../../../include/RendererBase.h" +#include "../../../include/Settings.h" #include "../../../include/Timeline.h" #include "../../../include/ZmqLogger.h" @@ -149,6 +151,7 @@ %include "../../../include/QtPlayer.h" %include "../../../include/KeyFrame.h" %include "../../../include/RendererBase.h" +%include "../../../include/Settings.h" %include "../../../include/Timeline.h" %include "../../../include/ZmqLogger.h" @@ -159,13 +162,20 @@ #endif /* Effects */ +%include "../../../include/effects/Bars.h" %include "../../../include/effects/Blur.h" %include "../../../include/effects/Brightness.h" %include "../../../include/effects/ChromaKey.h" +%include "../../../include/effects/ColorShift.h" +%include "../../../include/effects/Crop.h" %include "../../../include/effects/Deinterlace.h" +%include "../../../include/effects/Hue.h" %include "../../../include/effects/Mask.h" %include "../../../include/effects/Negate.h" +%include "../../../include/effects/Pixelate.h" %include "../../../include/effects/Saturation.h" +%include "../../../include/effects/Shift.h" +%include "../../../include/effects/Wave.h" /* Wrap std templates (list, vector, etc...) 
*/ @@ -176,4 +186,5 @@ %template(PointsVector) vector; %template(FieldVector) vector; %template(MappedFrameVector) vector; + %template(MappedMetadata) map; } diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/bindings/ruby/openshot.i libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/bindings/ruby/openshot.i --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/bindings/ruby/openshot.i 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/bindings/ruby/openshot.i 2019-03-21 07:31:31.000000000 +0000 @@ -35,17 +35,17 @@ %include "std_string.i" %include "std_list.i" %include "std_vector.i" +%include "std_map.i" /* Unhandled STL Exception Handling */ %include namespace std { -{ template class shared_ptr { public: T *operator->(); }; -}} +} /* Mark these classes as shared_ptr classes */ #ifdef USE_IMAGEMAGICK @@ -88,6 +88,7 @@ #include "../../../include/QtPlayer.h" #include "../../../include/KeyFrame.h" #include "../../../include/RendererBase.h" +#include "../../../include/Settings.h" #include "../../../include/Timeline.h" #include "../../../include/ZmqLogger.h" @@ -143,6 +144,7 @@ %include "../../../include/QtPlayer.h" %include "../../../include/KeyFrame.h" %include "../../../include/RendererBase.h" +%include "../../../include/Settings.h" %include "../../../include/Timeline.h" %include "../../../include/ZmqLogger.h" @@ -152,14 +154,22 @@ %include "../../../include/TextReader.h" #endif + /* Effects */ +%include "../../../include/effects/Bars.h" %include "../../../include/effects/Blur.h" %include "../../../include/effects/Brightness.h" %include "../../../include/effects/ChromaKey.h" +%include "../../../include/effects/ColorShift.h" +%include "../../../include/effects/Crop.h" %include "../../../include/effects/Deinterlace.h" +%include "../../../include/effects/Hue.h" %include "../../../include/effects/Mask.h" %include "../../../include/effects/Negate.h" +%include "../../../include/effects/Pixelate.h" %include "../../../include/effects/Saturation.h" +%include "../../../include/effects/Shift.h" +%include "../../../include/effects/Wave.h" /* Wrap std templates (list, vector, etc...) 
*/ @@ -170,4 +180,5 @@ %template(PointsVector) vector; %template(FieldVector) vector; %template(MappedFrameVector) vector; + %template(MappedMetadata) map; } diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/CacheDisk.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/CacheDisk.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/CacheDisk.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/CacheDisk.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -471,7 +471,7 @@ // Generate Json::JsonValue for this object Json::Value CacheDisk::JsonValue() { - // Proccess range data (if anything has changed) + // Process range data (if anything has changed) CalculateRanges(); // Create root json object diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/CacheMemory.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/CacheMemory.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/CacheMemory.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/CacheMemory.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -327,7 +327,7 @@ // Generate Json::JsonValue for this object Json::Value CacheMemory::JsonValue() { - // Proccess range data (if anything has changed) + // Process range data (if anything has changed) CalculateRanges(); // Create root json object diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/ChunkReader.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/ChunkReader.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/ChunkReader.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/ChunkReader.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -26,6 +26,7 @@ */ #include "../include/ChunkReader.h" +#include "../include/FFmpegReader.h" using namespace openshot; @@ -227,7 +228,6 @@ cout << "Load READER: " << chunk_video_path << endl; // Load new FFmpegReader local_reader = new FFmpegReader(chunk_video_path); - local_reader->enable_seek = false; // disable seeking local_reader->Open(); // open reader } catch (InvalidFile) diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Clip.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Clip.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Clip.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Clip.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -26,6 +26,15 @@ */ #include "../include/Clip.h" +#include "../include/FFmpegReader.h" +#include "../include/FrameMapper.h" +#ifdef USE_IMAGEMAGICK + #include "../include/ImageReader.h" + #include "../include/TextReader.h" +#endif +#include "../include/QtImageReader.h" +#include "../include/ChunkReader.h" +#include "../include/DummyReader.h" using namespace openshot; @@ -41,6 +50,7 @@ scale = SCALE_FIT; anchor = ANCHOR_CANVAS; display = FRAME_DISPLAY_NONE; + mixing = VOLUME_MIX_NONE; waveform = false; previous_properties = ""; @@ -52,9 +62,11 @@ location_x = Keyframe(0.0); location_y = Keyframe(0.0); - // Init alpha & rotation + // Init alpha alpha = Keyframe(1.0); - rotation = Keyframe(0.0); + + // Init rotation + init_reader_rotation(); // Init time & volume time = Keyframe(1.0); @@ -91,28 +103,46 @@ has_video = Keyframe(-1.0); // Default pointers - reader = NULL; - resampler 
= NULL; - audio_cache = NULL; manage_reader = false; } +// Init reader's rotation (if any) +void Clip::init_reader_rotation() { + // Only init rotation from reader when needed + if (rotation.Points.size() > 1) + // Do nothing if more than 1 rotation Point + return; + else if (rotation.Points.size() == 1 && rotation.GetValue(1) != 0.0) + // Do nothing if 1 Point, and it's not the default value + return; + + // Init rotation + if (reader && reader->info.metadata.count("rotate") > 0) { + // Use reader metadata rotation (if any) + // This is typical with cell phone videos filmed in different orientations + try { + float rotate_metadata = strtof(reader->info.metadata["rotate"].c_str(), 0); + rotation = Keyframe(rotate_metadata); + } catch (exception e) {} + } + else + // Default no rotation + rotation = Keyframe(0.0); +} + // Default Constructor for a clip -Clip::Clip() +Clip::Clip() : reader(NULL), resampler(NULL), audio_cache(NULL) { // Init all default settings init_settings(); } // Constructor with reader -Clip::Clip(ReaderBase* new_reader) +Clip::Clip(ReaderBase* new_reader) : reader(new_reader), resampler(NULL), audio_cache(NULL) { // Init all default settings init_settings(); - // Set the reader - reader = new_reader; - // Open and Close the reader (to set the duration of the clip) Open(); Close(); @@ -122,7 +152,7 @@ } // Constructor with filepath -Clip::Clip(string path) +Clip::Clip(string path) : reader(NULL), resampler(NULL), audio_cache(NULL) { // Init all default settings init_settings(); @@ -165,6 +195,7 @@ if (reader) { End(reader->info.duration); manage_reader = true; + init_reader_rotation(); } } @@ -189,6 +220,12 @@ { // set reader pointer reader = new_reader; + + // set parent + reader->SetClip(this); + + // Init rotation (if any) + init_reader_rotation(); } /// Get the current reader @@ -303,13 +340,13 @@ frame->AddAudio(true, channel, 0, original_frame->GetAudioSamples(channel), original_frame->GetAudioSamplesCount(), 1.0); // Get time mapped frame number (used to increase speed, change direction, etc...) 
- std::shared_ptr new_frame = get_time_mapped_frame(frame, requested_frame); + get_time_mapped_frame(frame, requested_frame); // Apply effects to the frame (if any) - apply_effects(new_frame); + apply_effects(frame); // Return processed 'frame' - return new_frame; + return frame; } else // Throw error if reader not initialized @@ -352,7 +389,7 @@ } // Adjust the audio and image of a time mapped frame -std::shared_ptr Clip::get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number) +void Clip::get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number) { // Check for valid reader if (!reader) @@ -363,7 +400,6 @@ if (time.Values.size() > 1) { const GenericScopedLock lock(getFrameCriticalSection); - std::shared_ptr new_frame; // create buffer and resampler juce::AudioSampleBuffer *samples = NULL; @@ -371,15 +407,7 @@ resampler = new AudioResampler(); // Get new frame number - int new_frame_number = adjust_frame_number_minimum(round(time.GetValue(frame_number))); - - // Create a new frame - int samples_in_frame = Frame::GetSamplesPerFrame(new_frame_number, reader->info.fps, reader->info.sample_rate, frame->GetAudioChannelsCount()); - new_frame = std::make_shared(new_frame_number, 1, 1, "#000000", samples_in_frame, frame->GetAudioChannelsCount()); - - // Copy the image from the new frame - new_frame->AddImage(GetOrCreateFrame(new_frame_number)->GetImage()); - + int new_frame_number = frame->number; // Get delta (difference in previous Y value) int delta = int(round(time.GetDelta(frame_number))); @@ -427,7 +455,7 @@ start -= 1; for (int channel = 0; channel < channels; channel++) // Add new (slower) samples, to the frame object - new_frame->AddAudio(true, channel, 0, resampled_buffer->getReadPointer(channel, start), + frame->AddAudio(true, channel, 0, resampled_buffer->getReadPointer(channel, start), number_of_samples, 1.0f); // Clean up @@ -535,7 +563,7 @@ // Add the newly resized audio samples to the current frame for (int channel = 0; channel < channels; channel++) // Add new (slower) samples, to the frame object - new_frame->AddAudio(true, channel, 0, buffer->getReadPointer(channel), number_of_samples, 1.0f); + frame->AddAudio(true, channel, 0, buffer->getReadPointer(channel), number_of_samples, 1.0f); // Clean up buffer = NULL; @@ -556,7 +584,7 @@ // Add reversed samples to the frame object for (int channel = 0; channel < channels; channel++) - new_frame->AddAudio(true, channel, 0, samples->getReadPointer(channel), number_of_samples, 1.0f); + frame->AddAudio(true, channel, 0, samples->getReadPointer(channel), number_of_samples, 1.0f); } @@ -564,13 +592,7 @@ delete samples; samples = NULL; } - - // Return new time mapped frame - return new_frame; - - } else - // Use original frame - return frame; + } } // Adjust frame number minimum value @@ -596,35 +618,6 @@ // Debug output ZmqLogger::Instance()->AppendDebugMethod("Clip::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); - // Determine the max size of this clips source image (based on the timeline's size, the scaling mode, - // and the scaling keyframes). This is a performance improvement, to keep the images as small as possible, - // without loosing quality. NOTE: We cannot go smaller than the timeline itself, or the add_layer timeline - // method will scale it back to timeline size before scaling it smaller again. This needs to be fixed in - // the future. 
- if (scale == SCALE_FIT || scale == SCALE_STRETCH) { - // Best fit or Stretch scaling (based on max timeline size * scaling keyframes) - float max_scale_x = scale_x.GetMaxPoint().co.Y; - float max_scale_y = scale_y.GetMaxPoint().co.Y; - reader->SetMaxSize(max(float(max_width), max_width * max_scale_x), max(float(max_height), max_height * max_scale_y)); - - } else if (scale == SCALE_CROP) { - // Cropping scale mode (based on max timeline size * cropped size * scaling keyframes) - float max_scale_x = scale_x.GetMaxPoint().co.Y; - float max_scale_y = scale_y.GetMaxPoint().co.Y; - QSize width_size(max_width * max_scale_x, round(max_width / (float(reader->info.width) / float(reader->info.height)))); - QSize height_size(round(max_height / (float(reader->info.height) / float(reader->info.width))), max_height * max_scale_y); - - // respect aspect ratio - if (width_size.width() >= max_width && width_size.height() >= max_height) - reader->SetMaxSize(max(max_width, width_size.width()), max(max_height, width_size.height())); - else - reader->SetMaxSize(max(max_width, height_size.width()), max(max_height, height_size.height())); - - } else { - // No scaling, use original image size (slower) - reader->SetMaxSize(0, 0); - } - // Attempt to get a frame (but this could fail if a reader has just been closed) new_frame = reader->GetFrame(number); @@ -647,6 +640,7 @@ new_frame = std::make_shared(number, reader->info.width, reader->info.height, "#000000", samples_in_frame, reader->info.channels); new_frame->SampleRate(reader->info.sample_rate); new_frame->ChannelsLayout(reader->info.channel_layout); + new_frame->AddAudioSilence(samples_in_frame); return new_frame; } @@ -670,8 +664,8 @@ root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 30 * 60 * 60 * 48, true, requested_frame); root["gravity"] = add_property_json("Gravity", gravity, "int", "", NULL, 0, 8, false, requested_frame); root["scale"] = add_property_json("Scale", scale, "int", "", NULL, 0, 3, false, requested_frame); - root["anchor"] = add_property_json("Anchor", anchor, "int", "", NULL, 0, 1, false, requested_frame); root["display"] = add_property_json("Frame Number", display, "int", "", NULL, 0, 3, false, requested_frame); + root["mixing"] = add_property_json("Volume Mixing", mixing, "int", "", NULL, 0, 2, false, requested_frame); root["waveform"] = add_property_json("Waveform", waveform, "int", "", NULL, 0, 1, false, requested_frame); // Add gravity choices (dropdown style) @@ -691,16 +685,17 @@ root["scale"]["choices"].append(add_property_choice_json("Stretch", SCALE_STRETCH, scale)); root["scale"]["choices"].append(add_property_choice_json("None", SCALE_NONE, scale)); - // Add anchor choices (dropdown style) - root["anchor"]["choices"].append(add_property_choice_json("Canvas", ANCHOR_CANVAS, anchor)); - root["anchor"]["choices"].append(add_property_choice_json("Viewport", ANCHOR_VIEWPORT, anchor)); - // Add frame number display choices (dropdown style) root["display"]["choices"].append(add_property_choice_json("None", FRAME_DISPLAY_NONE, display)); root["display"]["choices"].append(add_property_choice_json("Clip", FRAME_DISPLAY_CLIP, display)); root["display"]["choices"].append(add_property_choice_json("Timeline", FRAME_DISPLAY_TIMELINE, display)); root["display"]["choices"].append(add_property_choice_json("Both", FRAME_DISPLAY_BOTH, display)); + // Add volume mixing choices (dropdown style) + root["mixing"]["choices"].append(add_property_choice_json("None", VOLUME_MIX_NONE, mixing)); + 
root["mixing"]["choices"].append(add_property_choice_json("Average", VOLUME_MIX_AVERAGE, mixing)); + root["mixing"]["choices"].append(add_property_choice_json("Reduce", VOLUME_MIX_REDUCE, mixing)); + // Add waveform choices (dropdown style) root["waveform"]["choices"].append(add_property_choice_json("Yes", true, waveform)); root["waveform"]["choices"].append(add_property_choice_json("No", false, waveform)); @@ -740,6 +735,7 @@ root["scale"] = scale; root["anchor"] = anchor; root["display"] = display; + root["mixing"] = mixing; root["waveform"] = waveform; root["scale_x"] = scale_x.JsonValue(); root["scale_y"] = scale_y.JsonValue(); @@ -826,6 +822,8 @@ anchor = (AnchorType) root["anchor"].asInt(); if (!root["display"].isNull()) display = (FrameDisplayType) root["display"].asInt(); + if (!root["mixing"].isNull()) + mixing = (VolumeMixType) root["mixing"].asInt(); if (!root["waveform"].isNull()) waveform = root["waveform"].asBool(); if (!root["scale_x"].isNull()) @@ -897,13 +895,14 @@ if (!existing_effect["type"].isNull()) { // Create instance of effect - e = EffectInfo().CreateEffect(existing_effect["type"].asString()); + if (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) { - // Load Json into Effect - e->SetJsonValue(existing_effect); + // Load Json into Effect + e->SetJsonValue(existing_effect); - // Add Effect to Timeline - AddEffect(e); + // Add Effect to Timeline + AddEffect(e); + } } } } @@ -966,9 +965,11 @@ reader->SetJsonValue(root["reader"]); } - // mark as managed reader - if (reader) + // mark as managed reader and set parent + if (reader) { + reader->SetClip(this); manage_reader = true; + } // Re-Open reader (if needed) if (already_open) diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/CMakeLists.txt libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/CMakeLists.txt --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/CMakeLists.txt 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/CMakeLists.txt 2019-03-21 07:31:31.000000000 +0000 @@ -37,12 +37,12 @@ ENDIF(WIN32) IF (APPLE) # If you still get errors compiling with GCC 4.8, mac headers need to be patched: http://hamelot.co.uk/programming/osx-gcc-dispatch_block_t-has-not-been-declared-invalid-typedef/ - SET_PROPERTY(GLOBAL PROPERTY JUCE_MAC "JUCE_MAC") - ADD_DEFINITIONS(-DNDEBUG) - SET(EXTENSION "mm") - - SET(JUCE_PLATFORM_SPECIFIC_DIR build/macosx/platform_specific_code) - SET(JUCE_PLATFORM_SPECIFIC_LIBRARIES "-framework Carbon -framework Cocoa -framework CoreFoundation -framework CoreAudio -framework CoreMidi -framework IOKit -framework AGL -framework AudioToolbox -framework QuartzCore -lobjc -framework Accelerate") + SET_PROPERTY(GLOBAL PROPERTY JUCE_MAC "JUCE_MAC") + ADD_DEFINITIONS(-DNDEBUG) + SET(EXTENSION "mm") + + SET(JUCE_PLATFORM_SPECIFIC_DIR build/macosx/platform_specific_code) + SET(JUCE_PLATFORM_SPECIFIC_LIBRARIES "-framework Carbon -framework Cocoa -framework CoreFoundation -framework CoreAudio -framework CoreMidi -framework IOKit -framework AGL -framework AudioToolbox -framework QuartzCore -lobjc -framework Accelerate") ENDIF(APPLE) ################ IMAGE MAGICK ################## @@ -74,20 +74,43 @@ SET(CMAKE_SWIG_FLAGS "-DUSE_IMAGEMAGICK=1") ENDIF (ImageMagick_FOUND) - + ################### FFMPEG ##################### # Find FFmpeg libraries (used for video encoding / decoding) FIND_PACKAGE(FFmpeg REQUIRED) -# Include FFmpeg headers (needed for compile) 
-include_directories(${FFMPEG_INCLUDE_DIR}) +IF (AVCODEC_FOUND) + include_directories(${AVCODEC_INCLUDE_DIRS}) +ENDIF (AVCODEC_FOUND) +IF (AVDEVICE_FOUND) + include_directories(${AVDEVICE_INCLUDE_DIRS}) +ENDIF (AVDEVICE_FOUND) +IF (AVFORMAT_FOUND) + include_directories(${AVFORMAT_INCLUDE_DIRS}) +ENDIF (AVFORMAT_FOUND) +IF (AVFILTER_FOUND) + include_directories(${AVFILTER_INCLUDE_DIRS}) +ENDIF (AVFILTER_FOUND) +IF (AVUTIL_FOUND) + include_directories(${AVUTIL_INCLUDE_DIRS}) +ENDIF (AVUTIL_FOUND) +IF (POSTPROC_FOUND) + include_directories(${POSTPROC_INCLUDE_DIRS}) +ENDIF (POSTPROC_FOUND) +IF (SWSCALE_FOUND) + include_directories(${SWSCALE_INCLUDE_DIRS}) +ENDIF (SWSCALE_FOUND) +IF (SWRESAMPLE_FOUND) + include_directories(${SWRESAMPLE_INCLUDE_DIRS}) +ENDIF (SWRESAMPLE_FOUND) +IF (AVRESAMPLE_FOUND) + include_directories(${AVRESAMPLE_INCLUDE_DIRS}) +ENDIF (AVRESAMPLE_FOUND) ################# LIBOPENSHOT-AUDIO ################### # Find JUCE-based openshot Audio libraries FIND_PACKAGE(OpenShotAudio REQUIRED) -message('LIBOPENSHOT_AUDIO_INCLUDE_DIRS: ${LIBOPENSHOT_AUDIO_INCLUDE_DIRS}') - # Include Juce headers (needed for compile) include_directories(${LIBOPENSHOT_AUDIO_INCLUDE_DIRS}) @@ -112,11 +135,11 @@ add_definitions(${Qt5Multimedia_DEFINITIONS}) add_definitions(${Qt5MultimediaWidgets_DEFINITIONS}) -SET(QT_LIBRARIES ${Qt5Widgets_LIBRARIES} - ${Qt5Core_LIBRARIES} - ${Qt5Gui_LIBRARIES} - ${Qt5Multimedia_LIBRARIES} - ${Qt5MultimediaWidgets_LIBRARIES}) +SET(QT_LIBRARIES ${Qt5Widgets_LIBRARIES} + ${Qt5Core_LIBRARIES} + ${Qt5Gui_LIBRARIES} + ${Qt5Multimedia_LIBRARIES} + ${Qt5MultimediaWidgets_LIBRARIES}) # Set compiler flags for Qt set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5Widgets_EXECUTABLE_COMPILE_FLAGS} ") @@ -133,15 +156,15 @@ # Find BlackMagic DeckLinkAPI libraries IF (ENABLE_BLACKMAGIC) FIND_PACKAGE(BlackMagic) - + IF (BLACKMAGIC_FOUND) # Include headers (needed for compile) include_directories(${BLACKMAGIC_INCLUDE_DIR}) - + # define a global var (used in the C++) add_definitions( -DUSE_BLACKMAGIC=1 ) SET(CMAKE_SWIG_FLAGS "-DUSE_BLACKMAGIC=1") - + ENDIF (BLACKMAGIC_FOUND) ENDIF (ENABLE_BLACKMAGIC) @@ -150,23 +173,36 @@ FIND_PACKAGE(OpenMP) if (OPENMP_FOUND) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS} ") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS} ") endif(OPENMP_FOUND) ################### ZEROMQ ##################### # Find ZeroMQ library (used for socket communication & logging) FIND_PACKAGE(ZMQ REQUIRED) -# Include FFmpeg headers (needed for compile) +# Include ZeroMQ headers (needed for compile) include_directories(${ZMQ_INCLUDE_DIRS}) +################### RESVG ##################### +# Find resvg library (used for rendering svg files) +FIND_PACKAGE(RESVG) + +# Include resvg headers (optional SVG library) +if (RESVG_FOUND) + include_directories(${RESVG_INCLUDE_DIRS}) + + # define a global var (used in the C++) + add_definitions( -DUSE_RESVG=1 ) + SET(CMAKE_SWIG_FLAGS "-DUSE_RESVG=1") +endif(RESVG_FOUND) + ################### JSONCPP ##################### # Include jsoncpp headers (needed for JSON parsing) if (USE_SYSTEM_JSONCPP) find_package(JsonCpp REQUIRED) include_directories(${JSONCPP_INCLUDE_DIRS}) else() - message("Using embedded JsonCpp") + message("-- Could NOT find JsonCpp library (Using embedded JsonCpp instead)") include_directories("../thirdparty/jsoncpp/include") endif(USE_SYSTEM_JSONCPP) @@ -182,8 +218,8 @@ ############### SET LIBRARY SOURCE FILES ################# SET ( OPENSHOT_SOURCE_FILES - AudioBufferSource.cpp - 
AudioReaderSource.cpp + AudioBufferSource.cpp + AudioReaderSource.cpp AudioResampler.cpp CacheBase.cpp CacheDisk.cpp @@ -214,38 +250,39 @@ Profiles.cpp QtImageReader.cpp QtPlayer.cpp + Settings.cpp Timeline.cpp - + # Qt Video Player ${QT_PLAYER_FILES} ${MOC_FILES}) - IF (NOT USE_SYSTEM_JSONCPP) - # Third Party JSON Parser - SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} - ../thirdparty/jsoncpp/src/lib_json/json_reader.cpp - ../thirdparty/jsoncpp/src/lib_json/json_value.cpp - ../thirdparty/jsoncpp/src/lib_json/json_writer.cpp) - ENDIF (NOT USE_SYSTEM_JSONCPP) - - # ImageMagic related files - IF (ImageMagick_FOUND) - SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} - ImageReader.cpp - ImageWriter.cpp - TextReader.cpp) - ENDIF (ImageMagick_FOUND) - - # BlackMagic related files - IF (BLACKMAGIC_FOUND) - SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} - DecklinkInput.cpp - DecklinkReader.cpp - DecklinkOutput.cpp - DecklinkWriter.cpp) - ENDIF (BLACKMAGIC_FOUND) - - +IF (NOT USE_SYSTEM_JSONCPP) + # Third Party JSON Parser + SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} + ../thirdparty/jsoncpp/src/lib_json/json_reader.cpp + ../thirdparty/jsoncpp/src/lib_json/json_value.cpp + ../thirdparty/jsoncpp/src/lib_json/json_writer.cpp) +ENDIF (NOT USE_SYSTEM_JSONCPP) + +# ImageMagic related files +IF (ImageMagick_FOUND) + SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} + ImageReader.cpp + ImageWriter.cpp + TextReader.cpp) +ENDIF (ImageMagick_FOUND) + +# BlackMagic related files +IF (BLACKMAGIC_FOUND) + SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} + DecklinkInput.cpp + DecklinkReader.cpp + DecklinkOutput.cpp + DecklinkWriter.cpp) +ENDIF (BLACKMAGIC_FOUND) + + # Get list of headers file(GLOB_RECURSE headers ${CMAKE_SOURCE_DIR}/include/*.h) @@ -254,44 +291,75 @@ ############### CREATE LIBRARY ################# # Create shared openshot library -add_library(openshot SHARED - ${OPENSHOT_SOURCE_FILES} - ${headers} ) +add_library(openshot SHARED + ${OPENSHOT_SOURCE_FILES} + ${headers} ) # Set SONAME and other library properties set_target_properties(openshot - PROPERTIES - VERSION ${PROJECT_VERSION} - SOVERSION ${SO_VERSION} - INSTALL_NAME_DIR "${CMAKE_INSTALL_PREFIX}/lib" - ) + PROPERTIES + VERSION ${PROJECT_VERSION} + SOVERSION ${SO_VERSION} + INSTALL_NAME_DIR "${CMAKE_INSTALL_PREFIX}/lib" + ) ############### LINK LIBRARY ################# SET ( REQUIRED_LIBRARIES - ${FFMPEG_LIBRARIES} ${LIBOPENSHOT_AUDIO_LIBRARIES} ${QT_LIBRARIES} ${PROFILER} ${JSONCPP_LIBRARY} ${ZMQ_LIBRARIES} ) - - IF (OPENMP_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${OpenMP_CXX_FLAGS} ) - ENDIF (OPENMP_FOUND) - - IF (ImageMagick_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${ImageMagick_LIBRARIES} ) - ENDIF (ImageMagick_FOUND) - - IF (BLACKMAGIC_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${BLACKMAGIC_LIBRARY_DIR} ) - ENDIF (BLACKMAGIC_FOUND) - - IF (WIN32) - # Required for exception handling on Windows - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} "imagehlp" ) - ENDIF(WIN32) + +IF (AVCODEC_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVCODEC_LIBRARIES} ) +ENDIF (AVCODEC_FOUND) +IF (AVDEVICE_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVDEVICE_LIBRARIES} ) +ENDIF (AVDEVICE_FOUND) +IF (AVFORMAT_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVFORMAT_LIBRARIES} ) +ENDIF (AVFORMAT_FOUND) +IF (AVFILTER_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVFILTER_LIBRARIES} ) +ENDIF (AVFILTER_FOUND) +IF (AVUTIL_FOUND) + SET ( 
REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVUTIL_LIBRARIES} ) +ENDIF (AVUTIL_FOUND) +IF (POSTPROC_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${POSTPROC_LIBRARIES} ) +ENDIF (POSTPROC_FOUND) +IF (SWSCALE_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${SWSCALE_LIBRARIES} ) +ENDIF (SWSCALE_FOUND) +IF (SWRESAMPLE_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${SWRESAMPLE_LIBRARIES} ) +ENDIF (SWRESAMPLE_FOUND) +IF (AVRESAMPLE_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVRESAMPLE_LIBRARIES} ) +ENDIF (AVRESAMPLE_FOUND) + +IF (RESVG_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${RESVG_LIBRARIES} ) +ENDIF(RESVG_FOUND) + +IF (OPENMP_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${OpenMP_CXX_FLAGS} ) +ENDIF (OPENMP_FOUND) + +IF (ImageMagick_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${ImageMagick_LIBRARIES} ) +ENDIF (ImageMagick_FOUND) + +IF (BLACKMAGIC_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${BLACKMAGIC_LIBRARY_DIR} ) +ENDIF (BLACKMAGIC_FOUND) + +IF (WIN32) + # Required for exception handling on Windows + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} "imagehlp" "dbghelp" ) +ENDIF(WIN32) # Link all referenced libraries target_link_libraries(openshot ${REQUIRED_LIBRARIES}) @@ -314,9 +382,9 @@ ############### TEST BLACKMAGIC CAPTURE APP ################ IF (BLACKMAGIC_FOUND) # Create test executable - add_executable(openshot-blackmagic - examples/ExampleBlackmagic.cpp) - + add_executable(openshot-blackmagic + examples/ExampleBlackmagic.cpp) + # Link test executable to the new library target_link_libraries(openshot-blackmagic openshot) ENDIF (BLACKMAGIC_FOUND) @@ -330,13 +398,13 @@ # Install primary library INSTALL( TARGETS openshot - ARCHIVE DESTINATION ${LIB_INSTALL_DIR} - LIBRARY DESTINATION ${LIB_INSTALL_DIR} - COMPONENT library ) - -INSTALL(FILES ${headers} - DESTINATION ${CMAKE_INSTALL_PREFIX}/include/libopenshot ) - + ARCHIVE DESTINATION ${LIB_INSTALL_DIR} + LIBRARY DESTINATION ${LIB_INSTALL_DIR} + COMPONENT library ) + +INSTALL(DIRECTORY ${CMAKE_SOURCE_DIR}/include/ + DESTINATION ${CMAKE_INSTALL_PREFIX}/include/libopenshot + FILES_MATCHING PATTERN "*.h") ############### CPACK PACKAGING ############## IF(MINGW) diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Coordinate.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Coordinate.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Coordinate.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Coordinate.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -32,12 +32,12 @@ // Default constructor for a coordinate, which defaults the X and Y to zero (0,0) Coordinate::Coordinate() : - X(0), Y(0), increasing(true), repeated(1,1), delta(0.0) { + X(0), Y(0) { } // Constructor which also allows the user to set the X and Y Coordinate::Coordinate(double x, double y) : - X(x), Y(y), increasing(true), repeated(1,1), delta(0.0) { + X(x), Y(y) { } @@ -96,15 +96,4 @@ X = root["X"].asDouble(); if (!root["Y"].isNull()) Y = root["Y"].asDouble(); - if (!root["increasing"].isNull()) - increasing = root["increasing"].asBool(); - if (!root["repeated"].isNull() && root["repeated"].isObject()) - { - if (!root["repeated"]["num"].isNull()) - repeated.num = root["repeated"]["num"].asInt(); - if (!root["repeated"]["den"].isNull()) - repeated.den = root["repeated"]["den"].asInt(); - } - if (!root["delta"].isNull()) - delta = root["delta"].asDouble(); } diff -Nru 
libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/EffectBase.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/EffectBase.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/EffectBase.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/EffectBase.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -58,6 +58,18 @@ cout << "----------------------------" << endl; } +// Constrain a color value from 0 to 255 +int EffectBase::constrain(int color_value) +{ + // Constrain new color from 0 to 255 + if (color_value < 0) + color_value = 0; + else if (color_value > 255) + color_value = 255; + + return color_value; +} + // Generate JSON string of this object string EffectBase::Json() { diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/EffectInfo.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/EffectInfo.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/EffectInfo.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/EffectInfo.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -41,6 +41,9 @@ // Create a new effect instance EffectBase* EffectInfo::CreateEffect(string effect_type) { // Init the matching effect object + if (effect_type == "Bars") + return new Bars(); + if (effect_type == "Blur") return new Blur(); @@ -50,17 +53,36 @@ else if (effect_type == "ChromaKey") return new ChromaKey(); + else if (effect_type == "Color Shift") + return new ColorShift(); + + else if (effect_type == "Crop") + return new Crop(); + else if (effect_type == "Deinterlace") return new Deinterlace(); + else if (effect_type == "Hue") + return new Hue(); + else if (effect_type == "Mask") return new Mask(); else if (effect_type == "Negate") return new Negate(); + else if (effect_type == "Pixelate") + return new Pixelate(); + else if (effect_type == "Saturation") return new Saturation(); + + else if (effect_type == "Shift") + return new Shift(); + + else if (effect_type == "Wave") + return new Wave(); + return NULL; } // Generate Json::JsonValue for this object @@ -70,13 +92,20 @@ Json::Value root; // Append info JSON from each supported effect + root.append(Bars().JsonInfo()); root.append(Blur().JsonInfo()); root.append(Brightness().JsonInfo()); root.append(ChromaKey().JsonInfo()); + root.append(ColorShift().JsonInfo()); + root.append(Crop().JsonInfo()); root.append(Deinterlace().JsonInfo()); + root.append(Hue().JsonInfo()); root.append(Mask().JsonInfo()); root.append(Negate().JsonInfo()); + root.append(Pixelate().JsonInfo()); root.append(Saturation().JsonInfo()); + root.append(Shift().JsonInfo()); + root.append(Wave().JsonInfo()); // return JsonValue return root; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Bars.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Bars.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Bars.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Bars.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,203 @@ +/** + * @file + * @brief Source file for Bars effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . 
This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "../../include/effects/Bars.h" + +using namespace openshot; + +/// Blank constructor, useful when using Json to load the effect properties +Bars::Bars() : color("#000000"), left(0.0), top(0.1), right(0.0), bottom(0.1) { + // Init effect properties + init_effect_details(); +} + +// Default constructor +Bars::Bars(Color color, Keyframe left, Keyframe top, Keyframe right, Keyframe bottom) : + color(color), left(left), top(top), right(right), bottom(bottom) +{ + // Init effect properties + init_effect_details(); +} + +// Init effect settings +void Bars::init_effect_details() +{ + /// Initialize the values of the EffectInfo struct. + InitEffectInfo(); + + /// Set the effect info + info.class_name = "Bars"; + info.name = "Bars"; + info.description = "Add colored bars around your video."; + info.has_audio = false; + info.has_video = true; +} + +// This method is required for all derived classes of EffectBase, and returns a +// modified openshot::Frame object +std::shared_ptr Bars::GetFrame(std::shared_ptr frame, int64_t frame_number) +{ + // Get the frame's image + std::shared_ptr frame_image = frame->GetImage(); + + // Get bar color (and create small color image) + std::shared_ptr tempColor = std::shared_ptr(new QImage(frame_image->width(), 1, QImage::Format_RGBA8888)); + tempColor->fill(QColor(QString::fromStdString(color.GetColorHex(frame_number)))); + + // Get current keyframe values + double left_value = left.GetValue(frame_number); + double top_value = top.GetValue(frame_number); + double right_value = right.GetValue(frame_number); + double bottom_value = bottom.GetValue(frame_number); + + // Get pixel array pointer + unsigned char *pixels = (unsigned char *) frame_image->bits(); + unsigned char *color_pixels = (unsigned char *) tempColor->bits(); + + // Get pixels sizes of all bars + int top_bar_height = top_value * frame_image->height(); + int bottom_bar_height = bottom_value * frame_image->height(); + int left_bar_width = left_value * frame_image->width(); + int right_bar_width = right_value * frame_image->width(); + + // Loop through rows + for (int row = 0; row < frame_image->height(); row++) { + + // Top & Bottom Bar + if ((top_bar_height > 0.0 && row <= top_bar_height) || (bottom_bar_height > 0.0 && row >= frame_image->height() - bottom_bar_height)) { + memcpy(&pixels[row * frame_image->width() * 4], color_pixels, sizeof(char) * frame_image->width() * 4); + } else { + // Left Bar + if (left_bar_width > 0.0) { + memcpy(&pixels[row * frame_image->width() * 4], color_pixels, sizeof(char) * left_bar_width * 4); + } + + // Right Bar + if (right_bar_width > 0.0) { + memcpy(&pixels[((row * 
frame_image->width()) + (frame_image->width() - right_bar_width)) * 4], color_pixels, sizeof(char) * right_bar_width * 4); + } + } + } + + // Cleanup colors and arrays + tempColor.reset(); + + // return the modified frame + return frame; +} + +// Generate JSON string of this object +string Bars::Json() { + + // Return formatted string + return JsonValue().toStyledString(); +} + +// Generate Json::JsonValue for this object +Json::Value Bars::JsonValue() { + + // Create root json object + Json::Value root = EffectBase::JsonValue(); // get parent properties + root["type"] = info.class_name; + root["color"] = color.JsonValue(); + root["left"] = left.JsonValue(); + root["top"] = top.JsonValue(); + root["right"] = right.JsonValue(); + root["bottom"] = bottom.JsonValue(); + + // return JsonValue + return root; +} + +// Load JSON string into this object +void Bars::SetJson(string value) { + + // Parse JSON string into JSON objects + Json::Value root; + Json::Reader reader; + bool success = reader.parse( value, root ); + if (!success) + // Raise exception + throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); + + try + { + // Set all values that match + SetJsonValue(root); + } + catch (exception e) + { + // Error parsing JSON (or missing keys) + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + } +} + +// Load Json::JsonValue into this object +void Bars::SetJsonValue(Json::Value root) { + + // Set parent data + EffectBase::SetJsonValue(root); + + // Set data from Json (if key is found) + if (!root["color"].isNull()) + color.SetJsonValue(root["color"]); + if (!root["left"].isNull()) + left.SetJsonValue(root["left"]); + if (!root["top"].isNull()) + top.SetJsonValue(root["top"]); + if (!root["right"].isNull()) + right.SetJsonValue(root["right"]); + if (!root["bottom"].isNull()) + bottom.SetJsonValue(root["bottom"]); +} + +// Get all properties for a specific frame +string Bars::PropertiesJSON(int64_t requested_frame) { + + // Generate JSON properties list + Json::Value root; + root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); + root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame); + root["start"] = add_property_json("Start", Start(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 1000 * 60 * 30, true, requested_frame); + + // Keyframes + root["color"] = add_property_json("Bar Color", 0.0, "color", "", NULL, 0, 255, false, requested_frame); + root["color"]["red"] = add_property_json("Red", color.red.GetValue(requested_frame), "float", "", &color.red, 0, 255, false, requested_frame); + root["color"]["blue"] = add_property_json("Blue", color.blue.GetValue(requested_frame), "float", "", &color.blue, 0, 255, false, requested_frame); + root["color"]["green"] = add_property_json("Green", color.green.GetValue(requested_frame), "float", "", &color.green, 0, 255, false, requested_frame); + root["left"] = add_property_json("Left Size", left.GetValue(requested_frame), "float", "", &left, 0.0, 0.5, false, requested_frame); + root["top"] = add_property_json("Top Size", top.GetValue(requested_frame), "float", "", &top, 0.0, 0.5, false, 
requested_frame); + root["right"] = add_property_json("Right Size", right.GetValue(requested_frame), "float", "", &right, 0.0, 0.5, false, requested_frame); + root["bottom"] = add_property_json("Bottom Size", bottom.GetValue(requested_frame), "float", "", &bottom, 0.0, 0.5, false, requested_frame); + + // Return formatted string + return root.toStyledString(); +} + diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Blur.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Blur.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Blur.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Blur.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -122,6 +122,12 @@ // Remove boxes delete[] bxs; + + // Copy blur_ back to for vertical blur or next iteration + for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) red[i] = blur_red[i]; + for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) green[i] = blur_green[i]; + for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) blue[i] = blur_blue[i]; + for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) alpha[i] = blur_alpha[i]; } // VERTICAL BLUR (if any) @@ -137,6 +143,12 @@ // Remove boxes delete[] bxs; + + // Copy blur_ back to for vertical blur or next iteration + for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) red[i] = blur_red[i]; + for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) green[i] = blur_green[i]; + for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) blue[i] = blur_blue[i]; + for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) alpha[i] = blur_alpha[i]; } } @@ -290,11 +302,11 @@ // Set data from Json (if key is found) if (!root["horizontal_radius"].isNull()) horizontal_radius.SetJsonValue(root["horizontal_radius"]); - else if (!root["vertical_radius"].isNull()) + if (!root["vertical_radius"].isNull()) vertical_radius.SetJsonValue(root["vertical_radius"]); - else if (!root["sigma"].isNull()) + if (!root["sigma"].isNull()) sigma.SetJsonValue(root["sigma"]); - else if (!root["iterations"].isNull()) + if (!root["iterations"].isNull()) iterations.SetJsonValue(root["iterations"]); } diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Brightness.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Brightness.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Brightness.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Brightness.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -56,19 +56,6 @@ info.has_video = true; } -// Constrain a color value from 0 to 255 -int Brightness::constrain(int color_value) -{ - // Constrain new color from 0 to 255 - if (color_value < 0) - color_value = 0; - else if (color_value > 255) - color_value = 255; - - return color_value; -} - - // This method is required for all derived classes of EffectBase, and returns a // modified openshot::Frame object std::shared_ptr Brightness::GetFrame(std::shared_ptr frame, int64_t frame_number) diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/ColorShift.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/ColorShift.cpp --- 
libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/ColorShift.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/ColorShift.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,292 @@ +/** + * @file + * @brief Source file for Color Shift effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "../../include/effects/ColorShift.h" + +using namespace openshot; + +/// Blank constructor, useful when using Json to load the effect properties +ColorShift::ColorShift() : red_x(-0.05), red_y(0.0), green_x(0.05), green_y(0.0), blue_x(0.0), blue_y(0.0), alpha_x(0.0), alpha_y(0.0) { + // Init effect properties + init_effect_details(); +} + +// Default constructor +ColorShift::ColorShift(Keyframe red_x, Keyframe red_y, Keyframe green_x, Keyframe green_y, Keyframe blue_x, Keyframe blue_y, Keyframe alpha_x, Keyframe alpha_y) : + red_x(red_x), red_y(red_y), green_x(green_x), green_y(green_y), blue_x(blue_x), blue_y(blue_y), alpha_x(alpha_x), alpha_y(alpha_y) +{ + // Init effect properties + init_effect_details(); +} + +// Init effect settings +void ColorShift::init_effect_details() +{ + /// Initialize the values of the EffectInfo struct. 
+ InitEffectInfo(); + + /// Set the effect info + info.class_name = "Color Shift"; + info.name = "Color Shift"; + info.description = "Shift the colors of an image up, down, left, and right (with infinite wrapping)."; + info.has_audio = false; + info.has_video = true; +} + +// This method is required for all derived classes of EffectBase, and returns a +// modified openshot::Frame object +std::shared_ptr ColorShift::GetFrame(std::shared_ptr frame, int64_t frame_number) +{ + // Get the frame's image + std::shared_ptr frame_image = frame->GetImage(); + unsigned char *pixels = (unsigned char *) frame_image->bits(); + + // Get image size + int frame_image_width = frame_image->width(); + int frame_image_height = frame_image->height(); + + // Get the current shift amount, and clamp to range (-1 to 1 range) + // Red Keyframes + float red_x_shift = red_x.GetValue(frame_number); + int red_x_shift_limit = round(frame_image_width * fmod(abs(red_x_shift), 1.0)); + float red_y_shift = red_y.GetValue(frame_number); + int red_y_shift_limit = round(frame_image_height * fmod(abs(red_y_shift), 1.0)); + // Green Keyframes + float green_x_shift = green_x.GetValue(frame_number); + int green_x_shift_limit = round(frame_image_width * fmod(abs(green_x_shift), 1.0)); + float green_y_shift = green_y.GetValue(frame_number); + int green_y_shift_limit = round(frame_image_height * fmod(abs(green_y_shift), 1.0)); + // Blue Keyframes + float blue_x_shift = blue_x.GetValue(frame_number); + int blue_x_shift_limit = round(frame_image_width * fmod(abs(blue_x_shift), 1.0)); + float blue_y_shift = blue_y.GetValue(frame_number); + int blue_y_shift_limit = round(frame_image_height * fmod(abs(blue_y_shift), 1.0)); + // Alpha Keyframes + float alpha_x_shift = alpha_x.GetValue(frame_number); + int alpha_x_shift_limit = round(frame_image_width * fmod(abs(alpha_x_shift), 1.0)); + float alpha_y_shift = alpha_y.GetValue(frame_number); + int alpha_y_shift_limit = round(frame_image_height * fmod(abs(alpha_y_shift), 1.0)); + + + // Make temp copy of pixels + unsigned char *temp_image = new unsigned char[frame_image_width * frame_image_height * 4](); + memcpy(temp_image, pixels, sizeof(char) * frame_image_width * frame_image_height * 4); + + // Init position of current row and pixel + int starting_row_index = 0; + int byte_index = 0; + + // Init RGBA values + unsigned char R = 0; + unsigned char G = 0; + unsigned char B = 0; + unsigned char A = 0; + + int red_starting_row_index = 0; + int green_starting_row_index = 0; + int blue_starting_row_index = 0; + int alpha_starting_row_index = 0; + + int red_pixel_offset = 0; + int green_pixel_offset = 0; + int blue_pixel_offset = 0; + int alpha_pixel_offset = 0; + + // Loop through rows of pixels + for (int row = 0; row < frame_image_height; row++) { + for (int col = 0; col < frame_image_width; col++) { + // Get position of current row and pixel + starting_row_index = row * frame_image_width * 4; + byte_index = starting_row_index + (col * 4); + red_starting_row_index = starting_row_index; + green_starting_row_index = starting_row_index; + blue_starting_row_index = starting_row_index; + alpha_starting_row_index = starting_row_index; + + + red_pixel_offset = 0; + green_pixel_offset = 0; + blue_pixel_offset = 0; + alpha_pixel_offset = 0; + + // Get the RGBA value from each pixel (depending on offset) + R = temp_image[byte_index]; + G = temp_image[byte_index + 1]; + B = temp_image[byte_index + 2]; + A = temp_image[byte_index + 3]; + + // Shift X + if (red_x_shift > 0.0) + red_pixel_offset = (col + 
red_x_shift_limit) % frame_image_width; + if (red_x_shift < 0.0) + red_pixel_offset = (frame_image_width + col - red_x_shift_limit) % frame_image_width; + if (green_x_shift > 0.0) + green_pixel_offset = (col + green_x_shift_limit) % frame_image_width; + if (green_x_shift < 0.0) + green_pixel_offset = (frame_image_width + col - green_x_shift_limit) % frame_image_width; + if (blue_x_shift > 0.0) + blue_pixel_offset = (col + blue_x_shift_limit) % frame_image_width; + if (blue_x_shift < 0.0) + blue_pixel_offset = (frame_image_width + col - blue_x_shift_limit) % frame_image_width; + if (alpha_x_shift > 0.0) + alpha_pixel_offset = (col + alpha_x_shift_limit) % frame_image_width; + if (alpha_x_shift < 0.0) + alpha_pixel_offset = (frame_image_width + col - alpha_x_shift_limit) % frame_image_width; + + // Shift Y + if (red_y_shift > 0.0) + red_starting_row_index = ((row + red_y_shift_limit) % frame_image_height) * frame_image_width * 4; + if (red_y_shift < 0.0) + red_starting_row_index = ((frame_image_height + row - red_y_shift_limit) % frame_image_height) * frame_image_width * 4; + if (green_y_shift > 0.0) + green_starting_row_index = ((row + green_y_shift_limit) % frame_image_height) * frame_image_width * 4; + if (green_y_shift < 0.0) + green_starting_row_index = ((frame_image_height + row - green_y_shift_limit) % frame_image_height) * frame_image_width * 4; + if (blue_y_shift > 0.0) + blue_starting_row_index = ((row + blue_y_shift_limit) % frame_image_height) * frame_image_width * 4; + if (blue_y_shift < 0.0) + blue_starting_row_index = ((frame_image_height + row - blue_y_shift_limit) % frame_image_height) * frame_image_width * 4; + if (alpha_y_shift > 0.0) + alpha_starting_row_index = ((row + alpha_y_shift_limit) % frame_image_height) * frame_image_width * 4; + if (alpha_y_shift < 0.0) + alpha_starting_row_index = ((frame_image_height + row - alpha_y_shift_limit) % frame_image_height) * frame_image_width * 4; + + // Copy new values to this pixel + pixels[red_starting_row_index + 0 + (red_pixel_offset * 4)] = R; + pixels[green_starting_row_index + 1 + (green_pixel_offset * 4)] = G; + pixels[blue_starting_row_index + 2 + (blue_pixel_offset * 4)] = B; + pixels[alpha_starting_row_index + 3 + (alpha_pixel_offset * 4)] = A; + } + } + + // Delete arrays + delete[] temp_image; + + // return the modified frame + return frame; +} + +// Generate JSON string of this object +string ColorShift::Json() { + + // Return formatted string + return JsonValue().toStyledString(); +} + +// Generate Json::JsonValue for this object +Json::Value ColorShift::JsonValue() { + + // Create root json object + Json::Value root = EffectBase::JsonValue(); // get parent properties + root["type"] = info.class_name; + root["red_x"] = red_x.JsonValue(); + root["red_y"] = red_y.JsonValue(); + root["green_x"] = green_x.JsonValue(); + root["green_y"] = green_y.JsonValue(); + root["blue_x"] = blue_x.JsonValue(); + root["blue_y"] = blue_y.JsonValue(); + root["alpha_x"] = alpha_x.JsonValue(); + root["alpha_y"] = alpha_y.JsonValue(); + + // return JsonValue + return root; +} + +// Load JSON string into this object +void ColorShift::SetJson(string value) { + + // Parse JSON string into JSON objects + Json::Value root; + Json::Reader reader; + bool success = reader.parse( value, root ); + if (!success) + // Raise exception + throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); + + try + { + // Set all values that match + SetJsonValue(root); + } + catch (exception e) + { + // Error parsing JSON (or missing keys) + throw 
InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + } +} + +// Load Json::JsonValue into this object +void ColorShift::SetJsonValue(Json::Value root) { + + // Set parent data + EffectBase::SetJsonValue(root); + + // Set data from Json (if key is found) + if (!root["red_x"].isNull()) + red_x.SetJsonValue(root["red_x"]); + if (!root["red_y"].isNull()) + red_y.SetJsonValue(root["red_y"]); + if (!root["green_x"].isNull()) + green_x.SetJsonValue(root["green_x"]); + if (!root["green_y"].isNull()) + green_y.SetJsonValue(root["green_y"]); + if (!root["blue_x"].isNull()) + blue_x.SetJsonValue(root["blue_x"]); + if (!root["blue_y"].isNull()) + blue_y.SetJsonValue(root["blue_y"]); + if (!root["alpha_x"].isNull()) + alpha_x.SetJsonValue(root["alpha_x"]); + if (!root["alpha_y"].isNull()) + alpha_y.SetJsonValue(root["alpha_y"]); +} + +// Get all properties for a specific frame +string ColorShift::PropertiesJSON(int64_t requested_frame) { + + // Generate JSON properties list + Json::Value root; + root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); + root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame); + root["start"] = add_property_json("Start", Start(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 1000 * 60 * 30, true, requested_frame); + + // Keyframes + root["red_x"] = add_property_json("Red X Shift", red_x.GetValue(requested_frame), "float", "", &red_x, -1, 1, false, requested_frame); + root["red_y"] = add_property_json("Red Y Shift", red_y.GetValue(requested_frame), "float", "", &red_y, -1, 1, false, requested_frame); + root["green_x"] = add_property_json("Green X Shift", green_x.GetValue(requested_frame), "float", "", &green_x, -1, 1, false, requested_frame); + root["green_y"] = add_property_json("Green Y Shift", green_y.GetValue(requested_frame), "float", "", &green_y, -1, 1, false, requested_frame); + root["blue_x"] = add_property_json("Blue X Shift", blue_x.GetValue(requested_frame), "float", "", &blue_x, -1, 1, false, requested_frame); + root["blue_y"] = add_property_json("Blue Y Shift", blue_y.GetValue(requested_frame), "float", "", &blue_y, -1, 1, false, requested_frame); + root["alpha_x"] = add_property_json("Alpha X Shift", alpha_x.GetValue(requested_frame), "float", "", &alpha_x, -1, 1, false, requested_frame); + root["alpha_y"] = add_property_json("Alpha Y Shift", alpha_y.GetValue(requested_frame), "float", "", &alpha_y, -1, 1, false, requested_frame); + + // Return formatted string + return root.toStyledString(); +} + diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Crop.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Crop.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Crop.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Crop.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,196 @@ +/** + * @file + * @brief Source file for Crop effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . 
This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "../../include/effects/Crop.h" + +using namespace openshot; + +/// Blank constructor, useful when using Json to load the effect properties +Crop::Crop() : left(0.1), top(0.1), right(0.1), bottom(0.1) { + // Init effect properties + init_effect_details(); +} + +// Default constructor +Crop::Crop(Keyframe left, Keyframe top, Keyframe right, Keyframe bottom) : + left(left), top(top), right(right), bottom(bottom) +{ + // Init effect properties + init_effect_details(); +} + +// Init effect settings +void Crop::init_effect_details() +{ + /// Initialize the values of the EffectInfo struct. + InitEffectInfo(); + + /// Set the effect info + info.class_name = "Crop"; + info.name = "Crop"; + info.description = "Crop out any part of your video."; + info.has_audio = false; + info.has_video = true; +} + +// This method is required for all derived classes of EffectBase, and returns a +// modified openshot::Frame object +std::shared_ptr Crop::GetFrame(std::shared_ptr frame, int64_t frame_number) +{ + // Get the frame's image + std::shared_ptr frame_image = frame->GetImage(); + + // Get transparent color (and create small transparent image) + std::shared_ptr tempColor = std::shared_ptr(new QImage(frame_image->width(), 1, QImage::Format_RGBA8888)); + tempColor->fill(QColor(QString::fromStdString("transparent"))); + + // Get current keyframe values + double left_value = left.GetValue(frame_number); + double top_value = top.GetValue(frame_number); + double right_value = right.GetValue(frame_number); + double bottom_value = bottom.GetValue(frame_number); + + // Get pixel array pointers + unsigned char *pixels = (unsigned char *) frame_image->bits(); + unsigned char *color_pixels = (unsigned char *) tempColor->bits(); + + // Get pixels sizes of all crop sides + int top_bar_height = top_value * frame_image->height(); + int bottom_bar_height = bottom_value * frame_image->height(); + int left_bar_width = left_value * frame_image->width(); + int right_bar_width = right_value * frame_image->width(); + + // Loop through rows + for (int row = 0; row < frame_image->height(); row++) { + + // Top & Bottom Crop + if ((top_bar_height > 0.0 && row <= top_bar_height) || (bottom_bar_height > 0.0 && row >= frame_image->height() - bottom_bar_height)) { + memcpy(&pixels[row * frame_image->width() * 4], color_pixels, sizeof(char) * frame_image->width() * 4); + } else { + // Left Crop + if (left_bar_width > 0.0) { + memcpy(&pixels[row * frame_image->width() * 4], color_pixels, sizeof(char) * left_bar_width * 4); + } + + // Right Crop + if (right_bar_width > 0.0) { + memcpy(&pixels[((row * frame_image->width()) + (frame_image->width() 
- right_bar_width)) * 4], color_pixels, sizeof(char) * right_bar_width * 4); + } + } + } + + // Cleanup colors and arrays + tempColor.reset(); + + // return the modified frame + return frame; +} + +// Generate JSON string of this object +string Crop::Json() { + + // Return formatted string + return JsonValue().toStyledString(); +} + +// Generate Json::JsonValue for this object +Json::Value Crop::JsonValue() { + + // Create root json object + Json::Value root = EffectBase::JsonValue(); // get parent properties + root["type"] = info.class_name; + root["left"] = left.JsonValue(); + root["top"] = top.JsonValue(); + root["right"] = right.JsonValue(); + root["bottom"] = bottom.JsonValue(); + + // return JsonValue + return root; +} + +// Load JSON string into this object +void Crop::SetJson(string value) { + + // Parse JSON string into JSON objects + Json::Value root; + Json::Reader reader; + bool success = reader.parse( value, root ); + if (!success) + // Raise exception + throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); + + try + { + // Set all values that match + SetJsonValue(root); + } + catch (exception e) + { + // Error parsing JSON (or missing keys) + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + } +} + +// Load Json::JsonValue into this object +void Crop::SetJsonValue(Json::Value root) { + + // Set parent data + EffectBase::SetJsonValue(root); + + // Set data from Json (if key is found) + if (!root["left"].isNull()) + left.SetJsonValue(root["left"]); + if (!root["top"].isNull()) + top.SetJsonValue(root["top"]); + if (!root["right"].isNull()) + right.SetJsonValue(root["right"]); + if (!root["bottom"].isNull()) + bottom.SetJsonValue(root["bottom"]); +} + +// Get all properties for a specific frame +string Crop::PropertiesJSON(int64_t requested_frame) { + + // Generate JSON properties list + Json::Value root; + root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); + root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame); + root["start"] = add_property_json("Start", Start(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 1000 * 60 * 30, true, requested_frame); + + // Keyframes + root["left"] = add_property_json("Left Size", left.GetValue(requested_frame), "float", "", &left, 0.0, 1.0, false, requested_frame); + root["top"] = add_property_json("Top Size", top.GetValue(requested_frame), "float", "", &top, 0.0, 1.0, false, requested_frame); + root["right"] = add_property_json("Right Size", right.GetValue(requested_frame), "float", "", &right, 0.0, 1.0, false, requested_frame); + root["bottom"] = add_property_json("Bottom Size", bottom.GetValue(requested_frame), "float", "", &bottom, 0.0, 1.0, false, requested_frame); + + // Return formatted string + return root.toStyledString(); +} + diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Hue.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Hue.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Hue.cpp 1970-01-01 00:00:00.000000000 +0000 +++ 
libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Hue.cpp 2019-03-21 07:31:31.000000000 +0000
@@ -0,0 +1,173 @@
+/**
+ * @file
+ * @brief Source file for Hue effect class
+ * @author Jonathan Thomas <jonathan@openshot.org>
+ *
+ * @section LICENSE
+ *
+ * Copyright (c) 2008-2014 OpenShot Studios, LLC
+ * <http://www.openshotstudios.com/>. This file is part of
+ * OpenShot Library (libopenshot), an open-source project dedicated to
+ * delivering high quality video editing and animation solutions to the
+ * world. For more information visit <http://www.openshot.org/>.
+ *
+ * OpenShot Library (libopenshot) is free software: you can redistribute it
+ * and/or modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * OpenShot Library (libopenshot) is distributed in the hope that it will be
+ * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with OpenShot Library. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "../../include/effects/Hue.h"
+
+using namespace openshot;
+
+/// Blank constructor, useful when using Json to load the effect properties
+Hue::Hue() : Hue(0.0) {
+    // Init effect properties
+    init_effect_details();
+}
+
+// Default constructor
+Hue::Hue(Keyframe hue): hue(hue)
+{
+    // Init effect properties
+    init_effect_details();
+}
+
+// Init effect settings
+void Hue::init_effect_details()
+{
+    /// Initialize the values of the EffectInfo struct.
+    InitEffectInfo();
+
+    /// Set the effect info
+    info.class_name = "Hue";
+    info.name = "Hue";
+    info.description = "Adjust the hue / color of the frame's image.";
+    info.has_audio = false;
+    info.has_video = true;
+}
+
+// This method is required for all derived classes of EffectBase, and returns a
+// modified openshot::Frame object
+std::shared_ptr<Frame> Hue::GetFrame(std::shared_ptr<Frame> frame, int64_t frame_number)
+{
+    // Get the frame's image
+    std::shared_ptr<QImage> frame_image = frame->GetImage();
+
+    // Get the current hue percentage shift amount, and convert to degrees
+    double degrees = 360.0 * hue.GetValue(frame_number);
+    float cosA = cos(degrees*3.14159265f/180);
+    float sinA = sin(degrees*3.14159265f/180);
+
+    // Calculate a rotation matrix for the RGB colorspace (based on the current hue shift keyframe value)
+    float matrix[3][3] = {{cosA + (1.0f - cosA) / 3.0f, 1.0f/3.0f * (1.0f - cosA) - sqrtf(1.0f/3.0f) * sinA, 1.0f/3.0f * (1.0f - cosA) + sqrtf(1.0f/3.0f) * sinA},
+                          {1.0f/3.0f * (1.0f - cosA) + sqrtf(1.0f/3.0f) * sinA, cosA + 1.0f/3.0f*(1.0f - cosA), 1.0f/3.0f * (1.0f - cosA) - sqrtf(1.0f/3.0f) * sinA},
+                          {1.0f/3.0f * (1.0f - cosA) - sqrtf(1.0f/3.0f) * sinA, 1.0f/3.0f * (1.0f - cosA) + sqrtf(1.0f/3.0f) * sinA, cosA + 1.0f/3.0f * (1.0f - cosA)}};
+
+    // Loop through pixels
+    unsigned char *pixels = (unsigned char *) frame_image->bits();
+    for (int pixel = 0, byte_index=0; pixel < frame_image->width() * frame_image->height(); pixel++, byte_index+=4)
+    {
+        // Get the RGB values from the pixel
+        int R = pixels[byte_index];
+        int G = pixels[byte_index + 1];
+        int B = pixels[byte_index + 2];
+        int A = pixels[byte_index + 3];
+
+        // Multiply each color by the hue rotation matrix
+        float rx = constrain(R * matrix[0][0] + G * matrix[0][1] + B * matrix[0][2]);
+        float gx = constrain(R * matrix[1][0] + G * matrix[1][1] + B * matrix[1][2]);
+        float bx =
constrain(R * matrix[2][0] + G * matrix[2][1] + B * matrix[2][2]); + + // Set all pixels to new value + pixels[byte_index] = rx; + pixels[byte_index + 1] = gx; + pixels[byte_index + 2] = bx; + pixels[byte_index + 3] = A; // leave the alpha value alone + } + + // return the modified frame + return frame; +} + +// Generate JSON string of this object +string Hue::Json() { + + // Return formatted string + return JsonValue().toStyledString(); +} + +// Generate Json::JsonValue for this object +Json::Value Hue::JsonValue() { + + // Create root json object + Json::Value root = EffectBase::JsonValue(); // get parent properties + root["type"] = info.class_name; + root["hue"] = hue.JsonValue(); + + // return JsonValue + return root; +} + +// Load JSON string into this object +void Hue::SetJson(string value) { + + // Parse JSON string into JSON objects + Json::Value root; + Json::Reader reader; + bool success = reader.parse( value, root ); + if (!success) + // Raise exception + throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); + + try + { + // Set all values that match + SetJsonValue(root); + } + catch (exception e) + { + // Error parsing JSON (or missing keys) + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + } +} + +// Load Json::JsonValue into this object +void Hue::SetJsonValue(Json::Value root) { + + // Set parent data + EffectBase::SetJsonValue(root); + + // Set data from Json (if key is found) + if (!root["hue"].isNull()) + hue.SetJsonValue(root["hue"]); +} + +// Get all properties for a specific frame +string Hue::PropertiesJSON(int64_t requested_frame) { + + // Generate JSON properties list + Json::Value root; + root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); + root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame); + root["start"] = add_property_json("Start", Start(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 1000 * 60 * 30, true, requested_frame); + + // Keyframes + root["hue"] = add_property_json("Hue", hue.GetValue(requested_frame), "float", "", &hue, 0.0, 1.0, false, requested_frame); + + // Return formatted string + return root.toStyledString(); +} + diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Mask.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Mask.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Mask.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Mask.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -30,14 +30,14 @@ using namespace openshot; /// Blank constructor, useful when using Json to load the effect properties -Mask::Mask() : reader(NULL), replace_image(false) { +Mask::Mask() : reader(NULL), replace_image(false), needs_refresh(true) { // Init effect properties init_effect_details(); } // Default constructor Mask::Mask(ReaderBase *mask_reader, Keyframe mask_brightness, Keyframe mask_contrast) : - reader(mask_reader), brightness(mask_brightness), contrast(mask_contrast), replace_image(false) + reader(mask_reader), brightness(mask_brightness), 
contrast(mask_contrast), replace_image(false), needs_refresh(true) { // Init effect properties init_effect_details(); @@ -57,108 +57,90 @@ info.has_video = true; } -// Constrain a color value from 0 to 255 -int Mask::constrain(int color_value) -{ - // Constrain new color from 0 to 255 - if (color_value < 0) - color_value = 0; - else if (color_value > 255) - color_value = 255; - - return color_value; -} - -// Get grayscale mask image -void Mask::set_grayscale_mask(std::shared_ptr mask_frame_image, int width, int height, float brightness, float contrast) -{ - // Get pixels for mask image - unsigned char *pixels = (unsigned char *) mask_frame_image->bits(); - - // Convert the mask image to grayscale - // Loop through pixels - for (int pixel = 0, byte_index=0; pixel < mask_frame_image->width() * mask_frame_image->height(); pixel++, byte_index+=4) - { - // Get the RGB values from the pixel - int R = pixels[byte_index]; - int G = pixels[byte_index + 1]; - int B = pixels[byte_index + 2]; - - // Get the average luminosity - int gray_value = qGray(R, G, B); - - // Adjust the contrast - float factor = (259 * (contrast + 255)) / (255 * (259 - contrast)); - gray_value = constrain((factor * (gray_value - 128)) + 128); - - // Adjust the brightness - gray_value += (255 * brightness); - - // Constrain the value from 0 to 255 - gray_value = constrain(gray_value); - - // Set all pixels to gray value - pixels[byte_index] = gray_value; - pixels[byte_index + 1] = gray_value; - pixels[byte_index + 2] = gray_value; - pixels[byte_index + 3] = 255; - } -} - // This method is required for all derived classes of EffectBase, and returns a // modified openshot::Frame object -std::shared_ptr Mask::GetFrame(std::shared_ptr frame, int64_t frame_number) -{ +std::shared_ptr Mask::GetFrame(std::shared_ptr frame, int64_t frame_number) { // Get the mask image (from the mask reader) std::shared_ptr frame_image = frame->GetImage(); // Check if mask reader is open - if (reader && !reader->IsOpen()) - #pragma omp critical (open_mask_reader) - reader->Open(); + #pragma omp critical (open_mask_reader) + { + if (reader && !reader->IsOpen()) + reader->Open(); + } // No reader (bail on applying the mask) if (!reader) return frame; // Get mask image (if missing or different size than frame image) - if (!original_mask || !reader->info.has_single_image || - (original_mask && original_mask->size() != frame_image->size())) { - #pragma omp critical (open_mask_reader) - { + #pragma omp critical (open_mask_reader) + { + if (!original_mask || !reader->info.has_single_image || needs_refresh || + (original_mask && original_mask->size() != frame_image->size())) { + // Only get mask if needed - std::shared_ptr mask_without_sizing = std::shared_ptr(new QImage(*reader->GetFrame(frame_number)->GetImage())); + std::shared_ptr mask_without_sizing = std::shared_ptr( + new QImage(*reader->GetFrame(frame_number)->GetImage())); // Resize mask image to match frame size original_mask = std::shared_ptr(new QImage( mask_without_sizing->scaled(frame_image->width(), frame_image->height(), Qt::IgnoreAspectRatio, - Qt::SmoothTransformation))); + Qt::SmoothTransformation))); } } - // Convert mask to grayscale and resize to frame size - std::shared_ptr mask = std::shared_ptr(new QImage(*original_mask)); - set_grayscale_mask(mask, frame_image->width(), frame_image->height(), brightness.GetValue(frame_number), contrast.GetValue(frame_number)); + // Refresh no longer needed + needs_refresh = false; - // Get pixels for frame image + // Get pixel arrays unsigned char 
*pixels = (unsigned char *) frame_image->bits(); - unsigned char *mask_pixels = (unsigned char *) mask->bits(); + unsigned char *mask_pixels = (unsigned char *) original_mask->bits(); - // Convert the mask image to grayscale - // Loop through pixels - for (int pixel = 0, byte_index=0; pixel < frame_image->width() * frame_image->height(); pixel++, byte_index+=4) + int R = 0; + int G = 0; + int B = 0; + int A = 0; + int gray_value = 0; + float factor = 0.0; + double contrast_value = (contrast.GetValue(frame_number)); + double brightness_value = (brightness.GetValue(frame_number)); + + // Loop through mask pixels, and apply average gray value to frame alpha channel + for (int pixel = 0, byte_index=0; pixel < original_mask->width() * original_mask->height(); pixel++, byte_index+=4) { // Get the RGB values from the pixel - int Frame_Alpha = pixels[byte_index + 3]; - int Mask_Value = constrain(Frame_Alpha - (int)mask_pixels[byte_index]); // Red pixel (all colors should have the same value here) + R = mask_pixels[byte_index]; + G = mask_pixels[byte_index + 1]; + B = mask_pixels[byte_index + 2]; - // Set all pixels to gray value - pixels[byte_index + 3] = Mask_Value; - } + // Get the average luminosity + gray_value = qGray(R, G, B); - // Replace the frame's image with the current mask (good for debugging) - if (replace_image) - frame->AddImage(mask); // not typically called when using a mask + // Adjust the contrast + factor = (259 * (contrast_value + 255)) / (255 * (259 - contrast_value)); + gray_value = constrain((factor * (gray_value - 128)) + 128); + + // Adjust the brightness + gray_value += (255 * brightness_value); + + // Constrain the value from 0 to 255 + gray_value = constrain(gray_value); + + // Set the alpha channel to the gray value + if (replace_image) { + // Replace frame pixels with gray value + pixels[byte_index + 0] = gray_value; + pixels[byte_index + 1] = gray_value; + pixels[byte_index + 2] = gray_value; + } else { + // Set alpha channel + A = pixels[byte_index + 3]; + pixels[byte_index + 3] = constrain(A - gray_value); + } + + } // return the modified frame return frame; @@ -227,47 +209,51 @@ contrast.SetJsonValue(root["contrast"]); if (!root["reader"].isNull()) // does Json contain a reader? { - - if (!root["reader"]["type"].isNull()) // does the reader Json contain a 'type'? + #pragma omp critical (open_mask_reader) { - // Close previous reader (if any) - if (reader) + // This reader has changed, so refresh cached assets + needs_refresh = true; + + if (!root["reader"]["type"].isNull()) // does the reader Json contain a 'type'? 
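// The Mask changes above introduce a needs_refresh flag and wrap both the
// reader swap (here in SetJsonValue) and the cached-mask rebuild (in GetFrame)
// in the same named OpenMP critical section, so a JSON update cannot race a
// parallel render. The sketch below only illustrates that pattern under
// simplified, assumed types: DemoReader / DemoMask are not libopenshot
// classes, and shared_ptr stands in for the raw reader pointer used above.
#include <memory>
#include <string>
#include <utility>

struct DemoReader {                    // stand-in for openshot::ReaderBase
    explicit DemoReader(std::string p) : path(std::move(p)) {}
    std::string path;
};

class DemoMask {
public:
    // Called from a (possibly parallel) rendering loop
    void GetFrame() {
        #pragma omp critical (open_mask_reader)
        {
            // Rebuild anything derived from the reader only when it is stale
            if (reader && (needs_refresh || !cached_mask)) {
                cached_mask = std::make_shared<std::string>(reader->path);
                needs_refresh = false;
            }
        }
        // ... composite the cached mask into the frame's alpha channel ...
    }

    // Called when the effect's JSON changes
    void SetReader(std::string new_path) {
        #pragma omp critical (open_mask_reader)
        {
            reader = std::make_shared<DemoReader>(std::move(new_path));
            needs_refresh = true;      // cached assets are now stale
        }
    }

private:
    std::shared_ptr<DemoReader> reader;
    std::shared_ptr<std::string> cached_mask;
    bool needs_refresh = true;
};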
{ - // Close and delete existing reader (if any) - reader->Close(); - delete reader; - reader = NULL; - } + // Close previous reader (if any) + if (reader) { + // Close and delete existing reader (if any) + reader->Close(); + delete reader; + reader = NULL; + } - // Create new reader (and load properties) - string type = root["reader"]["type"].asString(); + // Create new reader (and load properties) + string type = root["reader"]["type"].asString(); - if (type == "FFmpegReader") { + if (type == "FFmpegReader") { - // Create new reader - reader = new FFmpegReader(root["reader"]["path"].asString()); - reader->SetJsonValue(root["reader"]); + // Create new reader + reader = new FFmpegReader(root["reader"]["path"].asString()); + reader->SetJsonValue(root["reader"]); -#ifdef USE_IMAGEMAGICK - } else if (type == "ImageReader") { + #ifdef USE_IMAGEMAGICK + } else if (type == "ImageReader") { - // Create new reader - reader = new ImageReader(root["reader"]["path"].asString()); - reader->SetJsonValue(root["reader"]); -#endif + // Create new reader + reader = new ImageReader(root["reader"]["path"].asString()); + reader->SetJsonValue(root["reader"]); + #endif - } else if (type == "QtImageReader") { + } else if (type == "QtImageReader") { - // Create new reader - reader = new QtImageReader(root["reader"]["path"].asString()); - reader->SetJsonValue(root["reader"]); + // Create new reader + reader = new QtImageReader(root["reader"]["path"].asString()); + reader->SetJsonValue(root["reader"]); - } else if (type == "ChunkReader") { + } else if (type == "ChunkReader") { - // Create new reader - reader = new ChunkReader(root["reader"]["path"].asString(), (ChunkVersion) root["reader"]["chunk_version"].asInt()); - reader->SetJsonValue(root["reader"]); + // Create new reader + reader = new ChunkReader(root["reader"]["path"].asString(), (ChunkVersion) root["reader"]["chunk_version"].asInt()); + reader->SetJsonValue(root["reader"]); + } } } @@ -296,6 +282,11 @@ root["brightness"] = add_property_json("Brightness", brightness.GetValue(requested_frame), "float", "", &brightness, -1.0, 1.0, false, requested_frame); root["contrast"] = add_property_json("Contrast", contrast.GetValue(requested_frame), "float", "", &contrast, 0, 20, false, requested_frame); + if (reader) + root["reader"] = add_property_json("Source", 0.0, "reader", reader->Json(), NULL, 0, 1, false, requested_frame); + else + root["reader"] = add_property_json("Source", 0.0, "reader", "{}", NULL, 0, 1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Pixelate.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Pixelate.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Pixelate.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Pixelate.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,196 @@ +/** + * @file + * @brief Source file for Pixelate effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . 
+ * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "../../include/effects/Pixelate.h" + +using namespace openshot; + +/// Blank constructor, useful when using Json to load the effect properties +Pixelate::Pixelate() : pixelization(0.7), left(0.0), top(0.0), right(0.0), bottom(0.0) { + // Init effect properties + init_effect_details(); +} + +// Default constructor +Pixelate::Pixelate(Keyframe pixelization, Keyframe left, Keyframe top, Keyframe right, Keyframe bottom) : + pixelization(pixelization), left(left), top(top), right(right), bottom(bottom) +{ + // Init effect properties + init_effect_details(); +} + +// Init effect settings +void Pixelate::init_effect_details() +{ + /// Initialize the values of the EffectInfo struct. + InitEffectInfo(); + + /// Set the effect info + info.class_name = "Pixelate"; + info.name = "Pixelate"; + info.description = "Pixelate (increase or decrease) the number of visible pixels."; + info.has_audio = false; + info.has_video = true; +} + +// This method is required for all derived classes of EffectBase, and returns a +// modified openshot::Frame object +std::shared_ptr Pixelate::GetFrame(std::shared_ptr frame, int64_t frame_number) +{ + // Get the frame's image + std::shared_ptr frame_image = frame->GetImage(); + + // Get current keyframe values + double pixelization_value = 1.0 - min(fabs(pixelization.GetValue(frame_number)), 1.0); + double left_value = left.GetValue(frame_number); + double top_value = top.GetValue(frame_number); + double right_value = right.GetValue(frame_number); + double bottom_value = bottom.GetValue(frame_number); + + if (pixelization_value > 0.0) { + // Resize frame image smaller (based on pixelization value) + std::shared_ptr smaller_frame_image = std::shared_ptr(new QImage(frame_image->scaledToWidth(max(frame_image->width() * pixelization_value, 2.0), Qt::SmoothTransformation))); + + // Resize image back to original size (with no smoothing to create pixelated image) + std::shared_ptr pixelated_image = std::shared_ptr(new QImage(smaller_frame_image->scaledToWidth(frame_image->width(), Qt::FastTransformation).convertToFormat(QImage::Format_RGBA8888))); + + // Get pixel array pointer + unsigned char *pixels = (unsigned char *) frame_image->bits(); + unsigned char *pixelated_pixels = (unsigned char *) pixelated_image->bits(); + + // Get pixels sizes of all margins + int top_bar_height = top_value * frame_image->height(); + int bottom_bar_height = bottom_value * frame_image->height(); + int left_bar_width = left_value * frame_image->width(); + int right_bar_width = right_value * frame_image->width(); + + // Loop through rows + for (int row = 0; row < frame_image->height(); row++) { + + // Copy pixelated pixels into original frame image (where needed) + if ((row >= top_bar_height) && (row <= frame_image->height() - bottom_bar_height)) { + memcpy(&pixels[(row * frame_image->width() + 
left_bar_width) * 4], &pixelated_pixels[(row * frame_image->width() + left_bar_width) * 4], sizeof(char) * (frame_image->width() - left_bar_width - right_bar_width) * 4); + } + } + + // Cleanup temp images + smaller_frame_image.reset(); + pixelated_image.reset(); + } + + // return the modified frame + return frame; +} + +// Generate JSON string of this object +string Pixelate::Json() { + + // Return formatted string + return JsonValue().toStyledString(); +} + +// Generate Json::JsonValue for this object +Json::Value Pixelate::JsonValue() { + + // Create root json object + Json::Value root = EffectBase::JsonValue(); // get parent properties + root["type"] = info.class_name; + root["pixelization"] = pixelization.JsonValue(); + root["left"] = left.JsonValue(); + root["top"] = top.JsonValue(); + root["right"] = right.JsonValue(); + root["bottom"] = bottom.JsonValue(); + + // return JsonValue + return root; +} + +// Load JSON string into this object +void Pixelate::SetJson(string value) { + + // Parse JSON string into JSON objects + Json::Value root; + Json::Reader reader; + bool success = reader.parse( value, root ); + if (!success) + // Raise exception + throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); + + try + { + // Set all values that match + SetJsonValue(root); + } + catch (exception e) + { + // Error parsing JSON (or missing keys) + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + } +} + +// Load Json::JsonValue into this object +void Pixelate::SetJsonValue(Json::Value root) { + + // Set parent data + EffectBase::SetJsonValue(root); + + // Set data from Json (if key is found) + if (!root["pixelization"].isNull()) + pixelization.SetJsonValue(root["pixelization"]); + if (!root["left"].isNull()) + left.SetJsonValue(root["left"]); + if (!root["top"].isNull()) + top.SetJsonValue(root["top"]); + if (!root["right"].isNull()) + right.SetJsonValue(root["right"]); + if (!root["bottom"].isNull()) + bottom.SetJsonValue(root["bottom"]); +} + +// Get all properties for a specific frame +string Pixelate::PropertiesJSON(int64_t requested_frame) { + + // Generate JSON properties list + Json::Value root; + root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); + root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame); + root["start"] = add_property_json("Start", Start(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 1000 * 60 * 30, true, requested_frame); + + // Keyframes + root["pixelization"] = add_property_json("Pixelization", pixelization.GetValue(requested_frame), "float", "", &pixelization, 0.0, 0.9999, false, requested_frame); + root["left"] = add_property_json("Left Margin", left.GetValue(requested_frame), "float", "", &left, 0.0, 1.0, false, requested_frame); + root["top"] = add_property_json("Top Margin", top.GetValue(requested_frame), "float", "", &top, 0.0, 1.0, false, requested_frame); + root["right"] = add_property_json("Right Margin", right.GetValue(requested_frame), "float", "", &right, 0.0, 1.0, false, requested_frame); + root["bottom"] = add_property_json("Bottom Margin", bottom.GetValue(requested_frame), "float", "", 
&bottom, 0.0, 1.0, false, requested_frame); + + // Return formatted string + return root.toStyledString(); +} + diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Saturation.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Saturation.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Saturation.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Saturation.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -56,18 +56,6 @@ info.has_video = true; } -// Constrain a color value from 0 to 255 -int Saturation::constrain(int color_value) -{ - // Constrain new color from 0 to 255 - if (color_value < 0) - color_value = 0; - else if (color_value > 255) - color_value = 255; - - return color_value; -} - // This method is required for all derived classes of EffectBase, and returns a // modified openshot::Frame object std::shared_ptr Saturation::GetFrame(std::shared_ptr frame, int64_t frame_number) diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Shift.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Shift.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Shift.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Shift.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,207 @@ +/** + * @file + * @brief Source file for Shift effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "../../include/effects/Shift.h" + +using namespace openshot; + +/// Blank constructor, useful when using Json to load the effect properties +Shift::Shift() : x(0.0), y(0.0) { + // Init effect properties + init_effect_details(); +} + +// Default constructor +Shift::Shift(Keyframe x, Keyframe y) : x(x), y(y) +{ + // Init effect properties + init_effect_details(); +} + +// Init effect settings +void Shift::init_effect_details() +{ + /// Initialize the values of the EffectInfo struct. 
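// Shift::GetFrame() below (and ColorShift::GetFrame() earlier in this diff)
// implements wrap-around shifting with two memcpy calls per row: the part of
// the row that stays on screen is moved by the offset, and the part that falls
// off the edge is copied back to the opposite side. This standalone sketch
// shows the same row rotation in isolation; the function name and the use of
// std::vector instead of QImage are illustrative assumptions, not libopenshot
// code.
#include <cstring>
#include <vector>

// Rotate one RGBA row (width pixels, 4 bytes each) right by `offset` pixels,
// wrapping the overflow back to the start of the row.
void shift_row_right(unsigned char *row, int width, int offset)
{
    offset %= width;
    if (offset <= 0)
        return;

    // Keep a copy of the original row, then reassemble it shifted
    std::vector<unsigned char> temp(row, row + width * 4);
    std::memcpy(row + offset * 4, temp.data(), (width - offset) * 4);  // left part moves right
    std::memcpy(row, temp.data() + (width - offset) * 4, offset * 4);  // right part wraps to the front
}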
+ InitEffectInfo(); + + /// Set the effect info + info.class_name = "Shift"; + info.name = "Shift"; + info.description = "Shift the image up, down, left, and right (with infinite wrapping)."; + info.has_audio = false; + info.has_video = true; +} + +// This method is required for all derived classes of EffectBase, and returns a +// modified openshot::Frame object +std::shared_ptr Shift::GetFrame(std::shared_ptr frame, int64_t frame_number) +{ + // Get the frame's image + std::shared_ptr frame_image = frame->GetImage(); + unsigned char *pixels = (unsigned char *) frame_image->bits(); + + // Get the current shift amount, and clamp to range (-1 to 1 range) + double x_shift = x.GetValue(frame_number); + double x_shift_limit = fmod(abs(x_shift), 1.0); + double y_shift = y.GetValue(frame_number); + double y_shift_limit = fmod(abs(y_shift), 1.0); + + // Declare temp arrays to hold pixels while we move things around + unsigned char *temp_row = new unsigned char[frame_image->width() * 4](); + + // X-SHIFT + // Loop through rows + for (int row = 0; row < frame_image->height(); row++) { + // Copy current row's pixels + int starting_row_pixel = row * frame_image->width(); + memcpy(temp_row, &pixels[starting_row_pixel * 4], sizeof(char) * frame_image->width() * 4); + + // Replace current row with left part of the pixels + if (x_shift > 0.0) { + // Move left side to the right + int relative_pixel_start = (int)round(frame_image->width() * x_shift_limit); + memcpy(&pixels[(starting_row_pixel + relative_pixel_start) * 4], &temp_row[0], sizeof(char) * (frame_image->width() - relative_pixel_start) * 4); + + // Move right side to the left + memcpy(&pixels[starting_row_pixel * 4], &temp_row[(frame_image->width() - relative_pixel_start) * 4], sizeof(char) * relative_pixel_start * 4); + } else if (x_shift < 0.0) { + // Move right side to the left + int relative_pixel_start = (int)round(frame_image->width() * x_shift_limit); + memcpy(&pixels[starting_row_pixel * 4], &temp_row[relative_pixel_start * 4], sizeof(char) * (frame_image->width() - relative_pixel_start) * 4); + + // Move left side to the right + memcpy(&pixels[(starting_row_pixel + (frame_image->width() - relative_pixel_start)) * 4], &temp_row[0], sizeof(char) * relative_pixel_start * 4); + } + } + + // Make temp copy of pixels for Y-SHIFT + unsigned char *temp_image = new unsigned char[frame_image->width() * frame_image->height() * 4](); + memcpy(temp_image, pixels, sizeof(char) * frame_image->width() * frame_image->height() * 4); + + // Y-SHIFT + // Replace current row with left part of the pixels + if (y_shift > 0.0) { + // Move top side to bottom + int relative_pixel_start = frame_image->width() * (int)round(frame_image->height() * y_shift_limit); + memcpy(&pixels[relative_pixel_start * 4], temp_image, sizeof(char) * ((frame_image->width() * frame_image->height()) - relative_pixel_start) * 4); + + // Move bottom side to top + memcpy(pixels, &temp_image[((frame_image->width() * frame_image->height()) - relative_pixel_start) * 4], sizeof(char) * relative_pixel_start * 4); + + } else if (y_shift < 0.0) { + // Move bottom side to top + int relative_pixel_start = frame_image->width() * (int)round(frame_image->height() * y_shift_limit); + memcpy(pixels, &temp_image[relative_pixel_start * 4], sizeof(char) * ((frame_image->width() * frame_image->height()) - relative_pixel_start) * 4); + + // Move left side to the right + memcpy(&pixels[((frame_image->width() * frame_image->height()) - relative_pixel_start) * 4], temp_image, sizeof(char) * relative_pixel_start 
* 4); + } + + // Delete arrays + delete[] temp_row; + delete[] temp_image; + + // return the modified frame + return frame; +} + +// Generate JSON string of this object +string Shift::Json() { + + // Return formatted string + return JsonValue().toStyledString(); +} + +// Generate Json::JsonValue for this object +Json::Value Shift::JsonValue() { + + // Create root json object + Json::Value root = EffectBase::JsonValue(); // get parent properties + root["type"] = info.class_name; + root["x"] = x.JsonValue(); + root["y"] = y.JsonValue(); + + // return JsonValue + return root; +} + +// Load JSON string into this object +void Shift::SetJson(string value) { + + // Parse JSON string into JSON objects + Json::Value root; + Json::Reader reader; + bool success = reader.parse( value, root ); + if (!success) + // Raise exception + throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); + + try + { + // Set all values that match + SetJsonValue(root); + } + catch (exception e) + { + // Error parsing JSON (or missing keys) + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + } +} + +// Load Json::JsonValue into this object +void Shift::SetJsonValue(Json::Value root) { + + // Set parent data + EffectBase::SetJsonValue(root); + + // Set data from Json (if key is found) + if (!root["x"].isNull()) + x.SetJsonValue(root["x"]); + if (!root["y"].isNull()) + y.SetJsonValue(root["y"]); +} + +// Get all properties for a specific frame +string Shift::PropertiesJSON(int64_t requested_frame) { + + // Generate JSON properties list + Json::Value root; + root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); + root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame); + root["start"] = add_property_json("Start", Start(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 1000 * 60 * 30, true, requested_frame); + + // Keyframes + root["x"] = add_property_json("X Shift", x.GetValue(requested_frame), "float", "", &x, -1, 1, false, requested_frame); + root["y"] = add_property_json("Y Shift", y.GetValue(requested_frame), "float", "", &y, -1, 1, false, requested_frame); + + // Return formatted string + return root.toStyledString(); +} + diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Wave.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Wave.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/effects/Wave.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/effects/Wave.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,199 @@ +/** + * @file + * @brief Source file for Wave effect class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . 
+ * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "../../include/effects/Wave.h" + +using namespace openshot; + +/// Blank constructor, useful when using Json to load the effect properties +Wave::Wave() : wavelength(0.06), amplitude(0.3), multiplier(0.2), shift_x(0.0), speed_y(0.2) { + // Init effect properties + init_effect_details(); +} + +// Default constructor +Wave::Wave(Keyframe wavelength, Keyframe amplitude, Keyframe multiplier, Keyframe shift_x, Keyframe speed_y) + : wavelength(wavelength), amplitude(amplitude), multiplier(multiplier), shift_x(shift_x), speed_y(speed_y) +{ + // Init effect properties + init_effect_details(); +} + +// Init effect settings +void Wave::init_effect_details() +{ + /// Initialize the values of the EffectInfo struct. + InitEffectInfo(); + + /// Set the effect info + info.class_name = "Wave"; + info.name = "Wave"; + info.description = "Distort the frame's image into a wave pattern."; + info.has_audio = false; + info.has_video = true; + +} + +// This method is required for all derived classes of EffectBase, and returns a +// modified openshot::Frame object +std::shared_ptr Wave::GetFrame(std::shared_ptr frame, int64_t frame_number) +{ + // Get the frame's image + std::shared_ptr frame_image = frame->GetImage(); + + // Get pixels for frame image + unsigned char *pixels = (unsigned char *) frame_image->bits(); + + // Make temp copy of pixels before we start changing them + unsigned char *temp_image = new unsigned char[frame_image->width() * frame_image->height() * 4](); + memcpy(temp_image, pixels, sizeof(char) * frame_image->width() * frame_image->height() * 4); + + // Get current keyframe values + double time = frame_number;//abs(((frame_number + 255) % 510) - 255); + double wavelength_value = wavelength.GetValue(frame_number); + double amplitude_value = amplitude.GetValue(frame_number); + double multiplier_value = multiplier.GetValue(frame_number); + double shift_x_value = shift_x.GetValue(frame_number); + double speed_y_value = speed_y.GetValue(frame_number); + + // Loop through pixels + for (int pixel = 0, byte_index=0; pixel < frame_image->width() * frame_image->height(); pixel++, byte_index+=4) + { + // Calculate X and Y pixel coordinates + int Y = pixel / frame_image->width(); + + // Calculate wave pixel offsets + float noiseVal = (100 + Y * 0.001) * multiplier_value; // Time and time multiplier (to make the wave move) + float noiseAmp = noiseVal * amplitude_value; // Apply amplitude / height of the wave + float waveformVal = sin((Y * wavelength_value) + (time * speed_y_value)); // Waveform algorithm on y-axis + float waveVal = (waveformVal + shift_x_value) * noiseAmp; // Shifts pixels on the x-axis + + int source_X = round(pixel + waveVal) * 4; + if (source_X < 0) + source_X = 0; + if (source_X > frame_image->width() * frame_image->height() * 4 * sizeof(char)) + source_X = (frame_image->width() * 
frame_image->height() * 4 * sizeof(char)) - (sizeof(char) * 4);
+
+        // Calculate source array location, and target array location, and copy the 4 color values
+        memcpy(&pixels[byte_index], &temp_image[source_X], sizeof(char) * 4);
+    }
+
+    // Delete arrays
+    delete[] temp_image;
+
+    // return the modified frame
+    return frame;
+}
+
+// Generate JSON string of this object
+string Wave::Json() {
+
+    // Return formatted string
+    return JsonValue().toStyledString();
+}
+
+// Generate Json::JsonValue for this object
+Json::Value Wave::JsonValue() {
+
+    // Create root json object
+    Json::Value root = EffectBase::JsonValue(); // get parent properties
+    root["type"] = info.class_name;
+    root["wavelength"] = wavelength.JsonValue();
+    root["amplitude"] = amplitude.JsonValue();
+    root["multiplier"] = multiplier.JsonValue();
+    root["shift_x"] = shift_x.JsonValue();
+    root["speed_y"] = speed_y.JsonValue();
+
+    // return JsonValue
+    return root;
+}
+
+// Load JSON string into this object
+void Wave::SetJson(string value) {
+
+    // Parse JSON string into JSON objects
+    Json::Value root;
+    Json::Reader reader;
+    bool success = reader.parse( value, root );
+    if (!success)
+        // Raise exception
+        throw InvalidJSON("JSON could not be parsed (or is invalid)", "");
+
+    try
+    {
+        // Set all values that match
+        SetJsonValue(root);
+    }
+    catch (exception e)
+    {
+        // Error parsing JSON (or missing keys)
+        throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", "");
+    }
+}
+
+// Load Json::JsonValue into this object
+void Wave::SetJsonValue(Json::Value root) {
+
+    // Set parent data
+    EffectBase::SetJsonValue(root);
+
+    // Set data from Json (if key is found)
+    if (!root["wavelength"].isNull())
+        wavelength.SetJsonValue(root["wavelength"]);
+    if (!root["amplitude"].isNull())
+        amplitude.SetJsonValue(root["amplitude"]);
+    if (!root["multiplier"].isNull())
+        multiplier.SetJsonValue(root["multiplier"]);
+    if (!root["shift_x"].isNull())
+        shift_x.SetJsonValue(root["shift_x"]);
+    if (!root["speed_y"].isNull())
+        speed_y.SetJsonValue(root["speed_y"]);
+}
+
+// Get all properties for a specific frame
+string Wave::PropertiesJSON(int64_t requested_frame) {
+
+    // Generate JSON properties list
+    Json::Value root;
+    root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame);
+    root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame);
+    root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame);
+    root["start"] = add_property_json("Start", Start(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame);
+    root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame);
+    root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 1000 * 60 * 30, true, requested_frame);
+
+    // Keyframes
+    root["wavelength"] = add_property_json("Wave length", wavelength.GetValue(requested_frame), "float", "", &wavelength, 0.0, 3.0, false, requested_frame);
+    root["amplitude"] = add_property_json("Amplitude", amplitude.GetValue(requested_frame), "float", "", &amplitude, 0.0, 5.0, false, requested_frame);
+    root["multiplier"] = add_property_json("Multiplier", multiplier.GetValue(requested_frame), "float", "", &multiplier, 0.0, 10.0, false, requested_frame);
+    root["shift_x"] = add_property_json("X Shift", shift_x.GetValue(requested_frame), "float", "", &shift_x, 0.0, 1000.0, false, requested_frame);
+    root["speed_y"] =
add_property_json("Vertical speed", speed_y.GetValue(requested_frame), "float", "", &speed_y, 0.0, 300.0, false, requested_frame); + + // Return formatted string + return root.toStyledString(); +} + diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/examples/Example.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/examples/Example.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/examples/Example.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/examples/Example.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -36,34 +36,44 @@ int main(int argc, char* argv[]) { - // Create and open reader - FFmpegReader r("/home/jonathan/sintel-120fps-crash.mp4"); - r.Open(); - - // Enable debug logging - ZmqLogger::Instance()->Enable(false); - ZmqLogger::Instance()->Path("/home/jonathan/.openshot_qt/libopenshot2.log"); - CrashHandler::Instance(); - - // Loop a few times - for (int attempt = 1; attempt < 10; attempt++) { - cout << "** Attempt " << attempt << " **" << endl; - - // Read every frame in reader as fast as possible - for (int64_t frame_number = 1; frame_number < r.info.video_length; frame_number++) { - // Get frame object - std::shared_ptr f = r.GetFrame(frame_number); - - // Print frame numbers - cout << frame_number << " [" << f->number << "], " << flush; - if (frame_number % 10 == 0) - cout << endl; - } + FFmpegReader r9("/home/jonathan/Videos/sintel_trailer-720p.mp4"); + r9.Open(); + r9.DisplayInfo(); + + /* WRITER ---------------- */ + FFmpegWriter w9("/home/jonathan/metadata.mp4"); + + // Set options + w9.SetAudioOptions(true, "libmp3lame", r9.info.sample_rate, r9.info.channels, r9.info.channel_layout, 128000); + w9.SetVideoOptions(true, "libx264", r9.info.fps, 1024, 576, Fraction(1,1), false, false, 3000000); + + w9.info.metadata["title"] = "testtest"; + w9.info.metadata["artist"] = "aaa"; + w9.info.metadata["album"] = "bbb"; + w9.info.metadata["year"] = "2015"; + w9.info.metadata["description"] = "ddd"; + w9.info.metadata["comment"] = "eee"; + w9.info.metadata["comment"] = "comment"; + w9.info.metadata["copyright"] = "copyright OpenShot!"; + + // Open writer + w9.Open(); + + for (long int frame = 1; frame <= 100; frame++) + { + //int frame_number = (rand() % 750) + 1; + int frame_number = frame; + std::shared_ptr f = r9.GetFrame(frame_number); + w9.WriteFrame(f); } - cout << "Completed successfully!" << endl; - // Close reader - r.Close(); + // Close writer & reader + w9.Close(); + + // Close timeline + r9.Close(); + + cout << "Completed successfully!" 
<< endl; return 0; } \ No newline at end of file diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/FFmpegReader.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/FFmpegReader.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/FFmpegReader.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/FFmpegReader.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -37,14 +37,15 @@ audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0), prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0), - current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), packet(NULL) { + current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), + packet(NULL) { // Initialize FFMpeg, and register all formats and codecs - av_register_all(); - avcodec_register_all(); + AV_REGISTER_ALL + AVCODEC_REGISTER_ALL // Init cache - working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 30, info.width, info.height, info.sample_rate, info.channels); + working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * info.fps.ToDouble() * 2, info.width, info.height, info.sample_rate, info.channels); missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); @@ -58,14 +59,15 @@ audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0), prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0), - current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), packet(NULL) { + current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), + packet(NULL) { // Initialize FFMpeg, and register all formats and codecs - av_register_all(); - avcodec_register_all(); + AV_REGISTER_ALL + AVCODEC_REGISTER_ALL // Init cache - working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 30, info.width, info.height, info.sample_rate, info.channels); + working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * info.fps.ToDouble() * 2, info.width, info.height, info.sample_rate, info.channels); missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); @@ -123,11 +125,11 @@ for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) { // Is this a video stream? - if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO && videoStream < 0) { + if (AV_GET_CODEC_TYPE(pFormatCtx->streams[i]) == AVMEDIA_TYPE_VIDEO && videoStream < 0) { videoStream = i; } // Is this an audio stream? 
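// The FFmpegReader hunks in this file replace direct AVStream::codec access
// and av_register_all() with wrapper macros (AV_GET_CODEC_TYPE,
// AV_REGISTER_ALL, AVCODEC_REGISTER_ALL, AV_GET_CODEC_CONTEXT, ...). Their
// real definitions live in libopenshot's FFmpeg compatibility header, which is
// not part of this diff. The sketch below only illustrates the general shape
// such macros can take, selected on the libavformat version; the DEMO_ names
// and the exact version cut-offs are assumptions, not the actual libopenshot
// definitions.
extern "C" {
#include <libavformat/avformat.h>
}

// AVStream::codec was deprecated once AVStream::codecpar became available, so
// newer builds read the codec type from codecpar instead.
#if (LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 41, 100))
    #define DEMO_GET_CODEC_TYPE(av_stream) ((av_stream)->codecpar->codec_type)
#else
    #define DEMO_GET_CODEC_TYPE(av_stream) ((av_stream)->codec->codec_type)
#endif

// av_register_all() became unnecessary (and deprecated) in FFmpeg 4.0, so the
// call is compiled out on new enough libavformat versions.
#if (LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(58, 9, 100))
    #define DEMO_REGISTER_ALL
#else
    #define DEMO_REGISTER_ALL av_register_all();
#endif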
- if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO && audioStream < 0) { + if (AV_GET_CODEC_TYPE(pFormatCtx->streams[i]) == AVMEDIA_TYPE_AUDIO && audioStream < 0) { audioStream = i; } } @@ -142,20 +144,32 @@ // Set the codec and codec context pointers pStream = pFormatCtx->streams[videoStream]; - pCodecCtx = pFormatCtx->streams[videoStream]->codec; + + // Find the codec ID from stream + AVCodecID codecId = AV_FIND_DECODER_CODEC_ID(pStream); + + // Get codec and codec context from stream + AVCodec *pCodec = avcodec_find_decoder(codecId); + pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec); // Set number of threads equal to number of processors (not to exceed 16) - pCodecCtx->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + pCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); - // Find the decoder for the video stream - AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id); if (pCodec == NULL) { throw InvalidCodec("A valid video codec could not be found for this file.", path); } + + // Init options + AVDictionary *opts = NULL; + av_dict_set(&opts, "strict", "experimental", 0); + // Open video codec - if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) + if (avcodec_open2(pCodecCtx, pCodec, &opts) < 0) throw InvalidCodec("A video codec was found, but could not be opened.", path); + // Free options + av_dict_free(&opts); + // Update the File Info struct with video details (if a video stream is found) UpdateVideoInfo(); } @@ -168,30 +182,50 @@ // Get a pointer to the codec context for the audio stream aStream = pFormatCtx->streams[audioStream]; - aCodecCtx = pFormatCtx->streams[audioStream]->codec; + + // Find the codec ID from stream + AVCodecID codecId = AV_FIND_DECODER_CODEC_ID(aStream); + + // Get codec and codec context from stream + AVCodec *aCodec = avcodec_find_decoder(codecId); + aCodecCtx = AV_GET_CODEC_CONTEXT(aStream, aCodec); // Set number of threads equal to number of processors (not to exceed 16) - aCodecCtx->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + aCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); - // Find the decoder for the audio stream - AVCodec *aCodec = avcodec_find_decoder(aCodecCtx->codec_id); if (aCodec == NULL) { throw InvalidCodec("A valid audio codec could not be found for this file.", path); } + + // Init options + AVDictionary *opts = NULL; + av_dict_set(&opts, "strict", "experimental", 0); + // Open audio codec - if (avcodec_open2(aCodecCtx, aCodec, NULL) < 0) + if (avcodec_open2(aCodecCtx, aCodec, &opts) < 0) throw InvalidCodec("An audio codec was found, but could not be opened.", path); + // Free options + av_dict_free(&opts); + // Update the File Info struct with audio details (if an audio stream is found) UpdateAudioInfo(); } + // Add format metadata (if any) + AVDictionaryEntry *tag = NULL; + while ((tag = av_dict_get(pFormatCtx->metadata, "", tag, AV_DICT_IGNORE_SUFFIX))) { + QString str_key = tag->key; + QString str_value = tag->value; + info.metadata[str_key.toStdString()] = str_value.trimmed().toStdString(); + } + // Init previous audio location to zero previous_packet_location.frame = -1; previous_packet_location.sample_start = 0; // Adjust cache size based on size of frame and audio - working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 30, info.width, info.height, info.sample_rate, info.channels); + working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * info.fps.ToDouble() * 2, info.width, info.height, info.sample_rate, info.channels); missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, 
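
The decoder setup in the hunk above replaces direct use of the deprecated AVStream::codec context with the AV_FIND_DECODER_CODEC_ID and AV_GET_CODEC_CONTEXT wrappers, and opens the codec with a "strict"/"experimental" dictionary. The following is only an illustrative sketch of that codecpar-based workflow on FFmpeg 3.x/4.x, written without the project's macros; the helper name open_decoder is invented here and is not part of this patch.

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/dict.h>
}

// Sketch only: open a decoder for one stream using AVCodecParameters and an
// options dictionary, the pattern the wrapper macros above stand in for.
static AVCodecContext *open_decoder(AVStream *stream) {
    const AVCodec *codec = avcodec_find_decoder(stream->codecpar->codec_id);
    if (!codec)
        return NULL;                                 // no decoder available for this codec ID
    AVCodecContext *ctx = avcodec_alloc_context3(codec);
    if (!ctx)
        return NULL;
    // Copy width/height, sample rate, extradata, etc. from the stream parameters.
    if (avcodec_parameters_to_context(ctx, stream->codecpar) < 0) {
        avcodec_free_context(&ctx);
        return NULL;
    }
    AVDictionary *opts = NULL;
    av_dict_set(&opts, "strict", "experimental", 0); // allow experimental codecs, as the patch does
    int ret = avcodec_open2(ctx, codec, &opts);
    av_dict_free(&opts);
    if (ret < 0) {
        avcodec_free_context(&ctx);
        return NULL;
    }
    return ctx;
}

The patch applies the same pattern to both the video and the audio stream; only the stream index differs.
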
info.width, info.height, info.sample_rate, info.channels); final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); @@ -214,12 +248,12 @@ if (info.has_video) { avcodec_flush_buffers(pCodecCtx); - avcodec_close(pCodecCtx); + AV_FREE_CONTEXT(pCodecCtx); } if (info.has_audio) { avcodec_flush_buffers(aCodecCtx); - avcodec_close(aCodecCtx); + AV_FREE_CONTEXT(aCodecCtx); } // Clear final cache @@ -261,12 +295,12 @@ info.has_audio = true; info.file_size = pFormatCtx->pb ? avio_size(pFormatCtx->pb) : -1; info.acodec = aCodecCtx->codec->name; - info.channels = aCodecCtx->channels; - if (aCodecCtx->channel_layout == 0) - aCodecCtx->channel_layout = av_get_default_channel_layout( aCodecCtx->channels ); - info.channel_layout = (ChannelLayout) aCodecCtx->channel_layout; - info.sample_rate = aCodecCtx->sample_rate; - info.audio_bit_rate = aCodecCtx->bit_rate; + info.channels = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels; + if (AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout == 0) + AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout = av_get_default_channel_layout( AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels ); + info.channel_layout = (ChannelLayout) AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout; + info.sample_rate = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->sample_rate; + info.audio_bit_rate = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->bit_rate; // Set audio timebase info.audio_timebase.num = aStream->time_base.num; @@ -294,44 +328,56 @@ info.video_length = info.duration * info.fps.ToDouble(); info.width = 720; info.height = 480; + } + // Fix invalid video lengths for certain types of files (MP3 for example) + if (info.has_video && ((info.duration * info.fps.ToDouble()) - info.video_length > 60)) { + info.video_length = info.duration * info.fps.ToDouble(); } + // Add audio metadata (if any found) + AVDictionaryEntry *tag = NULL; + while ((tag = av_dict_get(aStream->metadata, "", tag, AV_DICT_IGNORE_SUFFIX))) { + QString str_key = tag->key; + QString str_value = tag->value; + info.metadata[str_key.toStdString()] = str_value.trimmed().toStdString(); + } } void FFmpegReader::UpdateVideoInfo() { + if (check_fps) + // Already initialized all the video metadata, no reason to do it again + return; + // Set values of FileInfo struct info.has_video = true; info.file_size = pFormatCtx->pb ? 
avio_size(pFormatCtx->pb) : -1; - info.height = pCodecCtx->height; - info.width = pCodecCtx->width; + info.height = AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->height; + info.width = AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->width; info.vcodec = pCodecCtx->codec->name; - info.video_bit_rate = pFormatCtx->bit_rate; - if (!check_fps) - { - // set frames per second (fps) - info.fps.num = pStream->avg_frame_rate.num; - info.fps.den = pStream->avg_frame_rate.den; - } + info.video_bit_rate = (pFormatCtx->bit_rate / 8); + + // set frames per second (fps) + info.fps.num = pStream->avg_frame_rate.num; + info.fps.den = pStream->avg_frame_rate.den; if (pStream->sample_aspect_ratio.num != 0) { info.pixel_ratio.num = pStream->sample_aspect_ratio.num; info.pixel_ratio.den = pStream->sample_aspect_ratio.den; } - else if (pCodecCtx->sample_aspect_ratio.num != 0) + else if (AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->sample_aspect_ratio.num != 0) { - info.pixel_ratio.num = pCodecCtx->sample_aspect_ratio.num; - info.pixel_ratio.den = pCodecCtx->sample_aspect_ratio.den; + info.pixel_ratio.num = AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->sample_aspect_ratio.num; + info.pixel_ratio.den = AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->sample_aspect_ratio.den; } else { info.pixel_ratio.num = 1; info.pixel_ratio.den = 1; } - - info.pixel_format = pCodecCtx->pix_fmt; + info.pixel_format = AV_GET_CODEC_PIXEL_FORMAT(pStream, pCodecCtx); // Calculate the DAR (display aspect ratio) Fraction size(info.width * info.pixel_ratio.num, info.height * info.pixel_ratio.den); @@ -378,18 +424,19 @@ } // Override an invalid framerate - if (info.fps.ToFloat() > 120.0f || (info.fps.num == 0 || info.fps.den == 0)) - { - // Set a few important default video settings (so audio can be divided into frames) - info.fps.num = 24; - info.fps.den = 1; - info.video_timebase.num = 1; - info.video_timebase.den = 24; - - // Calculate number of frames - info.video_length = round(info.duration * info.fps.ToDouble()); + if (info.fps.ToFloat() > 240.0f || (info.fps.num <= 0 || info.fps.den <= 0) || info.video_length <= 0) { + // Calculate FPS, duration, video bit rate, and video length manually + // by scanning through all the video stream packets + CheckFPS(); } + // Add video metadata (if any) + AVDictionaryEntry *tag = NULL; + while ((tag = av_dict_get(pStream->metadata, "", tag, AV_DICT_IGNORE_SUFFIX))) { + QString str_key = tag->key; + QString str_value = tag->value; + info.metadata[str_key.toStdString()] = str_value.trimmed().toStdString(); + } } @@ -422,55 +469,53 @@ } else { - // Create a scoped lock, allowing only a single thread to run the following code at one time - const GenericScopedLock lock(getFrameCriticalSection); - - // Check the cache a 2nd time (due to a potential previous lock) - if (has_missing_frames) - CheckMissingFrame(requested_frame); - frame = final_cache.GetFrame(requested_frame); - if (frame) { - // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame on 2nd look", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); - - // Return the cached frame - return frame; - } - - // Frame is not in cache - // Reset seek count - seek_count = 0; - - // Check for first frame (always need to get frame 1 before other frames, to correctly calculate offsets) - if (last_frame == 0 && requested_frame != 1) - // Get first frame - ReadStream(1); - - // Are we within X frames of the requested frame? 
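
The metadata loops added above (container metadata in Open(), stream metadata in UpdateAudioInfo() and UpdateVideoInfo()) all rely on the same libavutil dictionary walk. A self-contained sketch of that walk, with a plain std::map as the target and the QString trimming from the patch left out (copy_metadata is an invented name):

extern "C" {
#include <libavutil/dict.h>
}
#include <map>
#include <string>

// Sketch only: copy every entry of an AVDictionary (container or stream metadata)
// into a std::map, the way the added loops fill info.metadata.
static void copy_metadata(const AVDictionary *dict,
                          std::map<std::string, std::string> &out) {
    AVDictionaryEntry *tag = NULL;
    // An empty key combined with AV_DICT_IGNORE_SUFFIX matches every entry in turn.
    while ((tag = av_dict_get(dict, "", tag, AV_DICT_IGNORE_SUFFIX)))
        out[tag->key] = tag->value;
}
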
- int64_t diff = requested_frame - last_frame; - if (diff >= 1 && diff <= 20) - { - // Continue walking the stream - return ReadStream(requested_frame); - } - else - { - // Greater than 30 frames away, or backwards, we need to seek to the nearest key frame - if (enable_seek) - // Only seek if enabled - Seek(requested_frame); + #pragma omp critical (ReadStream) + { + // Check the cache a 2nd time (due to a potential previous lock) + frame = final_cache.GetFrame(requested_frame); + if (frame) { + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame on 2nd look", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); - else if (!enable_seek && diff < 0) - { - // Start over, since we can't seek, and the requested frame is smaller than our position - Close(); - Open(); + // Return the cached frame } + else { + // Frame is not in cache + // Reset seek count + seek_count = 0; + + // Check for first frame (always need to get frame 1 before other frames, to correctly calculate offsets) + if (last_frame == 0 && requested_frame != 1) + // Get first frame + ReadStream(1); + + // Are we within X frames of the requested frame? + int64_t diff = requested_frame - last_frame; + if (diff >= 1 && diff <= 20) + { + // Continue walking the stream + frame = ReadStream(requested_frame); + } + else + { + // Greater than 30 frames away, or backwards, we need to seek to the nearest key frame + if (enable_seek) + // Only seek if enabled + Seek(requested_frame); - // Then continue walking the stream - return ReadStream(requested_frame); - } + else if (!enable_seek && diff < 0) + { + // Start over, since we can't seek, and the requested frame is smaller than our position + Close(); + Open(); + } + // Then continue walking the stream + frame = ReadStream(requested_frame); + } + } + } //omp critical + return frame; } } @@ -540,16 +585,16 @@ num_packets_since_video_frame = 0; // Check the status of a seek (if any) - if (is_seeking) + if (is_seeking) #pragma omp critical (openshot_seek) - check_seek = CheckSeek(true); - else - check_seek = false; - - if (check_seek) { - // Jump to the next iteration of this loop - continue; - } + check_seek = CheckSeek(true); + else + check_seek = false; + + if (check_seek) { + // Jump to the next iteration of this loop + continue; + } // Get the AVFrame from the current packet frame_finished = GetAVFrame(); @@ -562,6 +607,12 @@ // Process Video Packet ProcessVideoPacket(requested_frame); + + if (openshot::Settings::Instance()->WAIT_FOR_VIDEO_PROCESSING_TASK) { + // Wait on each OMP task to complete before moving on to the next one. This slows + // down processing considerably, but might be more stable on some systems. 
+ #pragma omp taskwait + } } } @@ -573,7 +624,7 @@ // Check the status of a seek (if any) if (is_seeking) - #pragma omp critical (openshot_seek) + #pragma omp critical (openshot_seek) check_seek = CheckSeek(false); else check_seek = false; @@ -594,17 +645,13 @@ } // Check if working frames are 'finished' - bool is_cache_found = false; if (!is_seeking) { - // Check for any missing frames - CheckMissingFrame(requested_frame); - // Check for final frames CheckWorkingFrames(false, requested_frame); } // Check if requested 'final' frame is available - is_cache_found = (final_cache.GetFrame(requested_frame) != NULL); + bool is_cache_found = (final_cache.GetFrame(requested_frame) != NULL); // Increment frames processed packets_processed++; @@ -616,6 +663,7 @@ } // end while } // end omp single + } // end omp parallel // Debug output @@ -676,28 +724,60 @@ bool FFmpegReader::GetAVFrame() { int frameFinished = -1; + int ret = 0; // Decode video frame AVFrame *next_frame = AV_ALLOCATE_FRAME(); #pragma omp critical (packet_cache) - avcodec_decode_video2(pCodecCtx, next_frame, &frameFinished, packet); - - // is frame finished - if (frameFinished) { - // AVFrames are clobbered on the each call to avcodec_decode_video, so we - // must make a copy of the image data before this method is called again. - pFrame = new AVPicture(); - avpicture_alloc(pFrame, pCodecCtx->pix_fmt, info.width, info.height); - av_picture_copy(pFrame, (AVPicture *) next_frame, pCodecCtx->pix_fmt, info.width, info.height); - - // Detect interlaced frame (only once) - if (!check_interlace) - { - check_interlace = true; - info.interlaced_frame = next_frame->interlaced_frame; - info.top_field_first = next_frame->top_field_first; + #if IS_FFMPEG_3_2 + frameFinished = 0; + ret = avcodec_send_packet(pCodecCtx, packet); + if (ret < 0 || ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Packet not sent)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } + else { + pFrame = new AVFrame(); + while (ret >= 0) { + ret = avcodec_receive_frame(pCodecCtx, next_frame); + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + break; + } + // TODO also handle possible further frames + // Use only the first frame like avcodec_decode_video2 + if (frameFinished == 0 ) { + frameFinished = 1; + av_image_alloc(pFrame->data, pFrame->linesize, info.width, info.height, (AVPixelFormat)(pStream->codecpar->format), 1); + av_image_copy(pFrame->data, pFrame->linesize, (const uint8_t**)next_frame->data, next_frame->linesize, + (AVPixelFormat)(pStream->codecpar->format), info.width, info.height); + if (!check_interlace) { + check_interlace = true; + info.interlaced_frame = next_frame->interlaced_frame; + info.top_field_first = next_frame->top_field_first; + } + } + } + } + #else + avcodec_decode_video2(pCodecCtx, next_frame, &frameFinished, packet); + + // is frame finished + if (frameFinished) { + // AVFrames are clobbered on the each call to avcodec_decode_video, so we + // must make a copy of the image data before this method is called again. 
+ pFrame = AV_ALLOCATE_FRAME(); + avpicture_alloc((AVPicture *) pFrame, pCodecCtx->pix_fmt, info.width, info.height); + av_picture_copy((AVPicture *) pFrame, (AVPicture *) next_frame, pCodecCtx->pix_fmt, info.width, + info.height); + + // Detect interlaced frame (only once) + if (!check_interlace) { + check_interlace = true; + info.interlaced_frame = next_frame->interlaced_frame; + info.top_field_first = next_frame->top_field_first; + } + } + #endif } // deallocate the frame @@ -734,7 +814,7 @@ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckSeek (Too far, seek again)", "is_video_seek", is_video_seek, "max_seeked_frame", max_seeked_frame, "seeking_frame", seeking_frame, "seeking_pts", seeking_pts, "seek_video_frame_found", seek_video_frame_found, "seek_audio_frame_found", seek_audio_frame_found); // Seek again... to the nearest Keyframe - Seek(seeking_frame - (20 * seek_count * seek_count)); + Seek(seeking_frame - (10 * seek_count * seek_count)); } else { @@ -779,11 +859,11 @@ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessVideoPacket (Before)", "requested_frame", requested_frame, "current_frame", current_frame, "", -1, "", -1, "", -1, "", -1); // Init some things local (for OpenMP) - PixelFormat pix_fmt = pCodecCtx->pix_fmt; + PixelFormat pix_fmt = AV_GET_CODEC_PIXEL_FORMAT(pStream, pCodecCtx); int height = info.height; int width = info.width; int64_t video_length = info.video_length; - AVPicture *my_frame = pFrame; + AVFrame *my_frame = pFrame; // Add video frame to list of processing video frames const GenericScopedLock lock(processingCriticalSection); @@ -801,9 +881,53 @@ if (pFrameRGB == NULL) throw OutOfBoundsFrame("Convert Image Broke!", current_frame, video_length); - // Determine if video needs to be scaled down (for performance reasons) - // Timelines pass their size to the clips, which pass their size to the readers (as max size) - // If a clip is being scaled larger, it will set max_width and max_height = 0 (which means don't down scale) + // Determine the max size of this source image (based on the timeline's size, the scaling mode, + // and the scaling keyframes). This is a performance improvement, to keep the images as small as possible, + // without losing quality. NOTE: We cannot go smaller than the timeline itself, or the add_layer timeline + // method will scale it back to timeline size before scaling it smaller again. This needs to be fixed in + // the future. 
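
The IS_FFMPEG_3_2 branch of GetAVFrame() above swaps avcodec_decode_video2() for the send/receive decoding API. Stripped of libopenshot's image copying and caching, the canonical form of that loop is roughly the following sketch (decode_packet is an invented name):

extern "C" {
#include <libavcodec/avcodec.h>
}

// Sketch only: decode one packet with the send/receive API (FFmpeg 3.1+).
// Returns the number of frames produced, or a negative AVERROR code.
static int decode_packet(AVCodecContext *ctx, const AVPacket *pkt, AVFrame *frame) {
    int frames = 0;
    int ret = avcodec_send_packet(ctx, pkt);        // a NULL packet starts draining
    if (ret < 0)
        return ret;
    while (ret >= 0) {
        ret = avcodec_receive_frame(ctx, frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            break;                                  // needs more input, or fully drained
        if (ret < 0)
            return ret;                             // genuine decoding error
        frames++;                                   // ...consume frame->data / linesize here...
        av_frame_unref(frame);
    }
    return frames;
}

Sending a NULL packet puts the decoder into draining mode, which is how any frames still buffered inside the codec are recovered at end of stream.
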
+ int max_width = Settings::Instance()->MAX_WIDTH; + if (max_width <= 0) + max_width = info.width; + int max_height = Settings::Instance()->MAX_HEIGHT; + if (max_height <= 0) + max_height = info.height; + + Clip* parent = (Clip*) GetClip(); + if (parent) { + if (parent->scale == SCALE_FIT || parent->scale == SCALE_STRETCH) { + // Best fit or Stretch scaling (based on max timeline size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + max_width = max(float(max_width), max_width * max_scale_x); + max_height = max(float(max_height), max_height * max_scale_y); + + } else if (parent->scale == SCALE_CROP) { + // Cropping scale mode (based on max timeline size * cropped size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + QSize width_size(max_width * max_scale_x, + round(max_width / (float(info.width) / float(info.height)))); + QSize height_size(round(max_height / (float(info.height) / float(info.width))), + max_height * max_scale_y); + // respect aspect ratio + if (width_size.width() >= max_width && width_size.height() >= max_height) { + max_width = max(max_width, width_size.width()); + max_height = max(max_height, width_size.height()); + } + else { + max_width = max(max_width, height_size.width()); + max_height = max(max_height, height_size.height()); + } + + } else { + // No scaling, use original image size (slower) + max_width = info.width; + max_height = info.height; + } + } + + // Determine if image needs to be scaled (for performance reasons) int original_height = height; if (max_width != 0 && max_height != 0 && max_width < width && max_height < height) { // Override width and height (but maintain aspect ratio) @@ -823,17 +947,20 @@ } // Determine required buffer size and allocate buffer - numBytes = avpicture_get_size(PIX_FMT_RGBA, width, height); + numBytes = AV_GET_IMAGE_SIZE(PIX_FMT_RGBA, width, height); + #pragma omp critical (video_buffer) buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t)); - // Assign appropriate parts of buffer to image planes in pFrameRGB - // Note that pFrameRGB is an AVFrame, but AVFrame is a superset - // of AVPicture - avpicture_fill((AVPicture *) pFrameRGB, buffer, PIX_FMT_RGBA, width, height); + // Copy picture data from one AVFrame (or AVPicture) to another one. 
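
The block above derives max_width/max_height from the timeline size, the clip's scale mode and its scale keyframes, then falls through to the existing "override width and height (but maintain aspect ratio)" downscale. The essence of that final step is a plain fit-inside-a-box computation; a simplified sketch, with the keyframe and scale-mode handling deliberately left out (fit_within is an invented name):

#include <algorithm>
#include <cmath>

// Sketch only: shrink (never enlarge) a source resolution so it fits inside a
// maximum box while keeping the aspect ratio, the essence of the downscale step
// that follows the max_width/max_height computation above.
static void fit_within(int src_w, int src_h, int max_w, int max_h,
                       int *out_w, int *out_h) {
    *out_w = src_w;
    *out_h = src_h;
    if (max_w <= 0 || max_h <= 0 || (src_w <= max_w && src_h <= max_h))
        return;                                     // nothing to do, keep the original size
    double scale = std::min((double) max_w / src_w, (double) max_h / src_h);
    *out_w = (int) std::lround(src_w * scale);
    *out_h = (int) std::lround(src_h * scale);
}
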
+ AV_COPY_PICTURE_DATA(pFrameRGB, buffer, PIX_FMT_RGBA, width, height); - SwsContext *img_convert_ctx = sws_getContext(info.width, info.height, pCodecCtx->pix_fmt, width, - height, PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL); + int scale_mode = SWS_FAST_BILINEAR; + if (openshot::Settings::Instance()->HIGH_QUALITY_SCALING) { + scale_mode = SWS_LANCZOS; + } + SwsContext *img_convert_ctx = sws_getContext(info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(pStream, pCodecCtx), width, + height, PIX_FMT_RGBA, scale_mode, NULL, NULL, NULL); // Resize / Convert to RGB sws_scale(img_convert_ctx, my_frame->data, my_frame->linesize, 0, @@ -903,21 +1030,53 @@ int data_size = 0; // re-initialize buffer size (it gets changed in the avcodec_decode_audio2 method call) - int buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE; - int used = avcodec_decode_audio4(aCodecCtx, audio_frame, &frame_finished, packet); + int buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE; + #pragma omp critical (ProcessAudioPacket) + { + #if IS_FFMPEG_3_2 + int ret = 0; + frame_finished = 1; + while((packet->size > 0 || (!packet->data && frame_finished)) && ret >= 0) { + frame_finished = 0; + ret = avcodec_send_packet(aCodecCtx, packet); + if (ret < 0 && ret != AVERROR(EINVAL) && ret != AVERROR_EOF) { + avcodec_send_packet(aCodecCtx, NULL); + break; + } + if (ret >= 0) + packet->size = 0; + ret = avcodec_receive_frame(aCodecCtx, audio_frame); + if (ret >= 0) + frame_finished = 1; + if(ret == AVERROR(EINVAL) || ret == AVERROR_EOF) { + avcodec_flush_buffers(aCodecCtx); + ret = 0; + } + if (ret >= 0) { + ret = frame_finished; + } + } + if (!packet->data && !frame_finished) + { + ret = -1; + } + #else + int used = avcodec_decode_audio4(aCodecCtx, audio_frame, &frame_finished, packet); +#endif + } if (frame_finished) { // determine how many samples were decoded - int planar = av_sample_fmt_is_planar(aCodecCtx->sample_fmt); + int planar = av_sample_fmt_is_planar((AVSampleFormat)AV_GET_CODEC_PIXEL_FORMAT(aStream, aCodecCtx)); int plane_size = -1; data_size = av_samples_get_buffer_size(&plane_size, - aCodecCtx->channels, + AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels, audio_frame->nb_samples, - aCodecCtx->sample_fmt, 1); + (AVSampleFormat)(AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx)), 1); // Calculate total number of samples - packet_samples = audio_frame->nb_samples * aCodecCtx->channels; + packet_samples = audio_frame->nb_samples * AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels; } // Estimate the # of samples and the end of this packet's location (to prevent GAPS for the next timestamp) @@ -976,9 +1135,9 @@ // Allocate audio buffer - int16_t *audio_buf = new int16_t[AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE]; + int16_t *audio_buf = new int16_t[AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE]; - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (ReSample)", "packet_samples", packet_samples, "info.channels", info.channels, "info.sample_rate", info.sample_rate, "aCodecCtx->sample_fmt", aCodecCtx->sample_fmt, "AV_SAMPLE_FMT_S16", AV_SAMPLE_FMT_S16, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (ReSample)", "packet_samples", packet_samples, "info.channels", info.channels, "info.sample_rate", info.sample_rate, "aCodecCtx->sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), "AV_SAMPLE_FMT_S16", AV_SAMPLE_FMT_S16, "", -1); // Create output frame AVFrame *audio_converted = AV_ALLOCATE_FRAME(); 
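
In the scaling hunk above, sws_getContext() now receives SWS_LANCZOS when the HIGH_QUALITY_SCALING setting is enabled and SWS_FAST_BILINEAR otherwise. A compact, standalone sketch of that conversion-to-RGBA step with the scaler flag left as a parameter (convert_to_rgba is an invented name, not a libopenshot function):

extern "C" {
#include <libswscale/swscale.h>
#include <libavutil/frame.h>
}

// Sketch only: convert/scale one decoded frame to RGBA with a caller-chosen
// scaler (e.g. SWS_FAST_BILINEAR or SWS_LANCZOS), writing into caller buffers.
static int convert_to_rgba(const AVFrame *src, int dst_w, int dst_h, int scale_flags,
                           uint8_t *dst_data[4], int dst_linesize[4]) {
    struct SwsContext *sws = sws_getContext(src->width, src->height,
                                            (enum AVPixelFormat) src->format,
                                            dst_w, dst_h, AV_PIX_FMT_RGBA,
                                            scale_flags, NULL, NULL, NULL);
    if (!sws)
        return -1;
    // Converts src and writes dst_h rows of RGBA pixels into dst_data/dst_linesize.
    int rows = sws_scale(sws, src->data, src->linesize, 0, src->height,
                         dst_data, dst_linesize);
    sws_freeContext(sws);
    return rows;
}

Lanczos resampling downscales noticeably more cleanly than fast bilinear at a higher CPU cost, which is presumably why the patch gates it behind a setting.
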
@@ -986,23 +1145,23 @@ audio_converted->nb_samples = audio_frame->nb_samples; av_samples_alloc(audio_converted->data, audio_converted->linesize, info.channels, audio_frame->nb_samples, AV_SAMPLE_FMT_S16, 0); - AVAudioResampleContext *avr = NULL; + SWRCONTEXT *avr = NULL; int nb_samples = 0; // setup resample context - avr = avresample_alloc_context(); - av_opt_set_int(avr, "in_channel_layout", aCodecCtx->channel_layout, 0); - av_opt_set_int(avr, "out_channel_layout", aCodecCtx->channel_layout, 0); - av_opt_set_int(avr, "in_sample_fmt", aCodecCtx->sample_fmt, 0); + avr = SWR_ALLOC(); + av_opt_set_int(avr, "in_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); + av_opt_set_int(avr, "out_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); + av_opt_set_int(avr, "in_sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), 0); av_opt_set_int(avr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0); av_opt_set_int(avr, "in_sample_rate", info.sample_rate, 0); av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr, "in_channels", info.channels, 0); av_opt_set_int(avr, "out_channels", info.channels, 0); - int r = avresample_open(avr); + int r = SWR_INIT(avr); // Convert audio samples - nb_samples = avresample_convert(avr, // audio resample context + nb_samples = SWR_CONVERT(avr, // audio resample context audio_converted->data, // output data pointers audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) audio_converted->nb_samples, // maximum number of samples that the output buffer can hold @@ -1014,8 +1173,8 @@ memcpy(audio_buf, audio_converted->data[0], audio_converted->nb_samples * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16) * info.channels); // Deallocate resample buffer - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; // Free AVFrames @@ -1167,7 +1326,7 @@ } // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Seek", "requested_frame", requested_frame, "seek_count", seek_count, "last_frame", last_frame, "processing_video_frames_size", processing_video_frames_size, "processing_audio_frames_size", processing_audio_frames_size, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Seek", "requested_frame", requested_frame, "seek_count", seek_count, "last_frame", last_frame, "processing_video_frames_size", processing_video_frames_size, "processing_audio_frames_size", processing_audio_frames_size, "video_pts_offset", video_pts_offset); // Wait for any processing frames to complete while (processing_video_frames_size + processing_audio_frames_size > 0) { @@ -1209,7 +1368,7 @@ seek_count++; // If seeking near frame 1, we need to close and re-open the file (this is more reliable than seeking) - int buffer_amount = 6; + int buffer_amount = max(OPEN_MP_NUM_PROCESSORS, 8); if (requested_frame - buffer_amount < 20) { // Close and re-open file (basically seeking to frame 1) @@ -1241,7 +1400,7 @@ { seek_target = ConvertFrameToVideoPTS(requested_frame - buffer_amount); if (av_seek_frame(pFormatCtx, info.video_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) < 0) { - fprintf(stderr, "%s: error while seeking video stream\n", pFormatCtx->filename); + fprintf(stderr, "%s: error while seeking video stream\n", pFormatCtx->AV_FILENAME); } else { // VIDEO SEEK @@ -1255,7 +1414,7 @@ { seek_target = ConvertFrameToAudioPTS(requested_frame - buffer_amount); if (av_seek_frame(pFormatCtx, info.audio_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) 
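
The SWR_ALLOC / SWR_INIT / SWR_CONVERT / SWR_CLOSE / SWR_FREE wrappers above take the place of the removed libavresample calls; the names suggest they expand to swr_alloc(), swr_init(), swr_convert() and swr_free(). Written directly against the FFmpeg 4.x-era libswresample API, the packed signed 16-bit conversion looks roughly like this sketch (resample_to_s16 is an invented name):

extern "C" {
#include <libswresample/swresample.h>
#include <libavutil/frame.h>
}

// Sketch only: convert one decoded audio frame to packed signed 16-bit samples
// at an unchanged rate and channel layout.
static int resample_to_s16(const AVFrame *in, int64_t channel_layout,
                           int sample_rate, uint8_t **out_data) {
    SwrContext *swr = swr_alloc_set_opts(NULL,
            channel_layout, AV_SAMPLE_FMT_S16, sample_rate,                  // output
            channel_layout, (enum AVSampleFormat) in->format, sample_rate,   // input
            0, NULL);
    if (!swr || swr_init(swr) < 0) {
        swr_free(&swr);
        return -1;
    }
    // Returns the number of samples produced per channel, or a negative AVERROR code.
    int got = swr_convert(swr, out_data, in->nb_samples,
                          (const uint8_t **) in->data, in->nb_samples);
    swr_free(&swr);
    return got;
}
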
< 0) { - fprintf(stderr, "%s: error while seeking audio stream\n", pFormatCtx->filename); + fprintf(stderr, "%s: error while seeking audio stream\n", pFormatCtx->AV_FILENAME); } else { // AUDIO SEEK @@ -1487,7 +1646,7 @@ for (int64_t audio_frame = previous_packet_location.frame; audio_frame < location.frame; audio_frame++) { if (!missing_audio_frames.count(audio_frame)) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (tracking missing frame)", "missing_audio_frame", audio_frame, "previous_audio_frame", previous_packet_location.frame, "new location frame", location.frame, "", -1, "", -1, "", -1); - missing_audio_frames.insert(pair(previous_packet_location.frame - 1, audio_frame)); + missing_audio_frames.insert(pair(audio_frame, previous_packet_location.frame - 1)); } } } @@ -1558,18 +1717,29 @@ // Debug output ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckMissingFrame", "requested_frame", requested_frame, "has_missing_frames", has_missing_frames, "missing_video_frames.size()", missing_video_frames.size(), "checked_count", checked_count, "", -1, "", -1); - // Missing frames (sometimes frame #'s are skipped due to invalid or missing timestamps) map::iterator itr; bool found_missing_frame = false; - // Check if requested frame is a missing frame - if (missing_video_frames.count(requested_frame) || missing_audio_frames.count(requested_frame)) { - int64_t missing_source_frame = -1; - if (missing_video_frames.count(requested_frame)) - missing_source_frame = missing_video_frames.find(requested_frame)->second; - else if (missing_audio_frames.count(requested_frame)) - missing_source_frame = missing_audio_frames.find(requested_frame)->second; + // Special MP3 Handling (ignore more than 1 video frame) + if (info.has_audio and info.has_video) { + AVCodecID aCodecId = AV_FIND_DECODER_CODEC_ID(aStream); + AVCodecID vCodecId = AV_FIND_DECODER_CODEC_ID(pStream); + // If MP3 with single video frame, handle this special case by copying the previously + // decoded image to the new frame. Otherwise, it will spend a huge amount of + // CPU time looking for missing images for all the audio-only frames. 
+ if (checked_count > 8 && !missing_video_frames.count(requested_frame) && + !processing_audio_frames.count(requested_frame) && processed_audio_frames.count(requested_frame) && + last_frame && last_video_frame->has_image_data && aCodecId == AV_CODEC_ID_MP3 && (vCodecId == AV_CODEC_ID_MJPEGB || vCodecId == AV_CODEC_ID_MJPEG)) { + missing_video_frames.insert(pair(requested_frame, last_video_frame->number)); + missing_video_frames_source.insert(pair(last_video_frame->number, requested_frame)); + missing_frames.Add(last_video_frame); + } + } + + // Check if requested video frame is a missing + if (missing_video_frames.count(requested_frame)) { + int64_t missing_source_frame = missing_video_frames.find(requested_frame)->second; // Increment missing source frame check count (or init to 1) if (checked_frames.count(missing_source_frame) == 0) @@ -1602,21 +1772,26 @@ std::shared_ptr parent_image = parent_frame->GetImage(); if (parent_image) { missing_frame->AddImage(std::shared_ptr(new QImage(*parent_image))); - processed_video_frames[missing_frame->number] = missing_frame->number; - processed_audio_frames[missing_frame->number] = missing_frame->number; + } + } + } - // Move frame to final cache - final_cache.Add(missing_frame); + // Check if requested audio frame is a missing + if (missing_audio_frames.count(requested_frame)) { - // Remove frame from working cache - working_cache.Remove(missing_frame->number); + // Create blank missing frame + std::shared_ptr missing_frame = CreateFrame(requested_frame); - // Update last_frame processed - last_frame = missing_frame->number; - } - } + // Get Samples per frame (for this frame number) + int samples_per_frame = Frame::GetSamplesPerFrame(missing_frame->number, info.fps, info.sample_rate, info.channels); + + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckMissingFrame (Add Silence for Missing Audio Frame)", "requested_frame", requested_frame, "missing_frame->number", missing_frame->number, "samples_per_frame", samples_per_frame, "", -1, "", -1, "", -1); + // Add this frame to the processed map (since it's already done) + missing_frame->AddAudioSilence(samples_per_frame); + processed_audio_frames[missing_frame->number] = missing_frame->number; } return found_missing_frame; @@ -1629,6 +1804,9 @@ bool checked_count_tripped = false; int max_checked_count = 80; + // Check if requested frame is 'missing' + CheckMissingFrame(requested_frame); + while (true) { // Get the front frame of working cache @@ -1639,6 +1817,11 @@ // No frames found break; + // Remove frames which are too old + if (f && f->number < (requested_frame - (OPEN_MP_NUM_PROCESSORS * 2))) { + working_cache.Remove(f->number); + } + // Check if this frame is 'missing' CheckMissingFrame(f->number); @@ -1685,7 +1868,6 @@ } if (info.has_audio && !is_audio_ready) { - const GenericScopedLock lock(processingCriticalSection); // Mark audio as processed, and indicate the frame has audio data is_audio_ready = true; } @@ -1698,10 +1880,15 @@ if ((!end_of_stream && is_video_ready && is_audio_ready) || end_of_stream || is_seek_trash) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (mark frame as final)", "requested_frame", requested_frame, "f->number", f->number, "is_seek_trash", is_seek_trash, "Working Cache Count", working_cache.Count(), "Final Cache Count", final_cache.Count(), "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (mark frame as final)", "requested_frame", requested_frame, 
"f->number", f->number, "is_seek_trash", is_seek_trash, "Working Cache Count", working_cache.Count(), "Final Cache Count", final_cache.Count(), "end_of_stream", end_of_stream); if (!is_seek_trash) { + // Add missing image (if needed - sometimes end_of_stream causes frames with only audio) + if (info.has_video && !is_video_ready && last_video_frame) + // Copy image from last frame + f->AddImage(std::shared_ptr(new QImage(*last_video_frame->GetImage()))); + // Reset counter since last 'final' frame num_checks_since_final = 0; @@ -1742,16 +1929,14 @@ void FFmpegReader::CheckFPS() { check_fps = true; - avpicture_alloc(pFrame, pCodecCtx->pix_fmt, info.width, info.height); int first_second_counter = 0; int second_second_counter = 0; int third_second_counter = 0; int forth_second_counter = 0; int fifth_second_counter = 0; - - int iterations = 0; - int threshold = 500; + int frames_detected = 0; + int64_t pts = 0; // Loop through the stream while (true) @@ -1771,7 +1956,7 @@ UpdatePTSOffset(true); // Get PTS of this packet - int64_t pts = GetVideoPTS(); + pts = GetVideoPTS(); // Remove pFrame RemoveAVFrame(pFrame); @@ -1793,77 +1978,68 @@ forth_second_counter++; else if (video_seconds > 4.0 && video_seconds <= 5.0) fifth_second_counter++; - else - // Too far - break; + + // Increment counters + frames_detected++; } } - - // Increment counters - iterations++; - - // Give up (if threshold exceeded) - if (iterations > threshold) - break; } // Double check that all counters have greater than zero (or give up) - if (second_second_counter == 0 || third_second_counter == 0 || forth_second_counter == 0 || fifth_second_counter == 0) - { - // Seek to frame 1 - Seek(1); - - // exit with no changes to FPS (not enough data to calculate) - return; - } - - int sum_fps = second_second_counter + third_second_counter + forth_second_counter + fifth_second_counter; - int avg_fps = round(sum_fps / 4.0f); - - // Sometimes the FPS is incorrectly detected by FFmpeg. If the 1st and 2nd seconds counters - // agree with each other, we are going to adjust the FPS of this reader instance. Otherwise, print - // a warning message. - - // Get diff from actual frame rate - double fps = info.fps.ToDouble(); - double diff = fps - double(avg_fps); - - // Is difference bigger than 1 frame? 
- if (diff <= -1 || diff >= 1) - { - // Compare to half the frame rate (the most common type of issue) - double half_fps = Fraction(info.fps.num / 2, info.fps.den).ToDouble(); - diff = half_fps - double(avg_fps); + if (second_second_counter != 0 && third_second_counter != 0 && forth_second_counter != 0 && fifth_second_counter != 0) { + // Calculate average FPS (average of first few seconds) + int sum_fps = second_second_counter + third_second_counter + forth_second_counter + fifth_second_counter; + int avg_fps = round(sum_fps / 4.0f); + + // Update FPS + info.fps = Fraction(avg_fps, 1); + + // Update Duration and Length + info.video_length = frames_detected; + info.duration = frames_detected / (sum_fps / 4.0f); + + // Update video bit rate + info.video_bit_rate = info.file_size / info.duration; + } else if (second_second_counter != 0 && third_second_counter != 0) { + // Calculate average FPS (only on second 2) + int sum_fps = second_second_counter; + + // Update FPS + info.fps = Fraction(sum_fps, 1); + + // Update Duration and Length + info.video_length = frames_detected; + info.duration = frames_detected / float(sum_fps); + + // Update video bit rate + info.video_bit_rate = info.file_size / info.duration; + } else { + // Too short to determine framerate, just default FPS + // Set a few important default video settings (so audio can be divided into frames) + info.fps.num = 30; + info.fps.den = 1; - // Is difference bigger than 1 frame? - if (diff <= -1 || diff >= 1) - { - // Update FPS for this reader instance - info.fps = Fraction(avg_fps, 1); - } - else - { - // Update FPS for this reader instance (to 1/2 the original framerate) - info.fps = Fraction(info.fps.num / 2, info.fps.den); - } + // Calculate number of frames + info.video_length = frames_detected; + info.duration = frames_detected / info.fps.ToFloat(); } - - // Seek to frame 1 - Seek(1); } // Remove AVFrame from cache (and deallocate it's memory) -void FFmpegReader::RemoveAVFrame(AVPicture* remove_frame) +void FFmpegReader::RemoveAVFrame(AVFrame* remove_frame) { // Remove pFrame (if exists) if (remove_frame) { // Free memory - avpicture_free(remove_frame); - - // Delete the object - delete remove_frame; - } + #pragma omp critical (packet_cache) + { + av_freep(&remove_frame->data[0]); +#ifndef WIN32 + AV_FREE_FRAME(&remove_frame); +#endif + } + } } // Remove AVPacket from cache (and deallocate it's memory) diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/FFmpegWriter.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/FFmpegWriter.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/FFmpegWriter.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/FFmpegWriter.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -46,7 +46,7 @@ info.has_video = false; // Initialize FFMpeg, and register all formats and codecs - av_register_all(); + AV_REGISTER_ALL // auto detect format auto_detect_format(); @@ -55,16 +55,24 @@ // Open the writer void FFmpegWriter::Open() { - // Open the writer - is_open = true; - - // Prepare streams (if needed) - if (!prepare_streams) - PrepareStreams(); - - // Write header (if needed) - if (!write_header) - WriteHeader(); + if (!is_open) { + // Open the writer + is_open = true; + + // Prepare streams (if needed) + if (!prepare_streams) + PrepareStreams(); + + // Now that all the parameters are set, we can open the audio and video codecs and allocate the necessary encode buffers + if (info.has_video && 
video_st) + open_video(oc, video_st); + if (info.has_audio && audio_st) + open_audio(oc, audio_st); + + // Write header (if needed) + if (!write_header) + WriteHeader(); + } } // auto detect format (from path) @@ -76,7 +84,7 @@ throw InvalidFormat("Could not deduce output format from file extension.", path); // Allocate the output media context - oc = avformat_alloc_context(); + AV_OUTPUT_CONTEXT(&oc, path.c_str()); if (!oc) throw OutOfMemory("Could not allocate memory for AVFormatContext.", path); @@ -146,7 +154,9 @@ info.pixel_ratio.num = pixel_ratio.num; info.pixel_ratio.den = pixel_ratio.den; } - if (bit_rate >= 1000) + if (bit_rate >= 1000) // bit_rate is the bitrate in b/s + info.video_bit_rate = bit_rate; + if ((bit_rate >= 0) && (bit_rate < 64) ) // bit_rate is the bitrate in crf info.video_bit_rate = bit_rate; info.interlaced_frame = interlaced; @@ -211,12 +221,19 @@ { // Declare codec context AVCodecContext *c = NULL; + AVStream *st = NULL; stringstream convert(value); - if (info.has_video && stream == VIDEO_STREAM && video_st) - c = video_st->codec; - else if (info.has_audio && stream == AUDIO_STREAM && audio_st) - c = audio_st->codec; + if (info.has_video && stream == VIDEO_STREAM && video_st) { + st = video_st; + // Get codec context + c = AV_GET_CODEC_PAR_CONTEXT(st, video_codec); + } + else if (info.has_audio && stream == AUDIO_STREAM && audio_st) { + st = audio_st; + // Get codec context + c = AV_GET_CODEC_PAR_CONTEXT(st, audio_codec); + } else throw NoStreamsFound("The stream was not found. Be sure to call PrepareStreams() first.", path); @@ -226,15 +243,12 @@ // Was a codec / stream found? if (c) // Find AVOption (if it exists) - #if LIBAVFORMAT_VERSION_MAJOR <= 53 - option = av_find_opt(c->priv_data, name.c_str(), NULL, NULL, NULL); - #else - option = av_opt_find(c->priv_data, name.c_str(), NULL, 0, 0); - #endif + option = AV_OPTION_FIND(c->priv_data, name.c_str()); // Was option found? if (option || (name == "g" || name == "qmin" || name == "qmax" || name == "max_b_frames" || name == "mb_decision" || - name == "level" || name == "profile" || name == "slices" || name == "rc_min_rate" || name == "rc_max_rate")) + name == "level" || name == "profile" || name == "slices" || name == "rc_min_rate" || name == "rc_max_rate" || + name == "crf")) { // Check for specific named options if (name == "g") @@ -281,13 +295,63 @@ // Buffer size convert >> c->rc_buffer_size; + else if (name == "crf") { + // encode quality and special settings like lossless + // This might be better in an extra methods as more options + // and way to set quality are possible + #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) + switch (c->codec_id) { + #if (LIBAVCODEC_VERSION_MAJOR >= 58) + case AV_CODEC_ID_AV1 : + c->bit_rate = 0; + av_opt_set_int(c->priv_data, "crf", min(stoi(value),63), 0); + break; + #endif + case AV_CODEC_ID_VP8 : + c->bit_rate = 10000000; + av_opt_set_int(c->priv_data, "crf", max(min(stoi(value),63),4), 0); // 4-63 + break; + case AV_CODEC_ID_VP9 : + c->bit_rate = 0; // Must be zero! 
+ av_opt_set_int(c->priv_data, "crf", min(stoi(value),63), 0); // 0-63 + if (stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + case AV_CODEC_ID_H264 : + av_opt_set_int(c->priv_data, "crf", min(stoi(value),51), 0); // 0-51 + if (stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + } + break; + case AV_CODEC_ID_H265 : + av_opt_set_int(c->priv_data, "crf", min(stoi(value),51), 0); // 0-51 + if (stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + default: + // If this codec doesn't support crf calculate a bitrate + // TODO: find better formula + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380.0; + } + else { + mbs *= pow(0.912,info.video_bit_rate); + } + } + c->bit_rate = (int)(mbs); + } + #endif + } + else // Set AVOption - #if LIBAVFORMAT_VERSION_MAJOR <= 53 - av_set_string3 (c->priv_data, name.c_str(), value.c_str(), 0, NULL); - #else - av_opt_set (c->priv_data, name.c_str(), value.c_str(), 0); - #endif + AV_OPTION_SET(st, c->priv_data, name.c_str(), value.c_str(), c); ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::SetOption (" + (string)name + ")", "stream == VIDEO_STREAM", stream == VIDEO_STREAM, "", -1, "", -1, "", -1, "", -1, "", -1); @@ -297,6 +361,18 @@ } +/// Determine if codec name is valid +bool FFmpegWriter::IsValidCodec(string codec_name) { + // Initialize FFMpeg, and register all formats and codecs + AV_REGISTER_ALL + + // Find the codec (if any) + if (avcodec_find_encoder_by_name(codec_name.c_str()) == NULL) + return false; + else + return true; +} + // Prepare & initialize streams and open codecs void FFmpegWriter::PrepareStreams() { @@ -308,12 +384,6 @@ // Initialize the streams (i.e. add the streams) initialize_streams(); - // Now that all the parameters are set, we can open the audio and video codecs and allocate the necessary encode buffers - if (info.has_video && video_st) - open_video(oc, video_st); - if (info.has_audio && audio_st) - open_audio(oc, audio_st); - // Mark as 'prepared' prepare_streams = true; } @@ -331,11 +401,20 @@ } // Force the output filename (which doesn't always happen for some reason) - snprintf(oc->filename, sizeof(oc->filename), "%s", path.c_str()); + snprintf(oc->AV_FILENAME, sizeof(oc->AV_FILENAME), "%s", path.c_str()); // Write the stream header, if any // TODO: add avoptions / parameters instead of NULL - avformat_write_header(oc, NULL); + + // Add general metadata (if any) + for(std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) + { + av_dict_set(&oc->metadata, iter->first.c_str(), iter->second.c_str(), 0); + } + + if (avformat_write_header(oc, NULL) != 0) { + throw InvalidFile("Could not write header to file.", path); + }; // Mark as 'written' write_header = true; @@ -370,10 +449,6 @@ else { - // YES, WRITING... 
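
The new "crf" branch in SetOption() above switches supported encoders to constant-quality mode by writing the value into the codec's private options, clamped to each encoder's legal range (0 to 51 for x264/x265, up to 63 for the VP8/VP9/AV1 encoders), and falls back to a computed bitrate for codecs without a crf option. A minimal sketch of the libx264 case only (set_h264_crf is an invented name; the context is assumed to be allocated but not yet opened):

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>
}
#include <algorithm>

// Sketch only: put an already-allocated, not yet opened libx264 context into
// constant-quality mode, mirroring the H.264 arm of the "crf" branch above.
static void set_h264_crf(AVCodecContext *enc, int crf) {
    crf = std::max(0, std::min(crf, 51));            // libx264 accepts CRF 0..51
    av_opt_set_int(enc->priv_data, "crf", crf, 0);   // private option of the x264 wrapper
    if (crf == 0)                                    // the patch pairs CRF 0 with a slow preset
        av_opt_set(enc->priv_data, "preset", "veryslow", 0);
}
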
so wait until it finishes, before writing again - while (is_writing) - usleep(250000); // sleep for 250 milliseconds - // Write frames to video file write_queued_frames(); } @@ -541,10 +616,12 @@ // Flush encoders void FFmpegWriter::flush_encoders() { - if (info.has_audio && audio_codec && audio_st->codec->codec_type == AVMEDIA_TYPE_AUDIO && audio_codec->frame_size <= 1) - return; - if (info.has_video && video_st->codec->codec_type == AVMEDIA_TYPE_VIDEO && (oc->oformat->flags & AVFMT_RAWPICTURE) && video_codec->codec->id == AV_CODEC_ID_RAWVIDEO) - return; + if (info.has_audio && audio_codec && AV_GET_CODEC_TYPE(audio_st) == AVMEDIA_TYPE_AUDIO && AV_GET_CODEC_ATTRIBUTES(audio_st, audio_codec)->frame_size <= 1) + return; +#if (LIBAVFORMAT_VERSION_MAJOR < 58) + if (info.has_video && video_codec && AV_GET_CODEC_TYPE(video_st) == AVMEDIA_TYPE_VIDEO && (oc->oformat->flags & AVFMT_RAWPICTURE) && AV_FIND_DECODER_CODEC_ID(video_st) == AV_CODEC_ID_RAWVIDEO) + return; +#endif int error_code = 0; int stop_encoding = 1; @@ -568,29 +645,54 @@ int got_packet = 0; int error_code = 0; - #if LIBAVFORMAT_VERSION_MAJOR >= 54 - // Newer versions of FFMpeg - error_code = avcodec_encode_video2(video_codec, &pkt, NULL, &got_packet); - + #if IS_FFMPEG_3_2 + #pragma omp critical (write_video_packet) + { + // Encode video packet (latest version of FFmpeg) + error_code = avcodec_send_frame(video_codec, NULL); + got_packet = 0; + while (error_code >= 0) { + error_code = avcodec_receive_packet(video_codec, &pkt); + if (error_code == AVERROR(EAGAIN)|| error_code == AVERROR_EOF) { + got_packet = 0; + // Write packet + avcodec_flush_buffers(video_codec); + break; + } + if (pkt.pts != AV_NOPTS_VALUE) + pkt.pts = av_rescale_q(pkt.pts, video_codec->time_base, video_st->time_base); + if (pkt.dts != AV_NOPTS_VALUE) + pkt.dts = av_rescale_q(pkt.dts, video_codec->time_base, video_st->time_base); + if (pkt.duration > 0) + pkt.duration = av_rescale_q(pkt.duration, video_codec->time_base, video_st->time_base); + pkt.stream_index = video_st->index; + error_code = av_interleaved_write_frame(oc, &pkt); + } + } #else - // Older versions of FFmpeg (much sloppier) - - // Encode Picture and Write Frame - int video_outbuf_size = 0; - /* encode the image */ - int out_size = avcodec_encode_video(video_codec, NULL, video_outbuf_size, NULL); - - /* if zero size, it means the image was buffered */ - if (out_size > 0) { - if(video_codec->coded_frame->key_frame) - pkt.flags |= AV_PKT_FLAG_KEY; - pkt.data= video_outbuf; - pkt.size= out_size; + #if LIBAVFORMAT_VERSION_MAJOR >= 54 + // Encode video packet (older than FFmpeg 3.2) + error_code = avcodec_encode_video2(video_codec, &pkt, NULL, &got_packet); + + #else + // Encode video packet (even older version of FFmpeg) + int video_outbuf_size = 0; + + /* encode the image */ + int out_size = avcodec_encode_video(video_codec, NULL, video_outbuf_size, NULL); + + /* if zero size, it means the image was buffered */ + if (out_size > 0) { + if(video_codec->coded_frame->key_frame) + pkt.flags |= AV_PKT_FLAG_KEY; + pkt.data= video_outbuf; + pkt.size= out_size; - // got data back (so encode this frame) - got_packet = 1; - } + // got data back (so encode this frame) + got_packet = 1; + } + #endif #endif if (error_code < 0) { @@ -644,7 +746,12 @@ /* encode the image */ int got_packet = 0; + #if IS_FFMPEG_3_2 + avcodec_send_frame(audio_codec, NULL); + got_packet = 0; + #else error_code = avcodec_encode_audio2(audio_codec, &pkt, NULL, &got_packet); + #endif if (error_code < 0) { 
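
The IS_FFMPEG_3_2 path of flush_encoders() above drains the codec by sending a NULL frame, pulling the remaining packets with avcodec_receive_packet(), rescaling their timestamps from the codec time base to the stream time base, and handing them to av_interleaved_write_frame(). The same drain loop in compact standalone form (drain_encoder is an invented name; av_packet_rescale_ts() performs the pts/dts/duration rescaling that the patch does with three av_rescale_q() calls):

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

// Sketch only: drain one encoder at end of stream and mux the remaining packets.
static int drain_encoder(AVFormatContext *oc, AVCodecContext *enc, AVStream *st) {
    int ret = avcodec_send_frame(enc, NULL);         // a NULL frame enters draining mode
    if (ret < 0)
        return ret;
    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = NULL;
    pkt.size = 0;
    while ((ret = avcodec_receive_packet(enc, &pkt)) >= 0) {
        // Rescale pts/dts/duration from the encoder time base to the stream time base.
        av_packet_rescale_ts(&pkt, enc->time_base, st->time_base);
        pkt.stream_index = st->index;
        ret = av_interleaved_write_frame(oc, &pkt);  // the muxer takes ownership of the payload
        if (ret < 0)
            return ret;
    }
    return ret == AVERROR_EOF ? 0 : ret;             // AVERROR_EOF means the encoder is fully drained
}
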
ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); } @@ -685,14 +792,14 @@ // Close the video codec void FFmpegWriter::close_video(AVFormatContext *oc, AVStream *st) { - avcodec_close(st->codec); + AV_FREE_CONTEXT(video_codec); video_codec = NULL; } // Close the audio codec void FFmpegWriter::close_audio(AVFormatContext *oc, AVStream *st) { - avcodec_close(st->codec); + AV_FREE_CONTEXT(audio_codec); audio_codec = NULL; // Clear buffers @@ -705,14 +812,14 @@ // Deallocate resample buffer if (avr) { - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; } if (avr_planar) { - avresample_close(avr_planar); - avresample_free(&avr_planar); + SWR_CLOSE(avr_planar); + SWR_FREE(&avr_planar); avr_planar = NULL; } } @@ -736,7 +843,7 @@ // Free the streams for (int i = 0; i < oc->nb_streams; i++) { - av_freep(&oc->streams[i]->codec); + av_freep(AV_GET_CODEC_ATTRIBUTES(&oc->streams[i], &oc->streams[i])); av_freep(&oc->streams[i]); } @@ -789,14 +896,8 @@ throw InvalidCodec("A valid audio codec could not be found for this file.", path); // Create a new audio stream - st = avformat_new_stream(oc, codec); - if (!st) - throw OutOfMemory("Could not allocate memory for the audio stream.", path); - - // Set default values - avcodec_get_context_defaults3(st->codec, codec); + AV_FORMAT_NEW_STREAM(oc, audio_codec, codec, st) - c = st->codec; c->codec_id = codec->id; #if LIBAVFORMAT_VERSION_MAJOR >= 53 c->codec_type = AVMEDIA_TYPE_AUDIO; @@ -858,8 +959,13 @@ // some formats want stream headers to be separate if (oc->oformat->flags & AVFMT_GLOBALHEADER) +#if (LIBAVCODEC_VERSION_MAJOR >= 57) + c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; +#else c->flags |= CODEC_FLAG_GLOBAL_HEADER; +#endif + AV_COPY_PARAMS_FROM_CONTEXT(st, c); ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_audio_stream", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->channels", c->channels, "c->sample_fmt", c->sample_fmt, "c->channel_layout", c->channel_layout, "c->sample_rate", c->sample_rate); return st; @@ -871,20 +977,14 @@ AVCodecContext *c; AVStream *st; - // Find the audio codec + // Find the video codec AVCodec *codec = avcodec_find_encoder_by_name(info.vcodec.c_str()); if (codec == NULL) throw InvalidCodec("A valid video codec could not be found for this file.", path); - // Create a new stream - st = avformat_new_stream(oc, codec); - if (!st) - throw OutOfMemory("Could not allocate memory for the video stream.", path); - - // Set default values - avcodec_get_context_defaults3(st->codec, codec); + // Create a new video stream + AV_FORMAT_NEW_STREAM(oc, video_codec, codec, st) - c = st->codec; c->codec_id = codec->id; #if LIBAVFORMAT_VERSION_MAJOR >= 53 c->codec_type = AVMEDIA_TYPE_VIDEO; @@ -893,7 +993,19 @@ #endif /* Init video encoder options */ - c->bit_rate = info.video_bit_rate; + if (info.video_bit_rate >= 1000) { + c->bit_rate = info.video_bit_rate; + if (info.video_bit_rate >= 1500000) { + c->qmin = 2; + c->qmax = 30; + } + // Here should be the setting for low fixed bitrate + // Defaults are used because mpeg2 otherwise had problems + } + else { + c->qmin = 0; + c->qmax = 63; + } //TODO: Implement variable bitrate feature (which actually works). This implementation throws //invalid bitrate errors and rc buffer underflow errors, etc... 
@@ -902,8 +1014,6 @@ //c->rc_buffer_size = FFMAX(c->rc_max_rate, 15000000) * 112L / 15000000 * 16384; //if ( !c->rc_initial_buffer_occupancy ) // c->rc_initial_buffer_occupancy = c->rc_buffer_size * 3/4; - c->qmin = 2; - c->qmax = 30; /* resolution must be a multiple of two */ // TODO: require /2 height and width @@ -916,6 +1026,10 @@ identically 1. */ c->time_base.num = info.video_timebase.num; c->time_base.den = info.video_timebase.den; + #if LIBAVFORMAT_VERSION_MAJOR >= 56 + c->framerate = av_inv_q(c->time_base); + #endif + st->avg_frame_rate = av_inv_q(c->time_base); st->time_base.num = info.video_timebase.num; st->time_base.den = info.video_timebase.den; @@ -931,7 +1045,11 @@ c->mb_decision = 2; // some formats want stream headers to be separate if (oc->oformat->flags & AVFMT_GLOBALHEADER) +#if (LIBAVCODEC_VERSION_MAJOR >= 57) + c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; +#else c->flags |= CODEC_FLAG_GLOBAL_HEADER; +#endif // Find all supported pixel formats for this codec const PixelFormat* supported_pixel_formats = codec->pix_fmts; @@ -948,17 +1066,24 @@ // Raw video should use RGB24 c->pix_fmt = PIX_FMT_RGB24; +#if (LIBAVFORMAT_VERSION_MAJOR < 58) if (strcmp(fmt->name, "gif") != 0) // If not GIF format, skip the encoding process // Set raw picture flag (so we don't encode this video) oc->oformat->flags |= AVFMT_RAWPICTURE; +#endif } else { // Set the default codec c->pix_fmt = PIX_FMT_YUV420P; } } + AV_COPY_PARAMS_FROM_CONTEXT(st, c); +#if (LIBAVFORMAT_VERSION_MAJOR < 58) ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_video_stream (" + (string)fmt->name + " : " + (string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags, "AVFMT_RAWPICTURE", AVFMT_RAWPICTURE, "", -1); +#else + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_video_stream (" + (string)fmt->name + " : " + (string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags, "", -1, "", -1); +#endif return st; } @@ -967,10 +1092,10 @@ void FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) { AVCodec *codec; - audio_codec = st->codec; + AV_GET_CODEC_FROM_STREAM(st, audio_codec) // Set number of threads equal to number of processors (not to exceed 16) - audio_codec->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + audio_codec->thread_count = min(FF_NUM_PROCESSORS, 16); // Find the audio encoder codec = avcodec_find_encoder_by_name(info.acodec.c_str()); @@ -979,9 +1104,17 @@ if (!codec) throw InvalidCodec("Could not find codec", path); + // Init options + AVDictionary *opts = NULL; + av_dict_set(&opts, "strict", "experimental", 0); + // Open the codec - if (avcodec_open2(audio_codec, codec, NULL) < 0) + if (avcodec_open2(audio_codec, codec, &opts) < 0) throw InvalidCodec("Could not open codec", path); + AV_COPY_PARAMS_FROM_CONTEXT(st, audio_codec); + + // Free options + av_dict_free(&opts); // Calculate the size of the input frame (i..e how many samples per packet), and the output buffer // TODO: Ugly hack for PCM codecs (will be removed ASAP with new PCM support to compute the input frame size in samples @@ -989,7 +1122,8 @@ // No frame size found... 
so calculate audio_input_frame_size = 50000 / info.channels; - switch (st->codec->codec_id) { + int s = AV_FIND_DECODER_CODEC_ID(st); + switch (s) { case AV_CODEC_ID_PCM_S16LE: case AV_CODEC_ID_PCM_S16BE: case AV_CODEC_ID_PCM_U16LE: @@ -1018,7 +1152,13 @@ audio_encoder_buffer_size = AUDIO_PACKET_ENCODING_SIZE; audio_encoder_buffer = new uint8_t[audio_encoder_buffer_size]; - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_audio", "audio_codec->thread_count", audio_codec->thread_count, "audio_input_frame_size", audio_input_frame_size, "buffer_size", AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE, "", -1, "", -1, "", -1); + // Add audio metadata (if any) + for(std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) + { + av_dict_set(&st->metadata, iter->first.c_str(), iter->second.c_str(), 0); + } + + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_audio", "audio_codec->thread_count", audio_codec->thread_count, "audio_input_frame_size", audio_input_frame_size, "buffer_size", AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE, "", -1, "", -1, "", -1); } @@ -1026,15 +1166,15 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) { AVCodec *codec; - video_codec = st->codec; + AV_GET_CODEC_FROM_STREAM(st, video_codec) // Set number of threads equal to number of processors (not to exceed 16) - video_codec->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + video_codec->thread_count = min(FF_NUM_PROCESSORS, 16); /* find the video encoder */ codec = avcodec_find_encoder_by_name(info.vcodec.c_str()); if (!codec) - codec = avcodec_find_encoder(video_codec->codec_id); + codec = avcodec_find_encoder(AV_FIND_DECODER_CODEC_ID(st)); if (!codec) throw InvalidCodec("Could not find codec", path); @@ -1042,9 +1182,23 @@ if(video_codec->max_b_frames && video_codec->codec_id != AV_CODEC_ID_MPEG4 && video_codec->codec_id != AV_CODEC_ID_MPEG1VIDEO && video_codec->codec_id != AV_CODEC_ID_MPEG2VIDEO) video_codec->max_b_frames = 0; + // Init options + AVDictionary *opts = NULL; + av_dict_set(&opts, "strict", "experimental", 0); + /* open the codec */ - if (avcodec_open2(video_codec, codec, NULL) < 0) + if (avcodec_open2(video_codec, codec, &opts) < 0) throw InvalidCodec("Could not open codec", path); + AV_COPY_PARAMS_FROM_CONTEXT(st, video_codec); + + // Free options + av_dict_free(&opts); + + // Add video metadata (if any) + for(std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) + { + av_dict_set(&st->metadata, iter->first.c_str(), iter->second.c_str(), 0); + } ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_video", "video_codec->thread_count", video_codec->thread_count, "", -1, "", -1, "", -1, "", -1, "", -1); @@ -1170,7 +1324,7 @@ // setup resample context if (!avr) { - avr = avresample_alloc_context(); + avr = SWR_ALLOC(); av_opt_set_int(avr, "in_channel_layout", channel_layout_in_frame, 0); av_opt_set_int(avr, "out_channel_layout", info.channel_layout, 0); av_opt_set_int(avr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0); @@ -1179,12 +1333,12 @@ av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr, "in_channels", channels_in_frame, 0); av_opt_set_int(avr, "out_channels", info.channels, 0); - avresample_open(avr); + SWR_INIT(avr); } int nb_samples = 0; // Convert audio samples - nb_samples = avresample_convert(avr, // audio resample context + nb_samples = SWR_CONVERT(avr, // audio resample context audio_converted->data, // output data pointers 
audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) audio_converted->nb_samples, // maximum number of samples that the output buffer can hold @@ -1245,7 +1399,7 @@ // setup resample context if (!avr_planar) { - avr_planar = avresample_alloc_context(); + avr_planar = SWR_ALLOC(); av_opt_set_int(avr_planar, "in_channel_layout", info.channel_layout, 0); av_opt_set_int(avr_planar, "out_channel_layout", info.channel_layout, 0); av_opt_set_int(avr_planar, "in_sample_fmt", output_sample_fmt, 0); @@ -1254,7 +1408,7 @@ av_opt_set_int(avr_planar, "out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr_planar, "in_channels", info.channels, 0); av_opt_set_int(avr_planar, "out_channels", info.channels, 0); - avresample_open(avr_planar); + SWR_INIT(avr_planar); } // Create input frame (and allocate arrays) @@ -1277,7 +1431,7 @@ av_samples_alloc(frame_final->data, frame_final->linesize, info.channels, frame_final->nb_samples, audio_codec->sample_fmt, 0); // Convert audio samples - int nb_samples = avresample_convert(avr_planar, // audio resample context + int nb_samples = SWR_CONVERT(avr_planar, // audio resample context frame_final->data, // output data pointers frame_final->linesize[0], // output plane size, in bytes. (0 if unknown) frame_final->nb_samples, // maximum number of samples that the output buffer can hold @@ -1298,7 +1452,7 @@ } else { // Create a new array - final_samples = new int16_t[audio_input_position * (av_get_bytes_per_sample(audio_codec->sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))]; + final_samples = (int16_t*)av_malloc(sizeof(int16_t) * audio_input_position * (av_get_bytes_per_sample(audio_codec->sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); // Copy audio into buffer for frame memcpy(final_samples, samples, audio_input_position * av_get_bytes_per_sample(audio_codec->sample_fmt)); @@ -1326,8 +1480,39 @@ /* encode the audio samples */ int got_packet_ptr = 0; - int error_code = avcodec_encode_audio2(audio_codec, &pkt, frame_final, &got_packet_ptr); + #if IS_FFMPEG_3_2 + // Encode audio (latest version of FFmpeg) + int error_code; + int ret = 0; + int frame_finished = 0; + error_code = ret = avcodec_send_frame(audio_codec, frame_final); + if (ret < 0 && ret != AVERROR(EINVAL) && ret != AVERROR_EOF) { + avcodec_send_frame(audio_codec, NULL); + } + else { + if (ret >= 0) + pkt.size = 0; + ret = avcodec_receive_packet(audio_codec, &pkt); + if (ret >= 0) + frame_finished = 1; + if(ret == AVERROR(EINVAL) || ret == AVERROR_EOF) { + avcodec_flush_buffers(audio_codec); + ret = 0; + } + if (ret >= 0) { + ret = frame_finished; + } + } + if (!pkt.data && !frame_finished) + { + ret = -1; + } + got_packet_ptr = ret; + #else + // Encode audio (older versions of FFmpeg) + int error_code = avcodec_encode_audio2(audio_codec, &pkt, frame_final, &got_packet_ptr); + #endif /* if zero size, it means the image was buffered */ if (error_code == 0 && got_packet_ptr) { @@ -1397,7 +1582,7 @@ throw OutOfMemory("Could not allocate AVFrame", path); // Determine required buffer size and allocate buffer - *buffer_size = avpicture_get_size(pix_fmt, width, height); + *buffer_size = AV_GET_IMAGE_SIZE(pix_fmt, width, height); // Create buffer (if not provided) if (!new_buffer) @@ -1405,7 +1590,7 @@ // New Buffer new_buffer = (uint8_t*)av_malloc(*buffer_size * sizeof(uint8_t)); // Attach buffer to AVFrame - avpicture_fill((AVPicture *)new_av_frame, new_buffer, pix_fmt, width, height); + AV_COPY_PICTURE_DATA(new_av_frame, new_buffer, pix_fmt, width, height); 
new_av_frame->width = width; new_av_frame->height = height; new_av_frame->format = pix_fmt; @@ -1449,10 +1634,14 @@ // Init AVFrame for source image & final (converted image) frame_source = allocate_avframe(PIX_FMT_RGBA, source_image_width, source_image_height, &bytes_source, (uint8_t*) pixels); + #if IS_FFMPEG_3_2 + AVFrame *frame_final = allocate_avframe((AVPixelFormat)(video_st->codecpar->format), info.width, info.height, &bytes_final, NULL); + #else AVFrame *frame_final = allocate_avframe(video_codec->pix_fmt, info.width, info.height, &bytes_final, NULL); + #endif // Fill with data - avpicture_fill((AVPicture *) frame_source, (uint8_t*)pixels, PIX_FMT_RGBA, source_image_width, source_image_height); + AV_COPY_PICTURE_DATA(frame_source, (uint8_t*)pixels, PIX_FMT_RGBA, source_image_width, source_image_height); ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::process_video_packet", "frame->number", frame->number, "bytes_source", bytes_source, "bytes_final", bytes_final, "", -1, "", -1, "", -1); // Resize & convert pixel format @@ -1473,6 +1662,9 @@ // write video frame bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* frame_final) { +#if (LIBAVFORMAT_VERSION_MAJOR >= 58) + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags", oc->oformat->flags, "", -1, "", -1, "", -1, "", -1); +#else ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags & AVFMT_RAWPICTURE", oc->oformat->flags & AVFMT_RAWPICTURE, "", -1, "", -1, "", -1, "", -1); if (oc->oformat->flags & AVFMT_RAWPICTURE) { @@ -1500,7 +1692,9 @@ // Deallocate packet AV_FREE_PACKET(&pkt); - } else { + } else +#endif + { AVPacket pkt; av_init_packet(&pkt); @@ -1520,30 +1714,60 @@ /* encode the image */ int got_packet_ptr = 0; int error_code = 0; - #if LIBAVFORMAT_VERSION_MAJOR >= 54 - // Newer versions of FFMpeg - error_code = avcodec_encode_video2(video_codec, &pkt, frame_final, &got_packet_ptr); - + #if IS_FFMPEG_3_2 + // Write video packet (latest version of FFmpeg) + int frameFinished = 0; + int ret = avcodec_send_frame(video_codec, frame_final); + error_code = ret; + if (ret < 0 ) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet (Frame not sent)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + if (ret == AVERROR(EAGAIN) ) + cerr << "Frame EAGAIN" << "\n"; + if (ret == AVERROR_EOF ) + cerr << "Frame AVERROR_EOF" << "\n"; + avcodec_send_frame(video_codec, NULL); + } + else { + while (ret >= 0) { + ret = avcodec_receive_packet(video_codec, &pkt); + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + avcodec_flush_buffers(video_codec); + got_packet_ptr = 0; + break; + } + if (ret == 0) { + got_packet_ptr = 1; + break; + } + } + } #else - // Older versions of FFmpeg (much sloppier) - - // Encode Picture and Write Frame - int video_outbuf_size = 200000; - video_outbuf = (uint8_t*) av_malloc(200000); + #if LIBAVFORMAT_VERSION_MAJOR >= 54 + // Write video packet (older than FFmpeg 3.2) + error_code = avcodec_encode_video2(video_codec, &pkt, frame_final, &got_packet_ptr); + if (error_code != 0 ) + cerr << "Frame AVERROR_EOF" << "\n"; + if (got_packet_ptr == 0 ) + cerr << "Frame gotpacket error" << "\n"; + #else + // Write video packet (even older versions of FFmpeg) + int video_outbuf_size = 200000; + video_outbuf = (uint8_t*) av_malloc(200000); - /* encode the image */ - int out_size = avcodec_encode_video(video_codec, video_outbuf, 
video_outbuf_size, frame_final); + /* encode the image */ + int out_size = avcodec_encode_video(video_codec, video_outbuf, video_outbuf_size, frame_final); - /* if zero size, it means the image was buffered */ - if (out_size > 0) { - if(video_codec->coded_frame->key_frame) - pkt.flags |= AV_PKT_FLAG_KEY; - pkt.data= video_outbuf; - pkt.size= out_size; + /* if zero size, it means the image was buffered */ + if (out_size > 0) { + if(video_codec->coded_frame->key_frame) + pkt.flags |= AV_PKT_FLAG_KEY; + pkt.data= video_outbuf; + pkt.size= out_size; - // got data back (so encode this frame) - got_packet_ptr = 1; - } + // got data back (so encode this frame) + got_packet_ptr = 1; + } + #endif #endif /* if zero size, it means the image was buffered */ @@ -1593,15 +1817,16 @@ // Init a collection of software rescalers (thread safe) void FFmpegWriter::InitScalers(int source_width, int source_height) { - // Get the codec - AVCodecContext *c; - c = video_st->codec; + int scale_mode = SWS_FAST_BILINEAR; + if (openshot::Settings::Instance()->HIGH_QUALITY_SCALING) { + scale_mode = SWS_LANCZOS; + } // Init software rescalers vector (many of them, one for each thread) for (int x = 0; x < num_of_rescalers; x++) { // Init the software scaler from FFMpeg - img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, c->pix_fmt, SWS_BILINEAR, NULL, NULL, NULL); + img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), scale_mode, NULL, NULL, NULL); // Add rescaler to vector image_rescalers.push_back(img_convert_ctx); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Frame.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Frame.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Frame.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Frame.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -31,8 +31,9 @@ using namespace openshot; // Constructor - blank frame (300x200 blank image, 48kHz audio silence) -Frame::Frame() : number(1), pixel_ratio(1,1), channels(2), width(1), height(1), - channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false) +Frame::Frame() : number(1), pixel_ratio(1,1), channels(2), width(1), height(1), color("#000000"), + channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false), + max_audio_sample(0) { // Init the image magic and audio buffer audio = std::shared_ptr(new juce::AudioSampleBuffer(channels, 0)); @@ -43,8 +44,9 @@ // Constructor - image only (48kHz audio silence) Frame::Frame(int64_t number, int width, int height, string color) - : number(number), pixel_ratio(1,1), channels(2), width(width), height(height), - channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false) + : number(number), pixel_ratio(1,1), channels(2), width(width), height(height), color(color), + channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false), + max_audio_sample(0) { // Init the image magic and audio buffer audio = std::shared_ptr(new juce::AudioSampleBuffer(channels, 0)); @@ -55,8 +57,9 @@ // Constructor - audio only (300x200 blank image) Frame::Frame(int64_t number, int samples, int channels) : - number(number), pixel_ratio(1,1), 
channels(channels), width(1), height(1), - channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false) + number(number), pixel_ratio(1,1), channels(channels), width(1), height(1), color("#000000"), + channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false), + max_audio_sample(0) { // Init the image magic and audio buffer audio = std::shared_ptr(new juce::AudioSampleBuffer(channels, samples)); @@ -67,8 +70,9 @@ // Constructor - image & audio Frame::Frame(int64_t number, int width, int height, string color, int samples, int channels) - : number(number), pixel_ratio(1,1), channels(channels), width(width), height(height), - channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false) + : number(number), pixel_ratio(1,1), channels(channels), width(width), height(height), color(color), + channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false), + max_audio_sample(0) { // Init the image magic and audio buffer audio = std::shared_ptr(new juce::AudioSampleBuffer(channels, samples)); @@ -85,20 +89,33 @@ DeepCopy(other); } +// Assignment operator +Frame& Frame::operator= (const Frame& other) +{ + // copy pointers and data + DeepCopy(other); + + return *this; +} + // Copy data and pointers from another Frame instance void Frame::DeepCopy(const Frame& other) { number = other.number; - image = std::shared_ptr(new QImage(*(other.image))); - audio = std::shared_ptr(new juce::AudioSampleBuffer(*(other.audio))); - pixel_ratio = Fraction(other.pixel_ratio.num, other.pixel_ratio.den); channels = other.channels; + width = other.width; + height = other.height; channel_layout = other.channel_layout; has_audio_data = other.has_image_data; has_image_data = other.has_image_data; sample_rate = other.sample_rate; + pixel_ratio = Fraction(other.pixel_ratio.num, other.pixel_ratio.den); + color = other.color; - + if (other.image) + image = std::shared_ptr(new QImage(*(other.image))); + if (other.audio) + audio = std::shared_ptr(new juce::AudioSampleBuffer(*(other.audio))); if (other.wave_image) wave_image = std::shared_ptr(new QImage(*(other.wave_image))); } @@ -162,7 +179,7 @@ QVector labels; // Calculate width of an image based on the # of samples - int total_samples = audio->getNumSamples(); + int total_samples = GetAudioSamplesCount(); if (total_samples > 0) { // If samples are present... 
@@ -180,7 +197,7 @@ // Get audio for this channel const float *samples = audio->getReadPointer(channel); - for (int sample = 0; sample < audio->getNumSamples(); sample++, X++) + for (int sample = 0; sample < GetAudioSamplesCount(); sample++, X++) { // Sample value (scaled to -100 to 100) float value = samples[sample] * 100; @@ -322,7 +339,7 @@ float *output = NULL; AudioSampleBuffer *buffer(audio.get()); int num_of_channels = audio->getNumChannels(); - int num_of_samples = audio->getNumSamples(); + int num_of_samples = GetAudioSamplesCount(); // Resample to new sample rate (if needed) if (new_sample_rate != sample_rate) @@ -368,7 +385,7 @@ float *output = NULL; AudioSampleBuffer *buffer(audio.get()); int num_of_channels = audio->getNumChannels(); - int num_of_samples = audio->getNumSamples(); + int num_of_samples = GetAudioSamplesCount(); // Resample to new sample rate (if needed) if (new_sample_rate != sample_rate && resampler) @@ -421,10 +438,7 @@ int Frame::GetAudioSamplesCount() { const GenericScopedLock lock(addingAudioSection); - if (audio) - return audio->getNumSamples(); - else - return 0; + return max_audio_sample; } juce::AudioSampleBuffer *Frame::GetAudioSampleBuffer() @@ -453,7 +467,7 @@ // Check for blank image if (!image) // Fill with black - AddColor(width, height, "#000000"); + AddColor(width, height, color); // Return array of pixel packets return image->bits(); @@ -498,6 +512,8 @@ // Subtract the previous frame's total samples with this frame's total samples. Not all sample rates can // be evenly divided into frames, so each frame can have have different # of samples. int samples_per_frame = round(total_samples - previous_samples); + if (samples_per_frame < 0) + samples_per_frame = 0; return samples_per_frame; } @@ -565,18 +581,16 @@ // Thumbnail the frame image to the specified path. The image format is determined from the extension (i.e. 
image.PNG, image.JPEG) void Frame::Thumbnail(string path, int new_width, int new_height, string mask_path, string overlay_path, - string background_color, bool ignore_aspect, string format, int quality) { + string background_color, bool ignore_aspect, string format, int quality, float rotate) { // Create blank thumbnail image & fill background color std::shared_ptr thumbnail = std::shared_ptr(new QImage(new_width, new_height, QImage::Format_RGBA8888)); thumbnail->fill(QColor(QString::fromStdString(background_color))); - // Create transform and painter - QTransform transform; + // Create painter QPainter painter(thumbnail.get()); painter.setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform | QPainter::TextAntialiasing, true); - // Get preview image std::shared_ptr previewImage = GetImage(); @@ -603,6 +617,18 @@ int x = (new_width - previewImage->size().width()) / 2.0; // center int y = (new_height - previewImage->size().height()) / 2.0; // center painter.setCompositionMode(QPainter::CompositionMode_SourceOver); + + + // Create transform and rotate (if needed) + QTransform transform; + float origin_x = previewImage->width() / 2.0; + float origin_y = previewImage->height() / 2.0; + transform.translate(origin_x, origin_y); + transform.rotate(rotate); + transform.translate(-origin_x,-origin_y); + painter.setTransform(transform); + + // Draw image onto QImage painter.drawImage(x, y, *previewImage); @@ -678,8 +704,11 @@ } // Add (or replace) pixel data to the frame (based on a solid color) -void Frame::AddColor(int new_width, int new_height, string color) +void Frame::AddColor(int new_width, int new_height, string new_color) { + // Set color + color = new_color; + // Create new image object, and fill with pixel data const GenericScopedLock lock(addingImageSection); #pragma omp critical (AddImage) @@ -713,7 +742,7 @@ // Always convert to RGBA8888 (if different) if (image->format() != QImage::Format_RGBA8888) - image->convertToFormat(QImage::Format_RGBA8888); + *image = image->convertToFormat(QImage::Format_RGBA8888); // Update height and width width = image->width(); @@ -737,7 +766,7 @@ // Always convert to RGBA8888 (if different) if (image->format() != QImage::Format_RGBA8888) - image->convertToFormat(QImage::Format_RGBA8888); + *image = image->convertToFormat(QImage::Format_RGBA8888); // Update height and width width = image->width(); @@ -802,6 +831,9 @@ audio->setSize(channels, length, true, true, false); channel_layout = layout; sample_rate = rate; + + // Calculate max audio sample added + max_audio_sample = length; } // Add audio samples to a specific channel @@ -809,8 +841,11 @@ const GenericScopedLock lock(addingAudioSection); #pragma omp critical (adding_audio) { + // Clamp starting sample to 0 + int destStartSampleAdjusted = max(destStartSample, 0); + // Extend audio container to hold more (or less) samples and channels.. 
if needed - int new_length = destStartSample + numSamples; + int new_length = destStartSampleAdjusted + numSamples; int new_channel_length = audio->getNumChannels(); if (destChannel >= new_channel_length) new_channel_length = destChannel + 1; @@ -819,11 +854,15 @@ // Clear the range of samples first (if needed) if (replaceSamples) - audio->clear(destChannel, destStartSample, numSamples); + audio->clear(destChannel, destStartSampleAdjusted, numSamples); // Add samples to frame's audio buffer - audio->addFrom(destChannel, destStartSample, source, numSamples, gainToApplyToSource); + audio->addFrom(destChannel, destStartSampleAdjusted, source, numSamples, gainToApplyToSource); has_audio_data = true; + + // Calculate max audio sample added + if (new_length > max_audio_sample) + max_audio_sample = new_length; } } @@ -842,7 +881,7 @@ // Check for blank image if (!image) // Fill with black - AddColor(width, height, "#000000"); + AddColor(width, height, color); return image; } @@ -910,7 +949,7 @@ void Frame::Play() { // Check if samples are present - if (!audio->getNumSamples()) + if (!GetAudioSamplesCount()) return; AudioDeviceManager deviceManager; @@ -992,4 +1031,8 @@ audio->setSize(channels, numSamples, false, true, false); audio->clear(); has_audio_data = true; + + // Calculate max audio sample added + if (numSamples > max_audio_sample) + max_audio_sample = numSamples; } diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/FrameMapper.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/FrameMapper.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/FrameMapper.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/FrameMapper.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -31,7 +31,7 @@ using namespace openshot; FrameMapper::FrameMapper(ReaderBase *reader, Fraction target, PulldownType target_pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout) : - reader(reader), target(target), pulldown(target_pulldown), is_dirty(true), avr(NULL), timeline_frame_offset(0) + reader(reader), target(target), pulldown(target_pulldown), is_dirty(true), avr(NULL) { // Set the original frame rate from the reader original = Fraction(reader->info.fps.num, reader->info.fps.den); @@ -54,9 +54,6 @@ // Adjust cache size based on size of frame and audio final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); - - // init mapping between original and target frames - Init(); } // Destructor @@ -205,22 +202,23 @@ } } else { - // Map the remaining framerates using a simple Keyframe curve - // Calculate the difference (to be used as a multiplier) + // Map the remaining framerates using a linear algorithm double rate_diff = target.ToDouble() / original.ToDouble(); int64_t new_length = reader->info.video_length * rate_diff; - // Build curve for framerate mapping - Keyframe rate_curve; - rate_curve.AddPoint(1, 1, LINEAR); - rate_curve.AddPoint(new_length, reader->info.video_length, LINEAR); + // Calculate the value difference + double value_increment = (reader->info.video_length + 1) / (double) (new_length); // Loop through curve, and build list of frames + double original_frame_num = 1.0f; for (int64_t frame_num = 1; frame_num <= new_length; frame_num++) { // Add 2 fields per frame - AddField(rate_curve.GetInt(frame_num)); - AddField(rate_curve.GetInt(frame_num)); + AddField(round(original_frame_num)); + 
AddField(round(original_frame_num)); + + // Increment original frame number + original_frame_num += value_increment; } } @@ -241,7 +239,7 @@ if (field % 2 == 0 && field > 0) { // New frame number - int64_t frame_number = field / 2 + timeline_frame_offset; + int64_t frame_number = field / 2; // Set the bottom frame if (f.isOdd) @@ -310,6 +308,11 @@ MappedFrame FrameMapper::GetMappedFrame(int64_t TargetFrameNumber) { + // Check if mappings are dirty (and need to be recalculated) + if (is_dirty) + // Recalculate mappings + Init(); + // Ignore mapping on single image readers if (info.has_video and !info.has_audio and info.has_single_image) { // Return the same number @@ -346,15 +349,12 @@ std::shared_ptr new_frame; // Init some basic properties about this frame (keep sample rate and # channels the same as the original reader for now) - int samples_in_frame = Frame::GetSamplesPerFrame(number + timeline_frame_offset, target, reader->info.sample_rate, reader->info.channels); + int samples_in_frame = Frame::GetSamplesPerFrame(number, target, reader->info.sample_rate, reader->info.channels); try { // Debug output ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); - // Set max image size (used for performance optimization) - reader->SetMaxSize(max_width, max_height); - // Attempt to get a frame (but this could fail if a reader has just been closed) new_frame = reader->GetFrame(number); @@ -376,6 +376,7 @@ new_frame = std::make_shared(number, info.width, info.height, "#000000", samples_in_frame, reader->info.channels); new_frame->SampleRate(reader->info.sample_rate); new_frame->ChannelsLayout(info.channel_layout); + new_frame->AddAudioSilence(samples_in_frame); return new_frame; } @@ -421,7 +422,7 @@ // Get # of channels in the actual frame int channels_in_frame = mapped_frame->GetAudioChannelsCount(); - int samples_in_frame = Frame::GetSamplesPerFrame(frame_number + timeline_frame_offset, target, mapped_frame->SampleRate(), channels_in_frame); + int samples_in_frame = Frame::GetSamplesPerFrame(frame_number, target, mapped_frame->SampleRate(), channels_in_frame); // Determine if mapped frame is identical to source frame // including audio sample distribution according to mapped.Samples, @@ -483,7 +484,7 @@ // converter isn't input limited. 
const int EXTRA_INPUT_SAMPLES = 20; - // Extend end sample count by an addtional EXTRA_INPUT_SAMPLES samples + // Extend end sample count by an additional EXTRA_INPUT_SAMPLES samples copy_samples.sample_end += EXTRA_INPUT_SAMPLES; int samples_per_end_frame = Frame::GetSamplesPerFrame(copy_samples.frame_end, original, @@ -650,8 +651,8 @@ // Deallocate resample buffer if (avr) { - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; } } @@ -741,30 +742,20 @@ // Deallocate resample buffer if (avr) { - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; } - - // Re-init mapping - Init(); -} - -// Set offset relative to parent timeline -void FrameMapper::SetTimelineFrameOffset(int64_t offset) -{ - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::SetTimelineFrameOffset", "offset", offset, "", -1, "", -1, "", -1, "", -1, "", -1); - - // Mark as dirty - is_dirty = true; - - // Used to correctly align audio sample distribution - timeline_frame_offset = offset; } // Resample audio and map channels (if needed) void FrameMapper::ResampleMappedAudio(std::shared_ptr frame, int64_t original_frame_number) { + // Check if mappings are dirty (and need to be recalculated) + if (is_dirty) + // Recalculate mappings + Init(); + // Init audio buffers / variables int total_frame_samples = 0; int channels_in_frame = frame->GetAudioChannelsCount(); @@ -813,7 +804,7 @@ } // Update total samples & input frame size (due to bigger or smaller data types) - total_frame_samples = Frame::GetSamplesPerFrame(frame->number + timeline_frame_offset, target, info.sample_rate, info.channels); + total_frame_samples = Frame::GetSamplesPerFrame(frame->number, target, info.sample_rate, info.channels); ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio (adjust # of samples)", "total_frame_samples", total_frame_samples, "info.sample_rate", info.sample_rate, "sample_rate_in_frame", sample_rate_in_frame, "info.channels", info.channels, "channels_in_frame", channels_in_frame, "original_frame_number", original_frame_number); @@ -829,7 +820,7 @@ // setup resample context if (!avr) { - avr = avresample_alloc_context(); + avr = SWR_ALLOC(); av_opt_set_int(avr, "in_channel_layout", channel_layout_in_frame, 0); av_opt_set_int(avr, "out_channel_layout", info.channel_layout, 0); av_opt_set_int(avr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0); @@ -838,11 +829,11 @@ av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr, "in_channels", channels_in_frame, 0); av_opt_set_int(avr, "out_channels", info.channels, 0); - avresample_open(avr); + SWR_INIT(avr); } // Convert audio samples - nb_samples = avresample_convert(avr, // audio resample context + nb_samples = SWR_CONVERT(avr, // audio resample context audio_converted->data, // output data pointers audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) audio_converted->nb_samples, // maximum number of samples that the output buffer can hold diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/KeyFrame.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/KeyFrame.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/KeyFrame.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/KeyFrame.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -296,17 +296,48 @@ Process(); // Is index a valid point? 
- if (index >= 0 && index < Values.size()) - // Return value - return long(round(Values[index].IsIncreasing())); - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return long(round(Values[0].IsIncreasing())); - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return long(round(Values[Values.size() - 1].IsIncreasing())); + if (index >= 1 && (index + 1) < Values.size()) { + int64_t current_value = GetLong(index); + int64_t previous_value = 0; + int64_t next_value = 0; + int64_t previous_repeats = 0; + int64_t next_repeats = 0; + + // Loop backwards and look for the next unique value + for (vector::iterator backwards_it = Values.begin() + index; backwards_it != Values.begin(); backwards_it--) { + previous_value = long(round((*backwards_it).Y)); + if (previous_value == current_value) { + // Found same value + previous_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + // Loop forwards and look for the next unique value + for (vector::iterator forwards_it = Values.begin() + (index + 1); forwards_it != Values.end(); forwards_it++) { + next_value = long(round((*forwards_it).Y)); + if (next_value == current_value) { + // Found same value + next_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + if (current_value < next_value) { + // Increasing + return true; + } + else if (current_value >= next_value) { + // Decreasing + return false; + } + } else - // return the default direction of most curves (i.e. increasing is true) + // return default true (since most curves increase) return true; } @@ -385,6 +416,7 @@ } // Get the fraction that represents how many times this value is repeated in the curve +// This is deprecated and will be removed soon. Fraction Keyframe::GetRepeatFraction(int64_t index) { // Check if it needs to be processed @@ -392,17 +424,42 @@ Process(); // Is index a valid point? - if (index >= 0 && index < Values.size()) - // Return value - return Values[index].Repeat(); - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return Values[0].Repeat(); - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return Values[Values.size() - 1].Repeat(); + if (index >= 1 && (index + 1) < Values.size()) { + int64_t current_value = GetLong(index); + int64_t previous_value = 0; + int64_t next_value = 0; + int64_t previous_repeats = 0; + int64_t next_repeats = 0; + + // Loop backwards and look for the next unique value + for (vector::iterator backwards_it = Values.begin() + index; backwards_it != Values.begin(); backwards_it--) { + previous_value = long(round((*backwards_it).Y)); + if (previous_value == current_value) { + // Found same value + previous_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + // Loop forwards and look for the next unique value + for (vector::iterator forwards_it = Values.begin() + (index + 1); forwards_it != Values.end(); forwards_it++) { + next_value = long(round((*forwards_it).Y)); + if (next_value == current_value) { + // Found same value + next_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + int64_t total_repeats = previous_repeats + next_repeats; + return Fraction(previous_repeats, total_repeats); + } else - // return a blank coordinate (0,0) + // return a blank coordinate return Fraction(1,1); } @@ -414,17 +471,48 @@ Process(); // Is index a valid point?
- if (index >= 0 && index < Values.size()) - // Return value - return Values[index].Delta(); - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return Values[0].Delta(); - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return Values[Values.size() - 1].Delta(); + if (index >= 1 && (index + 1) < Values.size()) { + int64_t current_value = GetLong(index); + int64_t previous_value = 0; + int64_t next_value = 0; + int64_t previous_repeats = 0; + int64_t next_repeats = 0; + + // Loop backwards and look for the next unique value + for (vector::iterator backwards_it = Values.begin() + index; backwards_it != Values.begin(); backwards_it--) { + previous_value = long(round((*backwards_it).Y)); + if (previous_value == current_value) { + // Found same value + previous_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + // Loop forwards and look for the next unique value + for (vector::iterator forwards_it = Values.begin() + (index + 1); forwards_it != Values.end(); forwards_it++) { + next_value = long(round((*forwards_it).Y)); + if (next_value == current_value) { + // Found same value + next_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + // Check for matching previous value (special case for 1st element) + if (current_value == previous_value) + previous_value = 0; + + if (previous_repeats == 1) + return current_value - previous_value; + else + return 0.0; + } else - // return a blank coordinate (0,0) + // return a blank coordinate return 0.0; } @@ -529,7 +617,7 @@ for (vector::iterator it = Values.begin() + 1; it != Values.end(); it++) { Coordinate c = *it; - cout << long(round(c.X)) << "\t" << c.Y << "\t" << c.IsIncreasing() << "\t" << c.Repeat().num << "\t" << c.Repeat().den << "\t" << c.Delta() << endl; + cout << long(round(c.X)) << "\t" << c.Y << "\t" << IsIncreasing(c.X) << "\t" << GetRepeatFraction(c.X).num << "\t" << GetRepeatFraction(c.X).den << "\t" << GetDelta(c.X) << endl; } } @@ -567,69 +655,6 @@ // process segment p1,p2 ProcessSegment(x, p1, p2); } - - // Loop through each Value, and set the direction of the coordinate. This is used - // when time mapping, to determine what direction the audio waveforms play. 
- bool increasing = true; - int repeat_count = 1; - int64_t last_value = 0; - for (vector::iterator it = Values.begin() + 1; it != Values.end(); it++) { - int current_value = long(round((*it).Y)); - int64_t next_value = long(round((*it).Y)); - int64_t prev_value = long(round((*it).Y)); - if (it + 1 != Values.end()) - next_value = long(round((*(it + 1)).Y)); - if (it - 1 >= Values.begin()) - prev_value = long(round((*(it - 1)).Y)); - - // Loop forward and look for the next unique value (to determine direction) - for (vector::iterator direction_it = it + 1; direction_it != Values.end(); direction_it++) { - int64_t next = long(round((*direction_it).Y)); - - // Detect direction - if (current_value < next) - { - increasing = true; - break; - } - else if (current_value > next) - { - increasing = false; - break; - } - } - - // Set direction - (*it).IsIncreasing(increasing); - - // Detect repeated Y value - if (current_value == last_value) - // repeated, so increment count - repeat_count++; - else - // reset repeat counter - repeat_count = 1; - - // Detect how many 'more' times it's repeated - int additional_repeats = 0; - for (vector::iterator repeat_it = it + 1; repeat_it != Values.end(); repeat_it++) { - int64_t next = long(round((*repeat_it).Y)); - if (next == current_value) - // repeated, so increment count - additional_repeats++; - else - break; // stop looping - } - - // Set repeat fraction - (*it).Repeat(Fraction(repeat_count, repeat_count + additional_repeats)); - - // Set delta (i.e. different from previous unique Y value) - (*it).Delta(current_value - last_value); - - // track the last value - last_value = current_value; - } } // reset flag diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Qt/AudioPlaybackThread.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Qt/AudioPlaybackThread.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Qt/AudioPlaybackThread.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Qt/AudioPlaybackThread.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -61,7 +61,7 @@ audioDeviceManager.dispatchPendingMessages(); } - // Construtor + // Constructor AudioPlaybackThread::AudioPlaybackThread() : Thread("audio-playback") , player() @@ -165,8 +165,7 @@ transport.start(); while (!threadShouldExit() && transport.isPlaying() && is_playing) - sleep(100); - + usleep(2500); // Stop audio and shutdown transport Stop(); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Qt/PlayerPrivate.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Qt/PlayerPrivate.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Qt/PlayerPrivate.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Qt/PlayerPrivate.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -129,7 +129,7 @@ } // Sleep (leaving the video frame on the screen for the correct amount of time) - if (sleep_time > 0) sleep(sleep_time); + if (sleep_time > 0) usleep(sleep_time * 1000); } } @@ -149,7 +149,7 @@ else { // Update cache on which frame was retrieved - videoCache->current_display_frame = video_position; + videoCache->setCurrentFramePosition(video_position); // return frame from reader return reader->GetFrame(video_position); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Qt/VideoCacheThread.cpp 
libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Qt/VideoCacheThread.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Qt/VideoCacheThread.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Qt/VideoCacheThread.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -102,12 +102,18 @@ // Ignore out of bounds frame exceptions } + // Is cache position behind current display frame? + if (position < current_display_frame) { + // Jump ahead + position = current_display_frame; + } + // Increment frame number position++; } // Sleep for 1 frame length - sleep(frame_time); + usleep(frame_time * 1000); } return; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/QtImageReader.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/QtImageReader.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/QtImageReader.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/QtImageReader.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -26,6 +26,18 @@ */ #include "../include/QtImageReader.h" +#include "../include/Settings.h" +#include "../include/Clip.h" +#include "../include/CacheMemory.h" +#include +#include +#include + +#if USE_RESVG == 1 + // If defined and found in CMake, utilize the libresvg for parsing + // SVG files and rasterizing them to QImages. + #include "ResvgQt.h" +#endif using namespace openshot; @@ -51,12 +63,41 @@ // Open reader if not already open if (!is_open) { - // Attempt to open file + bool success = true; image = std::shared_ptr(new QImage()); - bool success = image->load(QString::fromStdString(path)); - // Set pixel format - image = std::shared_ptr(new QImage(image->convertToFormat(QImage::Format_RGBA8888))); +#if USE_RESVG == 1 + // If defined and found in CMake, utilize the libresvg for parsing + // SVG files and rasterizing them to QImages. 
+ // Only use resvg for files ending in '.svg' or '.svgz' + if (path.find(".svg") != std::string::npos || + path.find(".svgz") != std::string::npos) { + + ResvgRenderer renderer(QString::fromStdString(path)); + if (!renderer.isValid()) { + success = false; + } else { + + image = std::shared_ptr(new QImage(renderer.defaultSize(), QImage::Format_RGBA8888)); + image->fill(Qt::transparent); + + QPainter p(image.get()); + renderer.render(&p); + p.end(); + } + + } else { + // Attempt to open file (old method) + success = image->load(QString::fromStdString(path)); + if (success) + image = std::shared_ptr(new QImage(image->convertToFormat(QImage::Format_RGBA8888))); + } +#else + // Attempt to open file using Qt's build in image processing capabilities + success = image->load(QString::fromStdString(path)); + if (success) + image = std::shared_ptr(new QImage(image->convertToFormat(QImage::Format_RGBA8888))); +#endif if (!success) // raise exception @@ -111,21 +152,6 @@ } } -void QtImageReader::SetMaxSize(int width, int height) -{ - // Determine if we need to scale the image (for performance reasons) - // The timeline passes its size to the clips, which pass their size to the readers, and eventually here - // A max_width/max_height = 0 means do not scale (probably because we are scaling the image larger than 100%) - - // Remove cache that is no longer valid (if needed) - if (cached_image && (cached_image->width() != width && cached_image->height() != height)) - // Expire this cache - cached_image.reset(); - - max_width = width; - max_height = height; -} - // Get an openshot::Frame object for a specific frame number of this reader. std::shared_ptr QtImageReader::GetFrame(int64_t requested_frame) { @@ -133,39 +159,96 @@ if (!is_open) throw ReaderClosed("The Image is closed. Call Open() before calling this method.", path); - if (max_width != 0 && max_height != 0 && max_width < info.width && max_height < info.height) - { - // Scale image smaller (or use a previous scaled image) - if (!cached_image) { - // Create a scoped lock, allowing only a single thread to run the following code at one time - const GenericScopedLock lock(getFrameCriticalSection); + // Create a scoped lock, allowing only a single thread to run the following code at one time + const GenericScopedLock lock(getFrameCriticalSection); + // Determine the max size of this source image (based on the timeline's size, the scaling mode, + // and the scaling keyframes). This is a performance improvement, to keep the images as small as possible, + // without losing quality. NOTE: We cannot go smaller than the timeline itself, or the add_layer timeline + // method will scale it back to timeline size before scaling it smaller again. This needs to be fixed in + // the future. 
+ int max_width = Settings::Instance()->MAX_WIDTH; + if (max_width <= 0) + max_width = info.width; + int max_height = Settings::Instance()->MAX_HEIGHT; + if (max_height <= 0) + max_height = info.height; + + Clip* parent = (Clip*) GetClip(); + if (parent) { + if (parent->scale == SCALE_FIT || parent->scale == SCALE_STRETCH) { + // Best fit or Stretch scaling (based on max timeline size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + max_width = max(float(max_width), max_width * max_scale_x); + max_height = max(float(max_height), max_height * max_scale_y); + + } else if (parent->scale == SCALE_CROP) { + // Cropping scale mode (based on max timeline size * cropped size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + QSize width_size(max_width * max_scale_x, + round(max_width / (float(info.width) / float(info.height)))); + QSize height_size(round(max_height / (float(info.height) / float(info.width))), + max_height * max_scale_y); + // respect aspect ratio + if (width_size.width() >= max_width && width_size.height() >= max_height) { + max_width = max(max_width, width_size.width()); + max_height = max(max_height, width_size.height()); + } + else { + max_width = max(max_width, height_size.width()); + max_height = max(max_height, height_size.height()); + } + + } else { + // No scaling, use original image size (slower) + max_width = info.width; + max_height = info.height; + } + } + + // Scale image smaller (or use a previous scaled image) + if (!cached_image || (cached_image && cached_image->width() != max_width || cached_image->height() != max_height)) { + +#if USE_RESVG == 1 + // If defined and found in CMake, utilize the libresvg for parsing + // SVG files and rasterizing them to QImages. 
+ // Only use resvg for files ending in '.svg' or '.svgz' + if (path.find(".svg") != std::string::npos || + path.find(".svgz") != std::string::npos) { + ResvgRenderer renderer(QString::fromStdString(path)); + if (renderer.isValid()) { + + cached_image = std::shared_ptr(new QImage(QSize(max_width, max_height), QImage::Format_RGBA8888)); + cached_image->fill(Qt::transparent); + + QPainter p(cached_image.get()); + renderer.render(&p); + p.end(); + } + } else { // We need to resize the original image to a smaller image (for performance reasons) // Only do this once, to prevent tons of unneeded scaling operations cached_image = std::shared_ptr(new QImage(image->scaled(max_width, max_height, Qt::KeepAspectRatio, Qt::SmoothTransformation))); cached_image = std::shared_ptr(new QImage(cached_image->convertToFormat(QImage::Format_RGBA8888))); } +#else + // We need to resize the original image to a smaller image (for performance reasons) + // Only do this once, to prevent tons of unneeded scaling operations + cached_image = std::shared_ptr(new QImage(image->scaled(max_width, max_height, Qt::KeepAspectRatio, Qt::SmoothTransformation))); + cached_image = std::shared_ptr(new QImage(cached_image->convertToFormat(QImage::Format_RGBA8888))); +#endif + } - // Create or get frame object - std::shared_ptr image_frame(new Frame(requested_frame, cached_image->width(), cached_image->height(), "#000000", Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels), info.channels)); - - // Add Image data to frame - image_frame->AddImage(cached_image); - - // return frame object - return image_frame; - - } else { - // Use original image (higher quality but slower) - // Create or get frame object - std::shared_ptr image_frame(new Frame(requested_frame, info.width, info.height, "#000000", Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels), info.channels)); + // Create or get frame object + std::shared_ptr image_frame(new Frame(requested_frame, cached_image->width(), cached_image->height(), "#000000", Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels), info.channels)); - // Add Image data to frame - image_frame->AddImage(image); + // Add Image data to frame + image_frame->AddImage(cached_image); - // return frame object - return image_frame; - } + // return frame object + return image_frame; } // Generate JSON string of this object diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/QtPlayer.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/QtPlayer.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/QtPlayer.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/QtPlayer.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -117,7 +117,7 @@ void QtPlayer::Seek(int64_t new_frame) { // Check for seek - if (new_frame > 0) { + if (reader && threads_started && new_frame > 0) { // Notify cache thread that seek has occurred p->videoCache->Seek(new_frame); @@ -138,11 +138,13 @@ mode = PLAYBACK_STOPPED; // Notify threads of stopping - p->videoCache->Stop(); - p->audioPlayback->Stop(); + if (reader && threads_started) { + p->videoCache->Stop(); + p->audioPlayback->Stop(); - // Kill all threads - p->stopPlayback(); + // Kill all threads + p->stopPlayback(); + } p->video_position = 0; threads_started = false; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/ReaderBase.cpp 
libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/ReaderBase.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/ReaderBase.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/ReaderBase.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -58,8 +58,9 @@ info.channel_layout = LAYOUT_MONO; info.audio_stream_index = -1; info.audio_timebase = Fraction(); - max_width = 0; - max_height = 0; + + // Init parent clip + parent = NULL; } // Display file information @@ -100,6 +101,13 @@ cout << "--> Audio Stream Index: " << info.audio_stream_index << endl; cout << "--> Audio Timebase: " << info.audio_timebase.ToDouble() << " (" << info.audio_timebase.num << "/" << info.audio_timebase.den << ")" << endl; cout << "----------------------------" << endl; + cout << "--------- Metadata ---------" << endl; + cout << "----------------------------" << endl; + + // Iterate through metadata + map::iterator it; + for (it = info.metadata.begin(); it != info.metadata.end(); it++) + cout << "--> " << it->first << ": " << it->second << endl; } // Generate Json::JsonValue for this object @@ -147,6 +155,12 @@ root["audio_timebase"]["num"] = info.audio_timebase.num; root["audio_timebase"]["den"] = info.audio_timebase.den; + // Append metadata map + root["metadata"] = Json::Value(Json::objectValue); + map::iterator it; + for (it = info.metadata.begin(); it != info.metadata.end(); it++) + root["metadata"][it->first] = it->second; + // return JsonValue return root; } @@ -226,4 +240,20 @@ if (!root["audio_timebase"]["den"].isNull()) info.audio_timebase.den = root["audio_timebase"]["den"].asInt(); } + if (!root["metadata"].isNull() && root["metadata"].isObject()) { + for( Json::Value::iterator itr = root["metadata"].begin() ; itr != root["metadata"].end() ; itr++ ) { + string key = itr.key().asString(); + info.metadata[key] = root["metadata"][key].asString(); + } + } +} + +/// Parent clip object of this reader (which can be unparented and NULL) +ClipBase* ReaderBase::GetClip() { + return parent; +} + +/// Set parent clip object of this reader +void ReaderBase::SetClip(ClipBase* clip) { + parent = clip; } diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Settings.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Settings.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Settings.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Settings.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,52 @@ +/** + * @file + * @brief Source file for global Settings class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "../include/Settings.h" + +using namespace std; +using namespace openshot; + + +// Global reference to logger +Settings *Settings::m_pInstance = NULL; + +// Create or Get an instance of the logger singleton +Settings *Settings::Instance() +{ + if (!m_pInstance) { + // Create the actual instance of logger only once + m_pInstance = new Settings; + m_pInstance->HARDWARE_DECODE = false; + m_pInstance->HARDWARE_ENCODE = false; + m_pInstance->HIGH_QUALITY_SCALING = false; + m_pInstance->MAX_WIDTH = 0; + m_pInstance->MAX_HEIGHT = 0; + m_pInstance->WAIT_FOR_VIDEO_PROCESSING_TASK = false; + } + + return m_pInstance; +} diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/TextReader.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/TextReader.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/TextReader.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/TextReader.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -154,7 +154,7 @@ std::shared_ptr copy_image(new Magick::Image(*image.get())); copy_image->modifyImage(); // actually copy the image data to this object //TODO: Reimplement this with QImage - //image_frame->AddImage(copy_image); + image_frame->AddMagickImage(copy_image); // return frame object return image_frame; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Timeline.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Timeline.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/src/Timeline.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/src/Timeline.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -60,7 +60,7 @@ info.video_length = info.fps.ToFloat() * info.duration; // Init max image size - SetMaxSize(info.width, info.height); + SetMaxSize(info.width, info.height); // Init cache final_cache = new CacheMemory(); @@ -127,16 +127,6 @@ FrameMapper* clip_mapped_reader = (FrameMapper*) clip_reader; clip_mapped_reader->ChangeMapping(info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout); - // Update timeline offset - float time_diff = 0 - clip->Position() + clip->Start(); - int clip_offset = -round(time_diff * info.fps.ToFloat()); - - if (clip_offset != 0) - // Reduce negative offset by 1 (since we want to avoid frame 0) - clip_offset += 1; - - clip_mapped_reader->SetTimelineFrameOffset(clip_offset); - // Update clip reader clip->Reader(clip_reader); } @@ -223,9 +213,6 @@ // Debug output ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); - // Set max image size (used for performance optimization) - clip->SetMaxSize(info.width, info.height); - // Attempt to get a frame (but this could fail if a reader has just been closed) #pragma omp critical (T_GetOtCreateFrame) new_frame = std::shared_ptr(clip->GetFrame(number)); @@ -245,7 +232,7 @@ ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); // Create blank frame - new_frame = std::make_shared(number, max_width, max_height, "#000000", samples_in_frame, info.channels); + new_frame = std::make_shared(number, 
Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, "#000000", samples_in_frame, info.channels); #pragma omp critical (T_GetOtCreateFrame) { new_frame->SampleRate(info.sample_rate); @@ -255,7 +242,7 @@ } // Process a new layer of video or audio -void Timeline::add_layer(std::shared_ptr new_frame, Clip* source_clip, int64_t clip_frame_number, int64_t timeline_frame_number, bool is_top_clip) +void Timeline::add_layer(std::shared_ptr new_frame, Clip* source_clip, int64_t clip_frame_number, int64_t timeline_frame_number, bool is_top_clip, float max_volume) { // Get the clip's frame & image std::shared_ptr source_frame; @@ -284,13 +271,14 @@ // Generate Waveform Dynamically (the size of the timeline) std::shared_ptr source_image; #pragma omp critical (T_addLayer) - source_image = source_frame->GetWaveform(max_width, max_height, red, green, blue, alpha); + source_image = source_frame->GetWaveform(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, red, green, blue, alpha); source_frame->AddImage(std::shared_ptr(source_image)); } /* Apply effects to the source frame (if any). If multiple clips are overlapping, only process the * effects on the top clip. */ if (is_top_clip && source_frame) + #pragma omp critical (T_addLayer) source_frame = apply_effects(source_frame, timeline_frame_number, source_clip->Layer()); // Declare an image to hold the source frame's image @@ -298,33 +286,44 @@ /* COPY AUDIO - with correct volume */ if (source_clip->Reader()->info.has_audio) { - // Debug output ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Copy Audio)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1); - if (source_frame->GetAudioChannelsCount() == info.channels) + if (source_frame->GetAudioChannelsCount() == info.channels && source_clip->has_audio.GetInt(clip_frame_number) != 0) for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++) { - float initial_volume = 1.0f; - float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1); // previous frame's percentage of volume (0 to 1) - float volume = source_clip->volume.GetValue(clip_frame_number); // percentage of volume (0 to 1) + // Get volume from previous frame and this frame + float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1); + float volume = source_clip->volume.GetValue(clip_frame_number); int channel_filter = source_clip->channel_filter.GetInt(clip_frame_number); // optional channel to filter (if not -1) int channel_mapping = source_clip->channel_mapping.GetInt(clip_frame_number); // optional channel to map this channel to (if not -1) + // Apply volume mixing strategy + if (source_clip->mixing == VOLUME_MIX_AVERAGE && max_volume > 1.0) { + // Don't allow this clip to exceed 100% (divide volume equally between all overlapping clips with volume + previous_volume = previous_volume / max_volume; + volume = volume / max_volume; + } + else if (source_clip->mixing == VOLUME_MIX_REDUCE && max_volume > 1.0) { + // Reduce clip volume by a bit, hoping it will prevent exceeding 100% (but it is very possible it will) + previous_volume = previous_volume * 0.77; + volume = volume * 0.77; + } + // If channel filter enabled, check for correct channel (and skip non-matching channels) if (channel_filter != -1 && 
channel_filter != channel) continue; // skip to next channel + // If no volume on this frame or previous frame, do nothing + if (previous_volume == 0.0 && volume == 0.0) + continue; // skip to next channel + // If channel mapping disabled, just use the current channel if (channel_mapping == -1) channel_mapping = channel; - // If no ramp needed, set initial volume = clip's volume - if (isEqual(previous_volume, volume)) - initial_volume = volume; - // Apply ramp to source frame (if needed) - if (!isEqual(previous_volume, volume)) + if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0)) source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume); // TODO: Improve FrameMapper (or Timeline) to always get the correct number of samples per frame. @@ -339,7 +338,7 @@ // Copy audio samples (and set initial volume). Mix samples with existing audio samples. The gains are added together, to // be sure to set the gain's correctly, so the sum does not exceed 1.0 (of audio distortion will happen). #pragma omp critical (T_addLayer) - new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), initial_volume); + new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0); } else @@ -385,35 +384,48 @@ QSize source_size = source_image->size(); switch (source_clip->scale) { - case (SCALE_FIT): - // keep aspect ratio - source_size.scale(max_width, max_height, Qt::KeepAspectRatio); + case (SCALE_FIT): { + // keep aspect ratio + source_size.scale(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, Qt::KeepAspectRatio); - // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_FIT)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); - break; - - case (SCALE_STRETCH): - // ignore aspect ratio - source_size.scale(max_width, max_height, Qt::IgnoreAspectRatio); + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_FIT)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); + break; + } + case (SCALE_STRETCH): { + // ignore aspect ratio + source_size.scale(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, Qt::IgnoreAspectRatio); - // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_STRETCH)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); - break; + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_STRETCH)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); + break; + } + case (SCALE_CROP): { + QSize width_size(Settings::Instance()->MAX_WIDTH, round(Settings::Instance()->MAX_WIDTH / (float(source_size.width()) / float(source_size.height())))); + QSize height_size(round(Settings::Instance()->MAX_HEIGHT / (float(source_size.height()) / float(source_size.width()))), Settings::Instance()->MAX_HEIGHT); + + // respect aspect ratio + if (width_size.width() >= Settings::Instance()->MAX_WIDTH && width_size.height() >= Settings::Instance()->MAX_HEIGHT) + 
source_size.scale(width_size.width(), width_size.height(), Qt::KeepAspectRatio); + else + source_size.scale(height_size.width(), height_size.height(), Qt::KeepAspectRatio); - case (SCALE_CROP): - QSize width_size(max_width, round(max_width / (float(source_size.width()) / float(source_size.height())))); - QSize height_size(round(max_height / (float(source_size.height()) / float(source_size.width()))), max_height); - - // respect aspect ratio - if (width_size.width() >= max_width && width_size.height() >= max_height) - source_size.scale(width_size.width(), width_size.height(), Qt::KeepAspectRatio); - else - source_size.scale(height_size.width(), height_size.height(), Qt::KeepAspectRatio); + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_CROP)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); + break; + } + case (SCALE_NONE): { + // Calculate ratio of source size to project size + // Even with no scaling, previews need to be adjusted correctly + // (otherwise NONE scaling draws the frame image outside of the preview) + float source_width_ratio = source_size.width() / float(info.width); + float source_height_ratio = source_size.height() / float(info.height); + source_size.scale(Settings::Instance()->MAX_WIDTH * source_width_ratio, Settings::Instance()->MAX_HEIGHT * source_height_ratio, Qt::KeepAspectRatio); - // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_CROP)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); - break; + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_NONE)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); + break; + } } /* GRAVITY LOCATION - Initialize X & Y to the correct values (before applying location curves) */ @@ -429,32 +441,32 @@ switch (source_clip->gravity) { case (GRAVITY_TOP): - x = (max_width - scaled_source_width) / 2.0; // center + x = (Settings::Instance()->MAX_WIDTH - scaled_source_width) / 2.0; // center break; case (GRAVITY_TOP_RIGHT): - x = max_width - scaled_source_width; // right + x = Settings::Instance()->MAX_WIDTH - scaled_source_width; // right break; case (GRAVITY_LEFT): - y = (max_height - scaled_source_height) / 2.0; // center + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height) / 2.0; // center break; case (GRAVITY_CENTER): - x = (max_width - scaled_source_width) / 2.0; // center - y = (max_height - scaled_source_height) / 2.0; // center + x = (Settings::Instance()->MAX_WIDTH - scaled_source_width) / 2.0; // center + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height) / 2.0; // center break; case (GRAVITY_RIGHT): - x = max_width - scaled_source_width; // right - y = (max_height - scaled_source_height) / 2.0; // center + x = Settings::Instance()->MAX_WIDTH - scaled_source_width; // right + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height) / 2.0; // center break; case (GRAVITY_BOTTOM_LEFT): - y = (max_height - scaled_source_height); // bottom + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height); // bottom break; case (GRAVITY_BOTTOM): - x = (max_width - scaled_source_width) / 2.0; // center - y = (max_height - scaled_source_height); // bottom + x = 
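The reworked scale switch above leans on QSize::scale with Qt::KeepAspectRatio / Qt::IgnoreAspectRatio, plus a two-candidate comparison for SCALE_CROP. A standalone sketch of the three sizing outcomes, assuming QtCore is available; the concrete sizes are illustrative:

    #include <QSize>
    #include <cmath>
    #include <cstdio>

    int main() {
        const int max_w = 1280, max_h = 720;   // preview size (illustrative)

        QSize fit(1920, 1080);
        fit.scale(max_w, max_h, Qt::KeepAspectRatio);       // -> 1280x720, aspect preserved

        QSize stretch(640, 640);
        stretch.scale(max_w, max_h, Qt::IgnoreAspectRatio); // -> 1280x720, aspect ignored

        // SCALE_CROP: build two candidates and keep the one that covers the whole area
        QSize src(640, 480);
        QSize width_size(max_w, int(std::round(max_w / (float(src.width()) / float(src.height())))));
        QSize height_size(int(std::round(max_h / (float(src.height()) / float(src.width())))), max_h);
        QSize crop = (width_size.width() >= max_w && width_size.height() >= max_h)
                         ? width_size : height_size;        // -> 1280x960; overflow is cropped later

        printf("fit:     %dx%d\n", fit.width(), fit.height());
        printf("stretch: %dx%d\n", stretch.width(), stretch.height());
        printf("crop:    %dx%d\n", crop.width(), crop.height());
        return 0;
    }

SCALE_CROP deliberately chooses the candidate that covers the whole target area, so the excess is cropped rather than letterboxed, while SCALE_FIT accepts letterboxing to keep the full image visible.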
(Settings::Instance()->MAX_WIDTH - scaled_source_width) / 2.0; // center + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height); // bottom break; case (GRAVITY_BOTTOM_RIGHT): - x = max_width - scaled_source_width; // right - y = (max_height - scaled_source_height); // bottom + x = Settings::Instance()->MAX_WIDTH - scaled_source_width; // right + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height); // bottom break; } @@ -463,8 +475,8 @@ /* LOCATION, ROTATION, AND SCALE */ float r = source_clip->rotation.GetValue(clip_frame_number); // rotate in degrees - x += (max_width * source_clip->location_x.GetValue(clip_frame_number)); // move in percentage of final width - y += (max_height * source_clip->location_y.GetValue(clip_frame_number)); // move in percentage of final height + x += (Settings::Instance()->MAX_WIDTH * source_clip->location_x.GetValue(clip_frame_number)); // move in percentage of final width + y += (Settings::Instance()->MAX_HEIGHT * source_clip->location_y.GetValue(clip_frame_number)); // move in percentage of final height float shear_x = source_clip->shear_x.GetValue(clip_frame_number); float shear_y = source_clip->shear_y.GetValue(clip_frame_number); @@ -575,8 +587,13 @@ // Add clip to 'opened' list, because it's missing open_clips[clip] = clip; - // Open the clip - clip->Open(); + try { + // Open the clip + clip->Open(); + + } catch (const InvalidFile & e) { + // ... + } } // Debug output @@ -716,7 +733,7 @@ #pragma omp parallel { // Loop through all requested frames - #pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames) + #pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames) schedule(static,1) for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++) { // Debug output @@ -726,7 +743,7 @@ int samples_in_frame = Frame::GetSamplesPerFrame(frame_number, info.fps, info.sample_rate, info.channels); // Create blank frame (which will become the requested frame) - std::shared_ptr new_frame(std::make_shared(frame_number, max_width, max_height, "#000000", samples_in_frame, info.channels)); + std::shared_ptr new_frame(std::make_shared(frame_number, Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, "#000000", samples_in_frame, info.channels)); #pragma omp critical (T_GetFrame) { new_frame->AddAudioSilence(samples_in_frame); @@ -740,7 +757,7 @@ // Add Background Color to 1st layer (if animated or not black) if ((color.red.Points.size() > 1 || color.green.Points.size() > 1 || color.blue.Points.size() > 1) || (color.red.GetValue(frame_number) != 0.0 || color.green.GetValue(frame_number) != 0.0 || color.blue.GetValue(frame_number) != 0.0)) - new_frame->AddColor(max_width, max_height, color.GetColorHex(frame_number)); + new_frame->AddColor(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, color.GetColorHex(frame_number)); // Debug output ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Loop through clips)", "frame_number", frame_number, "clips.size()", clips.size(), "nearby_clips.size()", nearby_clips.size(), "", -1, "", -1, "", -1); @@ -763,17 +780,27 @@ { // Determine if clip is "top" clip on this layer (only happens when multiple clips are overlapping) bool is_top_clip = true; + float max_volume = 0.0; for (int top_clip_index = 0; top_clip_index < nearby_clips.size(); top_clip_index++) { Clip *nearby_clip = nearby_clips[top_clip_index]; long nearby_clip_start_position = round(nearby_clip->Position() * 
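The gravity switch above only computes an (x, y) offset for the scaled frame inside the preview area; GRAVITY_TOP_LEFT keeps the default 0,0. A compact sketch of that arithmetic with an illustrative enum and fixed sizes:

    #include <cstdio>

    enum DemoGravity { GRAVITY_TOP_LEFT, GRAVITY_CENTER, GRAVITY_BOTTOM_RIGHT };

    // Offset of a scaled frame inside the preview area for a few gravity values
    void GravityOffset(DemoGravity g, int max_w, int max_h,
                       int scaled_w, int scaled_h, float &x, float &y) {
        x = 0.0f; y = 0.0f;                      // GRAVITY_TOP_LEFT needs no offset
        if (g == GRAVITY_CENTER) {
            x = (max_w - scaled_w) / 2.0f;       // center horizontally
            y = (max_h - scaled_h) / 2.0f;       // center vertically
        } else if (g == GRAVITY_BOTTOM_RIGHT) {
            x = max_w - scaled_w;                // flush right
            y = max_h - scaled_h;                // flush bottom
        }
    }

    int main() {
        float x = 0.0f, y = 0.0f;
        GravityOffset(GRAVITY_CENTER, 1280, 720, 640, 360, x, y);
        printf("center:       %.0f,%.0f\n", x, y);   // 320,180
        GravityOffset(GRAVITY_BOTTOM_RIGHT, 1280, 720, 640, 360, x, y);
        printf("bottom-right: %.0f,%.0f\n", x, y);   // 640,360
        return 0;
    }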
info.fps.ToDouble()) + 1; long nearby_clip_end_position = round((nearby_clip->Position() + nearby_clip->Duration()) * info.fps.ToDouble()) + 1; + long nearby_clip_start_frame = (nearby_clip->Start() * info.fps.ToDouble()) + 1; + long nearby_clip_frame_number = frame_number - nearby_clip_start_position + nearby_clip_start_frame; + // Determine if top clip if (clip->Id() != nearby_clip->Id() && clip->Layer() == nearby_clip->Layer() && nearby_clip_start_position <= frame_number && nearby_clip_end_position >= frame_number && - nearby_clip_start_position > clip_start_position) { + nearby_clip_start_position > clip_start_position && is_top_clip == true) { is_top_clip = false; - break; + } + + // Determine max volume of overlapping clips + if (nearby_clip->Reader() && nearby_clip->Reader()->info.has_audio && + nearby_clip->has_audio.GetInt(nearby_clip_frame_number) != 0 && + nearby_clip_start_position <= frame_number && nearby_clip_end_position >= frame_number) { + max_volume += nearby_clip->volume.GetValue(nearby_clip_frame_number); } } @@ -785,7 +812,7 @@ ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Calculate clip's frame #)", "clip->Position()", clip->Position(), "clip->Start()", clip->Start(), "info.fps.ToFloat()", info.fps.ToFloat(), "clip_frame_number", clip_frame_number, "", -1, "", -1); // Add clip's frame as layer - add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip); + add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip, max_volume); } else // Debug output @@ -797,7 +824,7 @@ ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Add frame to cache)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1); // Set frame # on mapped frame - #pragma omp critical (T_GetFrame) + #pragma omp ordered { new_frame->SetFrameNumber(frame_number); @@ -989,13 +1016,14 @@ if (!existing_effect["type"].isNull()) { // Create instance of effect - e = EffectInfo().CreateEffect(existing_effect["type"].asString()); + if (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) { - // Load Json into Effect - e->SetJsonValue(existing_effect); + // Load Json into Effect + e->SetJsonValue(existing_effect); - // Add Effect to Timeline - AddEffect(e); + // Add Effect to Timeline + AddEffect(e); + } } } } @@ -1142,6 +1170,9 @@ clip->SetJsonValue(change["value"]); // Set properties of new clip from JSON AddClip(clip); // Add clip to timeline + // Apply framemapper (or update existing framemapper) + apply_mapper_to_clip(clip); + } else if (change_type == "update") { // Update existing clip @@ -1159,16 +1190,8 @@ // Update clip properties from JSON existing_clip->SetJsonValue(change["value"]); - // Clear any cached image sizes (since size might have changed) - existing_clip->SetMaxSize(0, 0); // force clearing of cached image size - if (existing_clip->Reader()) { - existing_clip->Reader()->SetMaxSize(0, 0); - if (existing_clip->Reader()->Name() == "FrameMapper") { - FrameMapper *nested_reader = (FrameMapper *) existing_clip->Reader(); - if (nested_reader->Reader()) - nested_reader->Reader()->SetMaxSize(0, 0); - } - } + // Apply framemapper (or update existing framemapper) + apply_mapper_to_clip(existing_clip); } } else if (change_type == "delete") { @@ -1253,13 +1276,14 @@ EffectBase *e = NULL; // Init the matching effect object - e = EffectInfo().CreateEffect(effect_type); + if (e = EffectInfo().CreateEffect(effect_type)) { - // Load Json into Effect - e->SetJsonValue(change["value"]); + // 
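The overlap loop above converts each nearby clip's Position() and Start() (seconds) into 1-based timeline and clip frame numbers before summing volume, which is why every position gets a +1. A small standalone sketch of that conversion; the DemoClip struct and the numbers are illustrative:

    #include <cmath>
    #include <cstdio>

    struct DemoClip {
        double position;   // where the clip sits on the timeline (seconds)
        double start;      // trim point inside the clip (seconds)
        double duration;   // clip length on the timeline (seconds)
    };

    int main() {
        const double fps = 30.0;
        DemoClip clip{2.0, 0.5, 10.0};

        long start_position = std::lround(clip.position * fps) + 1;                    // first timeline frame (1-based)
        long end_position   = std::lround((clip.position + clip.duration) * fps) + 1;  // last timeline frame
        long start_frame    = long(clip.start * fps) + 1;                              // first frame used inside the clip

        long timeline_frame = 100;                                                     // frame being rendered
        long clip_frame     = timeline_frame - start_position + start_frame;           // matching frame inside the clip

        printf("timeline frames %ld..%ld, clip frame for %ld = %ld\n",
               start_position, end_position, timeline_frame, clip_frame);              // 61..361, clip frame 55
        return 0;
    }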
Load Json into Effect + e->SetJsonValue(change["value"]); - // Add Effect to Timeline - AddEffect(e); + // Add Effect to Timeline + AddEffect(e); + } } else if (change_type == "update") { @@ -1414,3 +1438,11 @@ } } + +// Set Max Image Size (used for performance optimization). Convenience function for setting +// Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT. +void Timeline::SetMaxSize(int width, int height) { + // Init max image size (choose the smallest one) + Settings::Instance()->MAX_WIDTH = min(width, info.width); + Settings::Instance()->MAX_HEIGHT = min(height, info.height); +} \ No newline at end of file diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/CMakeLists.txt libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/CMakeLists.txt --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/CMakeLists.txt 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/CMakeLists.txt 2019-03-21 07:31:31.000000000 +0000 @@ -24,16 +24,19 @@ # along with OpenShot Library. If not, see . ################################################################################ +SET(TEST_MEDIA_PATH "${openshot_SOURCE_DIR}/src/examples/") ################ WINDOWS ################## # Set some compiler options for Windows # required for libopenshot-audio headers IF (WIN32) - STRING(REPLACE "/" "\\\\" TEST_MEDIA_PATH "${openshot_SOURCE_DIR}/src/examples/") - add_definitions( -DIGNORE_JUCE_HYPOT=1 -DTEST_MEDIA_PATH="${TEST_MEDIA_PATH}" ) + STRING(REPLACE "/" "\\\\" TEST_MEDIA_PATH TEST_MEDIA_PATH) + add_definitions( -DIGNORE_JUCE_HYPOT=1 ) SET(CMAKE_CXX_FLAGS " ${CMAKE_CXX_FLAGS} -include cmath") ENDIF(WIN32) +add_definitions( -DTEST_MEDIA_PATH="${TEST_MEDIA_PATH}" ) + ################### UNITTEST++ ##################### # Find UnitTest++ libraries (used for unit testing) FIND_PACKAGE(UnitTest++ REQUIRED) @@ -76,7 +79,33 @@ FIND_PACKAGE(FFmpeg REQUIRED) # Include FFmpeg headers (needed for compile) -include_directories(${FFMPEG_INCLUDE_DIR}) +IF (AVCODEC_FOUND) + include_directories(${AVCODEC_INCLUDE_DIRS}) +ENDIF (AVCODEC_FOUND) +IF (AVDEVICE_FOUND) + include_directories(${AVDEVICE_INCLUDE_DIRS}) +ENDIF (AVDEVICE_FOUND) +IF (AVFORMAT_FOUND) + include_directories(${AVFORMAT_INCLUDE_DIRS}) +ENDIF (AVFORMAT_FOUND) +IF (AVFILTER_FOUND) + include_directories(${AVFILTER_INCLUDE_DIRS}) +ENDIF (AVFILTER_FOUND) +IF (AVUTIL_FOUND) + include_directories(${AVUTIL_INCLUDE_DIRS}) +ENDIF (AVUTIL_FOUND) +IF (POSTPROC_FOUND) + include_directories(${POSTPROC_INCLUDE_DIRS}) +ENDIF (POSTPROC_FOUND) +IF (SWSCALE_FOUND) + include_directories(${SWSCALE_INCLUDE_DIRS}) +ENDIF (SWSCALE_FOUND) +IF (SWRESAMPLE_FOUND) + include_directories(${SWRESAMPLE_INCLUDE_DIRS}) +ENDIF (SWRESAMPLE_FOUND) +IF (AVRESAMPLE_FOUND) + include_directories(${AVRESAMPLE_INCLUDE_DIRS}) +ENDIF (AVRESAMPLE_FOUND) ################# LIBOPENSHOT-AUDIO ################### # Find JUCE-based openshot Audio libraries @@ -147,12 +176,26 @@ # Find ZeroMQ library (used for socket communication & logging) FIND_PACKAGE(ZMQ REQUIRED) -# Include FFmpeg headers (needed for compile) +# Include ZeroMQ headers (needed for compile) include_directories(${ZMQ_INCLUDE_DIRS}) +################### RESVG ##################### +# Find resvg library (used for rendering svg files) +FIND_PACKAGE(RESVG) + +# Include resvg headers (optional SVG library) +if (RESVG_FOUND) + include_directories(${RESVG_INCLUDE_DIRS}) +endif(RESVG_FOUND) + ################### JSONCPP 
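The new Timeline::SetMaxSize above clamps the requested preview size to the project size before storing it in the global Settings. A minimal sketch of that clamp with illustrative names:

    #include <algorithm>
    #include <cstdio>

    struct DemoProjectInfo { int width = 1920; int height = 1080; };  // project (timeline) size

    // Clamp the requested preview size to the project size (choose the smaller of each)
    void SetMaxSize(const DemoProjectInfo &info, int &max_w, int &max_h, int width, int height) {
        max_w = std::min(width, info.width);
        max_h = std::min(height, info.height);
    }

    int main() {
        DemoProjectInfo info;
        int max_w = 0, max_h = 0;

        SetMaxSize(info, max_w, max_h, 3840, 2160);
        printf("%dx%d\n", max_w, max_h);   // 1920x1080 (never larger than the project)

        SetMaxSize(info, max_w, max_h, 1280, 720);
        printf("%dx%d\n", max_w, max_h);   // 1280x720
        return 0;
    }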
##################### # Include jsoncpp headers (needed for JSON parsing) -include_directories("../thirdparty/jsoncpp/include") +if (USE_SYSTEM_JSONCPP) + find_package(JsonCpp REQUIRED) + include_directories(${JSONCPP_INCLUDE_DIRS}) +else() + include_directories("../thirdparty/jsoncpp/include") +endif(USE_SYSTEM_JSONCPP) IF (NOT DISABLE_TESTS) ############### SET TEST SOURCE FILES ################# @@ -169,6 +212,7 @@ FrameMapper_Tests.cpp KeyFrame_Tests.cpp Point_Tests.cpp + Settings_Tests.cpp Timeline_Tests.cpp ) ################ TESTER EXECUTABLE ################# @@ -184,4 +228,4 @@ #################### MAKE TEST ###################### # Hook up the 'make test' target to the 'openshot-test' executable ADD_CUSTOM_TARGET(test ${CMAKE_CURRENT_BINARY_DIR}/openshot-test) -ENDIF (NOT DISABLE_TESTS) \ No newline at end of file +ENDIF (NOT DISABLE_TESTS) diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/FFmpegReader_Tests.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/FFmpegReader_Tests.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/FFmpegReader_Tests.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/FFmpegReader_Tests.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -72,7 +72,7 @@ CHECK_CLOSE(0.0f, samples[50], 0.00001); CHECK_CLOSE(0.0f, samples[100], 0.00001); CHECK_CLOSE(0.0f, samples[200], 0.00001); - CHECK_CLOSE(0.160781, samples[230], 0.00001); + CHECK_CLOSE(0.160781f, samples[230], 0.00001); CHECK_CLOSE(-0.06125f, samples[300], 0.00001); // Close reader diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/FrameMapper_Tests.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/FrameMapper_Tests.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/FrameMapper_Tests.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/FrameMapper_Tests.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -199,9 +199,9 @@ // Check details CHECK_EQUAL(1, map.GetFrame(1)->GetAudioChannelsCount()); - CHECK_EQUAL(882, map.GetFrame(1)->GetAudioSamplesCount()); - CHECK_EQUAL(882, map.GetFrame(2)->GetAudioSamplesCount()); - CHECK_EQUAL(882, map.GetFrame(50)->GetAudioSamplesCount()); + CHECK_CLOSE(882, map.GetFrame(1)->GetAudioSamplesCount(), 10.0); + CHECK_CLOSE(882, map.GetFrame(2)->GetAudioSamplesCount(), 10.0); + CHECK_CLOSE(882, map.GetFrame(50)->GetAudioSamplesCount(), 10.0); // Close mapper map.Close(); diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/KeyFrame_Tests.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/KeyFrame_Tests.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/KeyFrame_Tests.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/KeyFrame_Tests.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -377,4 +377,20 @@ // Spot check values from the curve CHECK_EQUAL(kf.GetLength(), 1); CHECK_CLOSE(kf.GetPoint(0).co.Y, 2.0, 0.01); +} + +TEST(Keyframe_Large_Number_Values) +{ + // Large value + int64_t large_value = 30 * 60 * 90; + + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 1.0); + kf.AddPoint(large_value, 100.0); // 90 minutes long + + // Spot check values from the curve + CHECK_EQUAL(kf.GetLength(), large_value + 1); + CHECK_CLOSE(kf.GetPoint(0).co.Y, 1.0, 0.01); + CHECK_CLOSE(kf.GetPoint(1).co.Y, 100.0, 
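The FrameMapper assertions above switch from CHECK_EQUAL(882, ...) to CHECK_CLOSE with a tolerance of 10 samples, presumably because the mapped sample count can now vary slightly from frame to frame. As a point of arithmetic, 882 is exactly 44100 Hz divided by 50 fps. A rough standalone approximation of that samples-per-frame relationship; the library's own Frame::GetSamplesPerFrame is more involved, hence the tolerance:

    #include <cmath>
    #include <cstdio>

    // Naive samples-per-frame estimate: sample_rate / fps, rounded
    int ApproxSamplesPerFrame(int sample_rate, double fps) {
        return int(std::round(sample_rate / fps));
    }

    int main() {
        printf("%d\n", ApproxSamplesPerFrame(44100, 50.0));             // 882
        printf("%d\n", ApproxSamplesPerFrame(44100, 30000.0 / 1001.0)); // 1471 (29.97 fps; real counts vary per frame)
        return 0;
    }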
0.01); } \ No newline at end of file diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/ReaderBase_Tests.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/ReaderBase_Tests.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/ReaderBase_Tests.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/ReaderBase_Tests.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -44,9 +44,9 @@ std::shared_ptr GetFrame(int64_t number) { std::shared_ptr f(new Frame()); return f; } void Close() { }; void Open() { }; - string Json() { }; + string Json() { return NULL; }; void SetJson(string value) { }; - Json::Value JsonValue() { }; + Json::Value JsonValue() { return (int) NULL; }; void SetJsonValue(Json::Value root) { }; bool IsOpen() { return true; }; string Name() { return "TestReader"; }; diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/Settings_Tests.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/Settings_Tests.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/Settings_Tests.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/Settings_Tests.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,63 @@ +/** + * @file + * @brief Unit tests for openshot::Color + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#include "UnitTest++.h" +#include "../include/OpenShot.h" + +using namespace std; +using namespace openshot; + +TEST(Settings_Default_Constructor) +{ + // Create an empty color + Settings *s = Settings::Instance(); + + CHECK_EQUAL(false, s->HARDWARE_DECODE); + CHECK_EQUAL(false, s->HARDWARE_ENCODE); + CHECK_EQUAL(false, s->HIGH_QUALITY_SCALING); + CHECK_EQUAL(false, s->WAIT_FOR_VIDEO_PROCESSING_TASK); +} + +TEST(Settings_Change_Settings) +{ + // Create an empty color + Settings *s = Settings::Instance(); + s->HARDWARE_DECODE = true; + s->HARDWARE_ENCODE = true; + s->HIGH_QUALITY_SCALING = true; + s->WAIT_FOR_VIDEO_PROCESSING_TASK = true; + + CHECK_EQUAL(true, s->HARDWARE_DECODE); + CHECK_EQUAL(true, s->HARDWARE_ENCODE); + CHECK_EQUAL(true, s->HIGH_QUALITY_SCALING); + CHECK_EQUAL(true, s->WAIT_FOR_VIDEO_PROCESSING_TASK); + + CHECK_EQUAL(true, s->HARDWARE_DECODE); + CHECK_EQUAL(true, s->HARDWARE_ENCODE); + CHECK_EQUAL(true, Settings::Instance()->HIGH_QUALITY_SCALING); + CHECK_EQUAL(true, Settings::Instance()->WAIT_FOR_VIDEO_PROCESSING_TASK); +} \ No newline at end of file diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/Timeline_Tests.cpp libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/Timeline_Tests.cpp --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/tests/Timeline_Tests.cpp 2017-11-23 22:40:10.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/tests/Timeline_Tests.cpp 2019-03-21 07:31:31.000000000 +0000 @@ -119,7 +119,7 @@ int pixel_index = 230 * 4; // pixel 230 (4 bytes per pixel) // Check image properties - CHECK_EQUAL(21, f->GetPixels(pixel_row)[pixel_index]); + CHECK_EQUAL(21, (int)f->GetPixels(pixel_row)[pixel_index]); CHECK_EQUAL(191, (int)f->GetPixels(pixel_row)[pixel_index + 1]); CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 2]); CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); @@ -439,4 +439,4 @@ // Close reader t.Close(); -} \ No newline at end of file +} diff -Nru libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.travis.yml libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.travis.yml --- libopenshot-0.1.9+0+625+119+201711232240+daily~ubuntu17.04.1/.travis.yml 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.3+dfsg2+714+201903210731+daily~ubuntu18.04.1/.travis.yml 2019-03-21 07:31:31.000000000 +0000 @@ -0,0 +1,49 @@ +dist: trusty + +matrix: + include: + - language: cpp + name: "FFmpeg 2" + before_script: + - sudo add-apt-repository ppa:openshot.developers/libopenshot-daily -y + - sudo add-apt-repository ppa:beineri/opt-qt58-trusty -y + - sudo apt-get update -qq + - sudo apt-get install gcc-4.8 cmake libavcodec-dev libavformat-dev libswscale-dev libavresample-dev libavutil-dev libopenshot-audio-dev libopenshot-dev libfdk-aac-dev libfdk-aac-dev libjsoncpp-dev libmagick++-dev libopenshot-audio-dev libunittest++-dev libzmq3-dev pkg-config python3-dev qtbase5-dev qtmultimedia5-dev swig -y + - sudo apt autoremove -y + script: + - mkdir -p build; cd build; + - cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ + - make VERBOSE=1 + - make test + + - language: cpp + name: "FFmpeg 3" + before_script: + - sudo add-apt-repository ppa:openshot.developers/libopenshot-daily -y + - sudo add-apt-repository ppa:beineri/opt-qt58-trusty -y + - sudo add-apt-repository ppa:jonathonf/ffmpeg-3 -y + - sudo apt-get update -qq + - sudo apt-get install gcc-4.8 cmake libavcodec-dev libavformat-dev libswscale-dev libavresample-dev libavutil-dev 
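The new Settings_Tests.cpp follows the same UnitTest++ conventions as the other test changes in this diff: TEST blocks, CHECK_EQUAL for exact values, and CHECK_CLOSE with an absolute tolerance for floating-point or variable counts. A self-contained sketch of those macros, assuming UnitTest++ is installed; the test name and values are illustrative:

    #include "UnitTest++.h"

    TEST(Demo_Check_Macros)
    {
        int samples = 881;
        CHECK_EQUAL(881, samples);                    // exact match required
        CHECK_CLOSE(882, samples, 10.0);              // passes: |882 - 881| <= 10
        CHECK_CLOSE(0.160781f, 0.160785f, 0.00001);   // absolute tolerance, as in FFmpegReader_Tests
    }

    int main(int, char**)
    {
        // Run every TEST() registered in this binary and return the failure count
        return UnitTest::RunAllTests();
    }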
libopenshot-audio-dev libopenshot-dev libfdk-aac-dev libfdk-aac-dev libjsoncpp-dev libmagick++-dev libopenshot-audio-dev libunittest++-dev libzmq3-dev pkg-config python3-dev qtbase5-dev qtmultimedia5-dev swig -y + - sudo apt autoremove -y + script: + - mkdir -p build; cd build; + - cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ + - make VERBOSE=1 + - make test + + - language: cpp + name: "FFmpeg 4" + before_script: + - sudo add-apt-repository ppa:openshot.developers/libopenshot-daily -y + - sudo add-apt-repository ppa:beineri/opt-qt58-trusty -y + - sudo add-apt-repository ppa:jonathonf/ffmpeg -y + - sudo add-apt-repository ppa:jonathonf/ffmpeg-4 -y + - sudo add-apt-repository ppa:jonathonf/backports -y + - sudo apt-get update -qq + - sudo apt-get install gcc-4.8 cmake libavcodec58 libavformat58 libavcodec-dev libavformat-dev libswscale-dev libavresample-dev libavutil-dev libopenshot-audio-dev libopenshot-dev libfdk-aac-dev libfdk-aac-dev libjsoncpp-dev libmagick++-dev libopenshot-audio-dev libunittest++-dev libzmq3-dev pkg-config python3-dev qtbase5-dev qtmultimedia5-dev swig -y + - sudo apt autoremove -y + script: + - mkdir -p build; cd build; + - cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ + - make VERBOSE=1 + - make test